diff --git a/CHANGELOG.md b/CHANGELOG.md index b6cd7120..aeb07b03 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,11 +7,122 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Fixed + +- **`_readPatchBlob` null-guard** — `readBlob()` returning null (corrupt/missing blob) now throws `PersistenceError` with `E_MISSING_OBJECT` instead of passing null to the CBOR decoder. +- **`browser.d.ts` missing exports** — Added `WarpError`, `createVersionVector`, and `generateWriterId` type declarations to match `browser.js` runtime exports. Fixed `WarpGraph` re-export from default to named. +- **`package.json` files array missing type declarations** — Added `browser.d.ts` and `sha1sync.d.ts` to the `files` array so npm consumers receive browser/sha1sync type definitions. +- **`isLoopback()` wildcard address documentation** — Added JSDoc and test coverage to explicitly document that wildcard bind addresses (`0.0.0.0`, `::`, `0:0:0:0:0:0:0:0`) are not treated as loopback and require `--expose`. +- **Browser and sha1sync subpath exports missing `types` field** — `package.json` `"./browser"` and `"./sha1sync"` exports now include `"types"` entries pointing to `browser.d.ts` and `sha1sync.d.ts`, enabling TypeScript resolution for subpath consumers. +- **`jsr.json` missing `browser.js` in publish.include** — JSR consumers importing `@git-stunts/git-warp/browser` now receive the file. +- **`git warp serve` help text missing `--port`, `--host`, `--expose` flags** — All serve-specific options now appear in `--help` output. +- **`WarpServeService` non-integer seek ceiling** — Fractional ceilings (e.g. `3.5`) are now rejected with `E_INVALID_PAYLOAD`. `Infinity` is intentionally accepted (treated as head). +- **`WarpServeService` oversized message guard** — Messages exceeding 1 MiB are rejected with `E_MESSAGE_TOO_LARGE` before `JSON.parse`, preventing OOM on malicious payloads. 
+- **`WarpServeService` oversized property value guard** — Wildcard-typed mutation args exceeding 64 KiB are rejected with `E_INVALID_ARGS`. +- **`SyncProtocol` / `WormholeService` null blob guard** — `readBlob()` / `retrieve()` results are now null-checked, throwing `PersistenceError(E_MISSING_OBJECT)` instead of passing `null` to the codec. +- **`hexDecode` regex replaced with charCode loop** — Direct character code validation avoids regex backtracking on large inputs. +- **WS adapter pre-handler message buffering** — Messages arriving before `onMessage(handler)` is called are now buffered and flushed when the handler is set. Prevents message loss in all WS adapters (Node, Bun, Deno) when connection setup is asynchronous. +- **NodeWsAdapter `onError` callback** — Constructor now accepts an optional `onError` callback that surfaces runtime server errors instead of silently swallowing them. +- **`wsAdapterUtils.messageToString()` TextDecoder reuse** — Hoisted `TextDecoder` to module level, avoiding per-call allocation. +- **Static file handler response objects frozen** — `FORBIDDEN` and `NOT_FOUND` response constants are now `Object.freeze()`d to prevent accidental mutation. +- **`sha1sync` comment clarification** — Updated misleading comment about the `>= 0x20000000` guard to explain it ensures `msg.length * 8` fits in uint32. +- **`_broadcastDiff` Set mutation during iteration** — Deleting dead clients from `this._clients` mid-`for...of` could skip the next entry. Dead connections are now collected and evicted after the loop completes. +- **Double-SIGINT re-entrancy in `serve` shutdown** — Rapid Ctrl+C fired `shutdown()` concurrently twice, racing `close()` and `process.exit()`. Added a `closing` guard. +- **Catch-all error envelope double-parsing** — The last-resort `.catch()` on `_onMessage` re-parsed the raw JSON to extract the correlation `id`. 
The ID is now extracted before the async call, avoiding double-parse and ensuring availability even if the raw message was consumed. +- **`WarpServeService` bare `Function` types** — Replaced loose `Function` JSDoc types in `resolveGraph`, constructor, and `_applyMutateOps` with a typed `GraphHandle` typedef carrying specific method signatures. +- **`jsr.json` missing `./browser` and `./sha1sync` exports** — Subpath exports added to `package.json` were not mirrored in `jsr.json`. JSR consumers can now import both. +- **`CasBlobAdapter` JSDoc `Buffer|Uint8Array`** — Narrowed `encryptionKey` type to `Uint8Array` per project convention. +- **`WarpServeService.listen()` double-call guard** — Calling `listen()` twice no longer silently creates duplicate subscriptions. Second call throws `"Server is already listening"`. +- **`WarpServeService.close()` dangling sockets** — Active WebSocket connections are now closed during shutdown instead of being silently abandoned. +- **`WarpServeService._handleOpen()` premature openGraphs add** — Graph is now marked as open only after materialization succeeds, preventing stale entries on failure. +- **`WarpServeService._applyMutateOps()` interleaved validation** — All ops in a batch are validated before `createPatch()` is called, avoiding wasted patch allocations on invalid input. +- **`base64Decode` silent garbage acceptance** — Malformed base64 input now throws `RangeError` instead of silently decoding to wrong output. +- **`NodeWsAdapter` state leak on failed start** — `listen()` failures now reset internal state (`_wss`, `_httpServer`), unblocking subsequent retry attempts. +- **`isLoopback()` incomplete range** — Now recognizes the full `127.0.0.0/8` range, not just `127.0.0.1`. +- **`buildSeekCacheKey` outside try/catch** — Cache key generation failure (e.g., crypto unavailable) is now caught and treated as a cache miss instead of breaking materialization. 
+- **`BunWsAdapter` test `globalThis.Bun` leak** — Tests now save and restore the original `globalThis.Bun` instead of deleting it unconditionally. +- **`vi.waitFor()` boolean callbacks in serve tests** — Replaced 22 boolean-returning callbacks with assertion-based ones to prevent premature resolution. +- **`WarpServeService.listen()` leaked subscriptions on bind failure** — If `server.listen()` rejected (e.g., EADDRINUSE), graph subscriptions were already registered and never cleaned up, causing ghost broadcast handlers. `listen()` now defers `_server` assignment and subscription registration until bind succeeds, and cleans up on failure. +- **`_onConnection` catch leaked internal error details** — The last-resort catch handler sent raw `err.message` (which could contain file paths, stack traces, etc.) to untrusted WebSocket clients. Now sends a generic `"Internal error"` message. +- **`git warp serve` silent blob data loss** — Mutation ops like `attachContent` and `attachEdgeContent` are async (they write blobs), but `_applyMutateOps` was not awaiting them. `patch.commit()` could fire before the blob write completed. Now all ops are awaited. +- **DenoWsAdapter port-0 resolution** — When binding to port 0 (OS-assigned), `onListen` resolved with the requested port (0) instead of the actual assigned port. Now reads `server.addr.port`, matching Node and Bun adapter behavior. +- **Static file handler symlink traversal** — A symlink inside `staticDir` pointing outside the root could bypass `safePath()` and serve arbitrary files. `tryReadFile` now resolves symlinks with `realpath()` and re-checks the prefix before reading. +- **`base64Encode` / `base64Decode` memory overhead** — Replaced intermediate binary string approach (`String.fromCharCode` / `charCodeAt` via `btoa`/`atob`) with direct table-based base64 encoding/decoding, eliminating memory spikes on large buffers (e.g., StreamingBitmapIndexBuilder shards). 
+- **Static file handler null-byte bypass** — `safePath()` now re-checks for `\0` after `decodeURIComponent()` (prevents `%00` bypass) and catches malformed percent-encoding (e.g., `%ZZ`) instead of throwing. +- **`git warp serve` writerId validation** — The auto-generated writerId (`serve:host:port`) contained colons, which are not allowed by `validateWriterId`. Now sanitizes to `serve-host-port` by replacing invalid characters with dashes. +- **`git warp serve` port-0 writerId collision** — When binding to port 0 (OS-assigned ephemeral port), every invocation produced the same writerId `serve-127.0.0.1-0`. Now includes a timestamp and PID component (`ephemeral-<timestamp>-<pid>`) to prevent collisions even across concurrent invocations in the same millisecond. +- **`git warp serve` IPv6 URL bracketing** — IPv6 addresses like `::1` are now bracketed in WebSocket and HTTP URLs (`ws://[::1]:3000`) per RFC 3986. +- **Inspector WebSocket default URL** — Hardcoded `ws://localhost:3000` replaced with `window.location`-derived URL, so `--static` serving on any port connects correctly without needing `?server=` param. +- **JSDoc type annotations** — Resolved 39 pre-existing `tsc --noEmit` strict-mode errors across 17 source files. Added missing `encrypted`, `blobStorage`, and `patchBlobStorage` fields to JSDoc `@param`/`@typedef` types; created `WarpGraphWithMixins` typedef for mixin methods calling `_readPatchBlob`; installed `@types/ws` for Node WebSocket adapter; fixed `Uint8Array` assignability issues; narrowed `chunking.strategy` literal types for CAS adapters; added type annotations to callback parameters in WS adapters. +- **Inspector: "Go live" after time-travel** — `setCeiling(Infinity)` now calls `socket.open()` to re-materialize at head instead of sending `seek` with no ceiling. The server also now accepts `Infinity` as a ceiling value (treating it as "materialize at head") for robustness. 
+- **Inspector: localStorage persistence timing** — Server URL is now persisted to `localStorage` only after a successful connection, preventing a bad URL from locking users into a reconnect loop on reload. +- **CasBlobAdapter error propagation** — `retrieve()` now uses `CasError.code` (`MANIFEST_NOT_FOUND`, `GIT_ERROR`) from `@git-stunts/git-cas` to identify legacy blob fallback cases, with message-based matching as a fallback for non-CasError exceptions. Previously used brittle string matching on all error messages. +- **Dead `writerIds` code removed** — `WarpServeService` no longer stores per-session `writerIds` from `open` messages. The field was populated but never consumed — all mutations use the server's writer identity. +- **`_broadcastDiff` dead-client resilience** — A single dead WebSocket connection in `_broadcastDiff` could abort the loop, preventing remaining subscribed clients from receiving the diff. Each `send()` is now wrapped in try/catch; dead connections are evicted. +- **`attachContent`/`attachEdgeContent` wire validation** — Mutation arg validation now requires string content for `attachContent` and `attachEdgeContent` over WebSocket JSON. Previously accepted any type via wildcard (`*`), but `Uint8Array` cannot survive JSON serialization. +- **BunWsAdapter `close()` fire-and-forget** — `BunWsAdapter.close()` used `void server.stop()` and returned immediately. Now awaits the `stop()` promise, ensuring graceful shutdown. +- **EncryptionError unused `code` option** — Removed `code` from the constructor options typedef. The error code is always `E_ENCRYPTED_PATCH`; the option was dead. +- **CasBlobAdapter `Buffer.from` → `TextEncoder`** — Replaced `Buffer.from(content, 'utf8')` with `new TextEncoder().encode(content)` for consistency with the Uint8Array domain boundary. 
+- **Crypto adapter hmac wrapping** — Replaced `new Uint8Array(result.buffer, result.byteOffset, result.byteLength)` with `new Uint8Array(result)` in both `defaultCrypto` and `NodeCryptoAdapter.hmac()`, preventing shared ArrayBuffer pool aliasing. +- **Test `Buffer` usage cleanup** — Replaced `Buffer.from()` in type-check consumer test and `Buffer.from(result.buffer)` in CasSeekCacheAdapter test with `TextEncoder`/`TextDecoder`. +- **Duplicate `open()` in encryption test** — Consolidated redundant second `WarpGraph.open()` call in `WarpGraph.encryption.test.js` into a second assertion on the same promise. + +### Changed + +- **BREAKING: Uint8Array migration** — All domain-layer and port contract types narrowed from `Buffer|Uint8Array` to `Uint8Array`. Return types of `readBlob()`, `hmac()`, `serialize()`, `getContent()`, `getEdgeContent()`, and all bitmap index methods now return `Uint8Array` instead of `Buffer`. Downstream TypeScript consumers using Buffer-specific APIs (`.toString('hex')`, `.equals()`) on return values must migrate to `hexEncode()`/`textDecode()` from `domain/utils/bytes.js` and standard comparison operators. Buffer is now confined to infrastructure adapters only. +- **`TrustCrypto` re-export shim deleted** — `src/domain/trust/TrustCrypto.js` (which re-exported from infrastructure) has been removed. Import directly from `src/infrastructure/adapters/TrustCryptoAdapter.js`. The domain layer no longer contains any infrastructure imports. +- **`buildSeekCacheKey` is now async** — Replaced direct `node:crypto` import with domain-local `defaultCrypto.hash()`, eliminating a hexagonal boundary violation. Both call sites were already async. +- **`process.stdout.columns` removed from visualization layer** — Terminal width is now injected from the CLI presenter (composition root). The visualization layer no longer references Node-only globals. 
+- **HTTP adapter DRY cleanup** — Shared `toPortRequest()`, error body constants, and pre-encoded byte arrays extracted into `httpAdapterUtils.js`. BunHttpAdapter and DenoHttpAdapter now import from the shared module. +- **Lazy CAS init extracted** — The duplicated lazy-promise-with-error-reset pattern in `CasBlobAdapter._getCas()` and `CasSeekCacheAdapter._getCas()` replaced with shared `createLazyCas()` factory in `lazyCasInit.js`. +- **`computeRecordId()` and `verifyRecordId()` are now async** — These functions in `TrustCanonical.js` now use the injected `CryptoPort` instead of importing `node:crypto` directly. Callers must `await` the result. +- **`hmac()` returns `Uint8Array`** — `NodeCryptoAdapter.hmac()`, `WebCryptoAdapter.hmac()`, and `defaultCrypto.hmac()` now return `Uint8Array` instead of `Buffer`. The raw HMAC digest bytes are identical; only the wrapper type changed. +- **`@git-stunts/git-cas` v3.0.0 → v5.2.4** — Two major version jump. New capabilities now available: `ObservabilityPort` (replaces EventEmitter), streaming restore, CDC chunking (98.4% chunk reuse), envelope encryption (DEK/KEK), key rotation. No breaking changes for git-warp's usage — `CasSeekCacheAdapter` continues to work as-is. +- **CDC chunking for seek cache** — `CasSeekCacheAdapter` now uses content-defined chunking (`CdcChunker`) instead of fixed-size chunking. Consecutive seek snapshots share most content; CDC's rolling-hash boundaries yield ~98.4% chunk reuse on incremental edits, significantly reducing Git object storage for the seek cache. +- **Encrypted seek cache** — `CasSeekCacheAdapter` accepts an optional `encryptionKey` constructor param. When set, cached state snapshots are encrypted at rest using AES-256-GCM via git-cas. +- **CAS observability bridge** — New `LoggerObservabilityBridge` adapter translates git-cas `ObservabilityPort` calls (metric, log, span) into git-warp `LoggerPort` calls. 
`CasSeekCacheAdapter` accepts an optional `logger` param to surface CAS operations through git-warp's structured logging. +- **Blob attachments via CAS (B160)** — New `BlobStoragePort` and `CasBlobAdapter` provide a hexagonal abstraction for content blob storage. When `blobStorage` is injected, `attachContent()`/`attachEdgeContent()` store blobs via git-cas (CDC-chunked, optionally encrypted) instead of raw Git blobs. `getContent()`/`getEdgeContent()` retrieve via CAS with automatic fallback to raw Git blobs for backward compatibility with pre-CAS content. +- **Streaming seek cache restore (B163)** — `CasSeekCacheAdapter.get()` now prefers `cas.restoreStream()` (git-cas v4+) for I/O pipelining — chunk reads overlap with buffer accumulation. Falls back to `cas.restore()` for older git-cas versions. +- **Graph encryption at rest (B164)** — New `patchBlobStorage` option on `WarpGraph.open()`. When a `BlobStoragePort` (e.g. `CasBlobAdapter` with encryption key) is injected, patch CBOR is encrypted before writing to Git and decrypted on read. An `eg-encrypted: true` commit trailer marks encrypted patches. All 6 patch read sites and the write path are threaded. `EncryptionError` is thrown when attempting to read encrypted patches without a key. Mixed encrypted and unencrypted patches are fully supported — plain patches read via `persistence.readBlob()`, encrypted via `patchBlobStorage.retrieve()`. + ### Added +- **`--writer-id` flag for `git warp serve`** — Allows setting an explicit, stable writer identity instead of the auto-derived `serve-<host>-<port>` value. Useful for reproducible testing and multi-instance orchestration where deterministic writer identities are needed. +- **`src/domain/utils/bytes.js`** — Portable byte-manipulation utilities replacing Node.js Buffer methods: `hexEncode`, `hexDecode`, `base64Encode`, `base64Decode`, `concatBytes`, `textEncode`, `textDecode`. Works identically on Node, Bun, Deno, and browsers. 
+- **ESLint `no-restricted-globals` for Buffer** — `Buffer` is now banned in `src/domain/**/*.js` via ESLint. Future regressions are caught at lint time. +- **`git warp serve --expose` flag** — Binding to a non-loopback address now requires `--expose` to prevent accidental network exposure. Without the flag, the command exits with a usage error. +- **`wsAdapterUtils.js`** — Shared utilities for WebSocket adapters (`normalizeHost`, `assertNotListening`, `messageToString`), following the `httpAdapterUtils.js` pattern. All three WS adapters (Bun, Deno, Node) now use these instead of duplicating host normalization, listen guards, and message decoding. +- **Inspector: architecture pivot to WebSocket** — Rewired the Vue app from in-memory `WarpGraph` instances to a live WebSocket connection via `WarpSocket`. The browser now connects to `git warp serve` and views/edits a real Git-backed graph. Replaced the 4-viewport multi-writer demo with a single-viewport, single-connection model. All mutations go through `socket.mutate()` and state updates arrive via server-pushed diffs. +- **Bun + Deno WebSocket adapters** — `git warp serve` now auto-detects the runtime and uses native WebSocket APIs on all three platforms. `BunWsAdapter` uses `Bun.serve()` with the `websocket` handler option; `DenoWsAdapter` uses `Deno.serve()` + `Deno.upgradeWebSocket()`. The `serve` CLI command dynamically imports only the relevant adapter via `createWsAdapter()`, so the `ws` npm package is never loaded on Bun/Deno. +- **Static file serving** — `git warp serve --static <dir>` serves a built SPA (or any static directory) over HTTP on the same port as the WebSocket server. Supports SPA client-side routing fallback, correct MIME types for common web assets, and path traversal protection. +- **Browser-compatible `InMemoryGraphAdapter`** — Replaced hard `node:crypto` and `node:stream` imports with lazy-loaded fallbacks. 
A new `hash` constructor option lets callers inject a synchronous SHA-1 function for environments where `node:crypto` is unavailable (e.g. browsers). `node:stream` is now dynamically imported only in `logNodesStream()`. +- **Browser-safe `defaultCrypto`** — The domain-level crypto default now lazy-loads `node:crypto` via top-level `await import()` with a try/catch, so importing `WarpGraph` in a browser no longer crashes at module evaluation time. Callers must inject a CryptoPort explicitly when `node:crypto` is unavailable. +- **`sha1sync` utility** (`@git-stunts/git-warp/sha1sync`) — Minimal synchronous SHA-1 implementation (~110 LOC) for browser content addressing with `InMemoryGraphAdapter`. Not for security — only for Git object ID computation. +- **`browser.js` entry point** (`@git-stunts/git-warp/browser`) — Curated re-export of browser-safe code: `WarpGraph`, `InMemoryGraphAdapter`, `WebCryptoAdapter`, CRDT primitives, errors, and `generateWriterId`. No `node:` imports in the critical path. - **Documentation enhancements in README.md** — Added a high-level Documentation Map, a detailed Graph Traversal Directory, an expanded Time-Travel (Seek) guide, and updated Runtime Compatibility information (Node.js, Bun, Deno). - **Local-First Applications use-case** — Added git-warp as a backend for LoFi software. +### Removed + +- **Inspector extracted to standalone repo** — The Git WARP Inspector (formerly `demo/browsa/`) has been extracted to [git-stunts/git-warp-web-inspector](https://github.com/git-stunts/git-warp-web-inspector). The `demo/` directory, `test/unit/browsa/`, and `TASKS.md` have been removed from this repository. +- **Inspector: scenario runner** — Removed `ScenarioPanel.vue` and all scenario infrastructure. Multi-writer scenarios don't apply to the single-connection WebSocket model. +- **Inspector: in-memory sync** — Removed `InProcessSyncBus.js` and `InsecureCryptoAdapter.js`. 
No in-memory sync or browser-side crypto needed with the server-backed architecture. +- **Inspector: multi-viewport grid** — Removed 4-viewport layout, sync buttons, and online/offline toggles. Multiple browser windows serve the multi-writer use case instead. +- **Inspector: Vite stubs** — Removed `src/stubs/` directory (empty.js, node-crypto.js, node-stream.js, node-module.js), `trailerCodecBufferShim()` plugin, and all resolve aliases. The browser no longer imports git-warp — it communicates via WebSocket only. + +### Security + +- **WebSocket mutation op allowlist** — `WarpServeService._handleMutate` now validates mutation ops against `ALLOWED_MUTATE_OPS` (`addNode`, `removeNode`, `addEdge`, `removeEdge`, `setProperty`, `setEdgeProperty`, `attachContent`, `attachEdgeContent`). Previously, any method on the `PatchBuilderV2` prototype could be invoked by a WebSocket client, including internal methods. +- **WebSocket mutation arg validation** — `WarpServeService._applyMutateOps` now validates argument count and types per-op before calling `patch[op](...args)`. Untrusted JSON args with wrong types or counts are rejected with `E_INVALID_ARGS`. +- **Protocol payload validation** — All `WarpServeService` message handlers (`open`, `mutate`, `inspect`, `seek`) now validate incoming payloads for required fields and correct types before processing. Invalid payloads receive `E_INVALID_PAYLOAD` error envelopes. +- **`hexDecode` input validation** — `hexDecode()` now throws `RangeError` on odd-length or non-hex input instead of silently coercing invalid characters to `0x00`. +- **WarpSocket request timeout** — `WarpSocket._request()` now enforces a configurable timeout (default 30s). Pending requests that receive no server response reject with a timeout error instead of leaking forever. +- **Vite `allowedHosts` scoped** — Inspector dev server no longer sets `allowedHosts: true`. Restricted to `localhost` and `127.0.0.1` to prevent DNS rebinding. 
+ +### Documentation + +- **README `git warp serve` flags** — Added `--expose` and `--writer-id` to the CLI usage example. + ## [13.1.0] - 2026-03-04 ### Added diff --git a/README.md b/README.md index e0e53378..a64a9311 100644 --- a/README.md +++ b/README.md @@ -550,6 +550,9 @@ git warp history --writer alice # Check graph health, status, and GC metrics git warp check + +# Start WebSocket server for browser viewer +git warp serve [--port 3000] [--host 127.0.0.1] [--static <dir>] [--expose] [--writer-id <id>] ``` ### Time-Travel (Seek) @@ -591,6 +594,12 @@ All commands accept `--repo <path>` to target a specific Git repository, `--json git warp seek time-travel demo

+### Git WARP Inspector + +The [Git WARP Inspector](https://github.com/git-stunts/git-warp-web-inspector) is an interactive browser-based graph viewer that connects to a live `git warp serve` instance over WebSocket. It renders graphs using ELK layout, supports time-travel via seek, and shows real-time diffs as the graph changes. + +See the [git-warp-web-inspector](https://github.com/git-stunts/git-warp-web-inspector) repository for setup and development instructions. + ## Architecture ```mermaid diff --git a/ROADMAP.md b/ROADMAP.md index cfa2816b..347c537d 100644 --- a/ROADMAP.md +++ b/ROADMAP.md @@ -1,6 +1,6 @@ # ROADMAP — @git-stunts/git-warp -> **Current version:** v13.0.0 +> **Current version:** v14.0.0 > **Last reconciled:** 2026-03-04 (priority triage: 45 standalone items sorted into P0–P6 tiers, wave-based execution order, dependency chains mapped) > **Completed milestones:** [docs/ROADMAP/COMPLETED.md](docs/ROADMAP/COMPLETED.md) @@ -189,10 +189,13 @@ No dependencies. Do these first. ### P1 — Correctness & Test Infrastructure -B36 and B37 improve velocity for all future test work — do them early. B19 + B22 batch as one PR (Conformance Property Pack). +B36 and B37 improve velocity for all future test work — do them early. B19 + B22 batch as one PR (Conformance Property Pack). B165 and B167 completed (Defensive Hardening Sprint); B166 remains. | ID | Item | Effort | |----|------|--------| +| B165 | ✅ **WARPSERVESERVICE `listen()` DEFERRED STATE MUTATION** — `listen()` now defers `_server` assignment and subscription registration until bind succeeds; on failure, cleans up subscriptions. `_onConnection` catch now sends generic `"Internal error"` instead of raw `err.message`. **File:** `src/domain/services/WarpServeService.js` | S | +| B166 | **ADAPTER CLEANUP CONTRACTS** — `NodeWsAdapter.close()` doesn't reset `state.wss`/`state.httpServer`/remove listeners after shutdown; `listenWithHttp` error path leaks partial state. 
**File:** `src/infrastructure/adapters/NodeWsAdapter.js` | M | +| B167 | ✅ **SERVE TEST COVERAGE GAPS** — Added tests for: listen-failure cleanup (leaked subscriptions), double-listen guard, error sanitization (no internal detail leakage), `attachContent`/`attachEdgeContent` smoke tests through mutation pipeline. **File:** `test/unit/domain/services/WarpServeService.test.js` | S | | B36 | **FLUENT STATE BUILDER FOR TESTS** — `StateBuilder` helper replacing manual `WarpStateV5` literals | M | | B37 | **SHARED MOCK PERSISTENCE FIXTURE** — dedup `createMockPersistence()` across trust test files | S | | B48 | **ESLINT BAN `= {}` CONSTRUCTOR DEFAULTS WITH REQUIRED PARAMS** — catches the pattern where `= {}` silently makes required options optional at the type level (found in CommitDagTraversalService, DagTraversal, DagPathFinding, DagTopology, BitmapIndexReader) | S | @@ -250,6 +253,7 @@ No hard dependencies. Pick up opportunistically after P2. |----|------|--------| | B155 | **`levels()` AS LIGHTWEIGHT `--view` LAYOUT** — `levels()` is exactly the Y-axis assignment a layered DAG layout needs. For simple DAGs, `levels()` + left-to-right X sweep could produce clean layouts without the 2.5MB ELK import. Offer `--view --layout=levels` as an instant rendering mode, reserving ELK for complex graphs. **Files:** `src/visualization/layouts/`, `bin/cli/commands/view.js` | M | | B156 | **STRUCTURAL DIFF VIA TRANSITIVE REDUCTION** — compute `transitiveReduction(stateA)` vs `transitiveReduction(stateB)` to produce a compact structural diff that strips implied edges and shows only "load-bearing" changes. Natural fit for H1 (Time-Travel Delta Engine) as `warp diff --mode=structural`. | L | +| B157 | ✅ **BROWSER COMPATIBILITY (Phase 1-3)** — Make `InMemoryGraphAdapter` and `defaultCrypto` browser-safe by lazy-loading `node:crypto`/`node:stream`. New `sha1sync` utility for browser content addressing. New `browser.js` entry point and `./browser`+`./sha1sync` package exports. 
| M | ### P6 — Documentation & Process @@ -267,6 +271,20 @@ Low urgency. Fold into PRs that already touch related files. | B129 | **CONTRIBUTOR REVIEW-LOOP HYGIENE GUIDE** — add section to `CONTRIBUTING.md` covering commit sizing, CodeRabbit cooldown strategy, and when to request bot review. From BACKLOG 2026-02-27. | S | | B147 | **RFC FIELD COUNT DRIFT DETECTOR** — script that counts WarpGraph instance fields (grep `this._` in constructor) and warns if design RFC field counts diverge. Prevents stale numbers in `warpgraph-decomposition.md`. From B145 PR review. **Depends on:** B143 RFC (exists) | S | +### P7 — git-cas Modernization + +Upgrade from `@git-stunts/git-cas` v3.0.0 to v5.2.4 and leverage new capabilities. Currently git-warp only uses git-cas for the seek cache (`CasSeekCacheAdapter`). The v4.x/v5.x releases add ObservabilityPort, streaming restore, CDC chunking (98.4% chunk reuse on edits), envelope encryption (DEK/KEK), and key rotation. + +| ID | Item | Effort | +|----|------|--------| +| B158 | ✅ **UPGRADE `@git-stunts/git-cas` TO v5** — bumped `^3.0.0` → `^5.2.4`. 4872 tests pass, zero regressions. | S | +| B159 | ✅ **CDC CHUNKING FOR SEEK CACHE** — `CasSeekCacheAdapter._initCas()` now constructs CAS with `chunking: { strategy: 'cdc' }`. ~98% chunk reuse on incremental snapshots. | S | +| B160 | ✅ **BLOB ATTACHMENTS VIA CAS** — New `BlobStoragePort` + `CasBlobAdapter` provide a hexagonal abstraction for content blob storage. `PatchBuilderV2.attachContent()`/`attachEdgeContent()` use CAS (chunked, CDC-deduped, optionally encrypted) when `blobStorage` is injected; fall back to raw `persistence.writeBlob()` without it. `getContent()`/`getEdgeContent()` retrieve via `blobStorage.retrieve()` with automatic fallback to raw Git blobs for pre-CAS content. Wired through `WarpGraph`, `Writer`, and all patch creation paths. 16 new tests (4909 total). 
| M | +| B161 | ✅ **ENCRYPTED SEEK CACHE** — `CasSeekCacheAdapter` now accepts optional `encryptionKey` constructor param. When set, all `store()` and `restore()` calls pass the key to git-cas for AES-256-GCM encryption/decryption. 6 new tests (52 total). | S | +| B162 | ✅ **OBSERVABILITY ALIGNMENT** — new `LoggerObservabilityBridge` adapter translates git-cas `ObservabilityPort` calls (metric, log, span) into git-warp `LoggerPort` calls. `CasSeekCacheAdapter` accepts optional `logger` param; when provided, CAS operations surface through git-warp's structured logging. 7 new bridge tests + 2 adapter tests. | M | +| B163 | ✅ **STREAMING RESTORE FOR LARGE STATES** — `CasSeekCacheAdapter.get()` now prefers `cas.restoreStream()` (git-cas v4+) for I/O pipelining, accumulating chunks via async iterator. Falls back to `cas.restore()` for older git-cas. 2 new tests (58 total). | M | +| B164 | ✅ **GRAPH ENCRYPTION AT REST** — New `patchBlobStorage` option on `WarpGraph.open()`. When a `BlobStoragePort` (e.g. `CasBlobAdapter` with encryption key) is injected, patch CBOR is written/read via CAS instead of raw Git blobs. `eg-encrypted: true` trailer marks encrypted commits. All 6 read sites + write path threaded. `EncryptionError` thrown when reading encrypted patches without key. Mixed encrypted/plain patches supported via backward-compatible fallback. 14 new tests (4969 total). | L | + ### Uncategorized / Platform | ID | Item | Effort | @@ -352,6 +370,16 @@ Internal chain: B97 (P0, Wave 1) → B85 → B57. B123 is the largest — may sp 18. **B156** — structural diff (if H1 is in play) 19. Docs/process items (B34, B35, B76, B79, B102–B104, B129, B147) folded into related PRs +#### Wave 7: git-cas Modernization (P7) + +20. **B158** — upgrade `@git-stunts/git-cas` to v5 (unblocks all P7 items) +21. **B159** — CDC chunking for seek cache (quick win after B158) +22. **B161** — encrypted seek cache +23. **B160** — blob attachments via CAS +24. **B162** — observability alignment +25. 
**B163** — streaming restore for large states +26. **B164** — graph encryption at rest (largest, last) + ### Dependency Chains ```text @@ -366,6 +394,13 @@ B154 (P0) ─────┘ adjList dedup (quick fix) B151 (P4) ──→ B152 (P4) closure streaming → full async generator API B36 (P1) ──→ (improves velocity for B99, B19, B22, future tests) + +B158 (P7) ──→ B159 (P7) CDC seek cache + ├──→ B160 (P7) blob attachments + ├──→ B161 (P7) encrypted seek cache + ├──→ B162 (P7) observability alignment + ├──→ B163 (P7) streaming restore + └──→ B164 (P7) graph encryption at rest ``` --- @@ -381,11 +416,11 @@ B36 (P1) ──→ (improves velocity for B99, B19, B22, future tests) | **Milestone (M12)** | 18 | B66, B67, B70, B73, B75, B105–B115, B117, B118 | | **Milestone (M13)** | 1 | B116 (internal: DONE; wire-format: DEFERRED) | | **Milestone (M14)** | 16 | B130–B145 | -| **Standalone** | 45 | B12, B19, B22, B28, B34–B37, B43, B48, B49, B53, B54, B57, B76, B79–B81, B83, B85–B88, B95–B99, B102–B104, B119, B123, B127–B129, B147, B149–B156 | -| **Standalone (done)** | 29 | B26, B44, B46, B47, B50–B52, B55, B71, B72, B77, B78, B82, B84, B89–B94, B100, B120–B122, B124, B125, B126, B146, B148 | +| **Standalone** | 46 | B12, B19, B22, B28, B34–B37, B43, B48, B49, B53, B54, B57, B76, B79–B81, B83, B85–B88, B95–B99, B102–B104, B119, B123, B127–B129, B147, B149–B156, B166 | +| **Standalone (done)** | 39 | B26, B44, B46, B47, B50–B52, B55, B71, B72, B77, B78, B82, B84, B89–B94, B100, B120–B122, B124, B125, B126, B146, B148, B157, B158, B159, B160, B161, B162, B163, B164, B165, B167 | | **Deferred** | 7 | B4, B7, B16, B20, B21, B27, B101 | | **Rejected** | 7 | B5, B6, B13, B17, B18, B25, B45 | -| **Total tracked** | **133** total; 29 standalone done | | +| **Total tracked** | **144** total; 39 standalone done | | ### STANK.md Cross-Reference diff --git a/bin/cli/commands/registry.js b/bin/cli/commands/registry.js index 5b04ae94..d925f8b3 100644 --- a/bin/cli/commands/registry.js +++ 
b/bin/cli/commands/registry.js @@ -15,6 +15,7 @@ import handleTrust from './trust.js'; import handlePatch from './patch.js'; import handleTree from './tree.js'; import handleBisect from './bisect.js'; +import handleServe from './serve.js'; /** @type {Map} */ export const COMMANDS = new Map(/** @type {[string, Function][]} */ ([ @@ -35,4 +36,5 @@ export const COMMANDS = new Map(/** @type {[string, Function][]} */ ([ ['bisect', handleBisect], ['view', handleView], ['install-hooks', handleInstallHooks], + ['serve', handleServe], ])); diff --git a/bin/cli/commands/seek.js b/bin/cli/commands/seek.js index 916804b5..72a5b0da 100644 --- a/bin/cli/commands/seek.js +++ b/bin/cli/commands/seek.js @@ -1,5 +1,6 @@ import { summarizeOps } from '../../../src/visualization/renderers/ascii/history.js'; import { diffStates } from '../../../src/domain/services/StateDiff.js'; +import { textEncode } from '../../../src/domain/utils/bytes.js'; import { buildCursorActiveRef, buildCursorSavedRef, @@ -68,7 +69,7 @@ async function readSavedCursor(persistence, graphName, name) { async function writeSavedCursor(persistence, graphName, name, cursor) { const ref = buildCursorSavedRef(graphName, name); const json = JSON.stringify(cursor); - const oid = await persistence.writeBlob(Buffer.from(json, 'utf8')); + const oid = await persistence.writeBlob(textEncode(json)); await persistence.updateRef(ref, oid); } diff --git a/bin/cli/commands/serve.js b/bin/cli/commands/serve.js new file mode 100644 index 00000000..032a9818 --- /dev/null +++ b/bin/cli/commands/serve.js @@ -0,0 +1,208 @@ +import process from 'node:process'; +import { resolve } from 'node:path'; +import { stat } from 'node:fs/promises'; +import { parseCommandArgs, usageError, notFoundError } from '../infrastructure.js'; +import { serveSchema } from '../schemas.js'; +import { createPersistence, listGraphNames } from '../shared.js'; +import WarpGraph from '../../../src/domain/WarpGraph.js'; +import WebCryptoAdapter from 
'../../../src/infrastructure/adapters/WebCryptoAdapter.js'; +import WarpServeService from '../../../src/domain/services/WarpServeService.js'; + +/** + * Creates the appropriate WebSocket adapter for the current runtime. + * + * @param {string|null} [staticDir] + * @returns {Promise} + */ +async function createWsAdapter(staticDir) { + const opts = staticDir ? { staticDir } : {}; + if (globalThis.Bun) { + const { default: BunWsAdapter } = await import( + '../../../src/infrastructure/adapters/BunWsAdapter.js' + ); + return new BunWsAdapter(opts); + } + if (globalThis.Deno) { + const { default: DenoWsAdapter } = await import( + '../../../src/infrastructure/adapters/DenoWsAdapter.js' + ); + return new DenoWsAdapter(opts); + } + const { default: NodeWsAdapter } = await import( + '../../../src/infrastructure/adapters/NodeWsAdapter.js' + ); + return new NodeWsAdapter(opts); +} + +/** + * Returns true when the host string resolves to the loopback interface. + * + * Wildcard addresses (`0.0.0.0`, `::`, `0:0:0:0:0:0:0:0`) bind to ALL + * interfaces — including public ones — and are intentionally NOT treated + * as loopback. {@link assertExposeSafety} will require `--expose` for them. + * + * @param {string} h + * @returns {boolean} + */ +function isLoopback(h) { + return h === '127.0.0.1' || h === '::1' || h === 'localhost' || h.startsWith('127.'); +} + +/** @typedef {import('../types.js').CliOptions} CliOptions */ + +const SERVE_OPTIONS = { + port: { type: 'string', default: '3000' }, + host: { type: 'string', default: '127.0.0.1' }, + static: { type: 'string' }, + expose: { type: 'boolean', default: false }, + 'writer-id': { type: 'string' }, +}; + +/** + * Opens WarpGraph instances for the specified graph names. 
+ * + * @param {import('../types.js').Persistence} persistence + * @param {string[]} graphNames + * @param {string} writerId + * @returns {Promise>} + */ +async function openGraphs(persistence, graphNames, writerId) { + const graphs = []; + for (const graphName of graphNames) { + const graph = await WarpGraph.open({ + persistence: /** @type {import('../../../src/domain/types/WarpPersistence.js').CorePersistence} */ (/** @type {unknown} */ (persistence)), + graphName, + writerId, + crypto: new WebCryptoAdapter(), + }); + graphs.push(graph); + } + return graphs; +} + +/** + * Resolve and validate the `--static` directory, if provided. + * + * @param {string|undefined} raw + * @returns {Promise} + */ +async function resolveStaticDir(raw) { + if (!raw) { + return null; + } + const dir = resolve(raw); + const st = await stat(dir).catch(() => null); + if (!st || !st.isDirectory()) { + throw usageError(`--static path is not a directory: ${raw}`); + } + return dir; +} + +/** + * Determine which graphs to serve and validate the selection. + * + * @param {import('../types.js').Persistence} persistence + * @param {string|null} [graphOption] + * @returns {Promise} + */ +async function resolveTargetGraphs(persistence, graphOption) { + const graphNames = await listGraphNames(persistence); + if (graphNames.length === 0) { + throw usageError('No WARP graphs found in this repository'); + } + if (graphOption && !graphNames.includes(graphOption)) { + throw notFoundError(`Graph not found: ${graphOption}`); + } + return graphOption ? [graphOption] : graphNames; +} + +/** + * Build a unique writerId from the host and requested port. + * When port is 0 the OS assigns an ephemeral port, so a timestamp + * component prevents collisions across successive invocations. + * + * @param {string} host + * @param {number} port + * @returns {string} + */ +function deriveWriterId(host, port) { + const portLabel = port === 0 + ? 
`ephemeral-${Date.now().toString(36)}-${process.pid}` + : String(port); + return `serve-${host}-${portLabel}`.replace(/[^A-Za-z0-9._-]/g, '-'); +} + +/** + * Bracket an IPv6 host for use in URLs. + * + * @param {string} h + * @returns {string} + */ +function bracketHost(h) { + return h.includes(':') ? `[${h}]` : h; +} + +/** + * Guards against binding to a non-loopback address without --expose. + * + * @param {string} host + * @param {boolean} expose + */ +function assertExposeSafety(host, expose) { + if (!isLoopback(host) && !expose) { + throw usageError( + `Binding to non-loopback address '${host}' exposes the server to the network. ` + + 'Pass --expose to confirm this is intentional.', + ); + } +} + +/** + * Logs startup information to stderr. + * + * @param {{url: string, targetGraphs: string[], staticDir: string|null, urlHost: string, port: number}} info + */ +function logStartup({ url, targetGraphs, staticDir, urlHost, port }) { + process.stderr.write(`Listening on ${url}\n`); + process.stderr.write(`Serving graph(s): ${targetGraphs.join(', ')}\n`); + if (staticDir) { + process.stderr.write(`Serving static files from ${staticDir}\n`); + process.stderr.write(`Open http://${urlHost}:${port} in your browser\n`); + } +} + +/** + * Handles the `serve` command: starts a WebSocket server exposing + * graph(s) in the repository for browser-based viewing and mutation. 
+ * + * @param {{options: CliOptions, args: string[]}} params + * @returns {Promise<{payload: {url: string, host: string, port: number, graphs: string[]}, close: () => Promise}>} + */ +export default async function handleServe({ options, args }) { + const { values } = parseCommandArgs(args, SERVE_OPTIONS, serveSchema, { allowPositionals: false }); + const { port, host, expose, writerId: explicitWriterId } = values; + assertExposeSafety(host, expose); + + const staticDir = await resolveStaticDir(values.static); + const { persistence } = await createPersistence(options.repo); + const targetGraphs = await resolveTargetGraphs(persistence, options.graph); + + const writerId = explicitWriterId || deriveWriterId(host, port); + const graphs = await openGraphs(persistence, targetGraphs, writerId); + const wsPort = await createWsAdapter(staticDir); + const service = new WarpServeService({ wsPort, graphs }); + const addr = await service.listen(port, host); + + const urlHost = bracketHost(addr.host); + const url = `ws://${urlHost}:${addr.port}`; + logStartup({ url, targetGraphs, staticDir, urlHost, port: addr.port }); + + return { + payload: { url, host: addr.host, port: addr.port, graphs: targetGraphs }, + // WarpServeService.close() unsubscribes all graph subscriptions and + // shuts down the WebSocket server. WarpGraph/GitGraphAdapter hold no + // long-lived resources beyond in-memory state, so process exit is + // sufficient for their cleanup. 
+ close: () => service.close(), + }; +} diff --git a/bin/cli/infrastructure.js b/bin/cli/infrastructure.js index bcd2f21e..3e257ce4 100644 --- a/bin/cli/infrastructure.js +++ b/bin/cli/infrastructure.js @@ -50,6 +50,12 @@ Commands: patch Decode and inspect raw patches tree ASCII tree traversal from root nodes bisect Binary search for first bad patch in writer history + serve Start WebSocket server for browser-based graph viewer + --port Port to bind (default: 3000, 0 = OS-assigned) + --host Bind address (default: 127.0.0.1) + --expose Allow binding to non-loopback addresses + --static Serve static files (SPA) on the same port + --writer-id Explicit writer identity (default: derived from host:port) view Interactive TUI graph browser (requires @git-stunts/git-warp-tui) install-hooks Install post-merge git hook @@ -154,7 +160,7 @@ export function notFoundError(message) { return new CliError(message, { code: 'E_NOT_FOUND', exitCode: EXIT_CODES.NOT_FOUND }); } -export const KNOWN_COMMANDS = ['info', 'query', 'path', 'history', 'check', 'doctor', 'materialize', 'seek', 'verify-audit', 'verify-index', 'reindex', 'trust', 'patch', 'tree', 'bisect', 'install-hooks', 'view']; +export const KNOWN_COMMANDS = ['info', 'query', 'path', 'history', 'check', 'doctor', 'materialize', 'seek', 'verify-audit', 'verify-index', 'reindex', 'trust', 'patch', 'tree', 'bisect', 'install-hooks', 'serve', 'view']; const BASE_OPTIONS = { repo: { type: 'string', short: 'r' }, diff --git a/bin/cli/schemas.js b/bin/cli/schemas.js index 773788db..67dfe39e 100644 --- a/bin/cli/schemas.js +++ b/bin/cli/schemas.js @@ -204,3 +204,21 @@ export const verifyIndexSchema = z.object({ // No command-level options; schema exists for forward compatibility export const reindexSchema = z.object({}).strict(); + +// ============================================================================ +// Serve +// ============================================================================ + +export const serveSchema = 
z.object({ + port: z.coerce.number().int().min(0).max(65535).default(3000), + host: z.string().min(1).default('127.0.0.1'), + static: z.string().min(1, 'Missing value for --static').optional(), + expose: z.boolean().default(false), + 'writer-id': z.string().min(1, 'Missing value for --writer-id').regex(/^[A-Za-z0-9._-]+$/, 'writer-id must contain only [A-Za-z0-9._-]').optional(), +}).strict().transform((val) => ({ + port: val.port, + host: val.host, + static: val.static, + expose: val.expose, + writerId: val['writer-id'], +})); diff --git a/bin/cli/shared.js b/bin/cli/shared.js index 7bfbd3c0..16e35d3a 100644 --- a/bin/cli/shared.js +++ b/bin/cli/shared.js @@ -3,6 +3,7 @@ import path from 'node:path'; import process from 'node:process'; import readline from 'node:readline'; import { execFileSync } from 'node:child_process'; +import { textEncode } from '../../src/domain/utils/bytes.js'; // @ts-expect-error — no type declarations for @git-stunts/plumbing import GitPlumbing, { ShellRunnerFactory } from '@git-stunts/plumbing'; import WarpGraph from '../../src/domain/WarpGraph.js'; @@ -27,7 +28,7 @@ import { usageError, notFoundError } from './infrastructure.js'; export async function createPersistence(repoPath) { const runner = ShellRunnerFactory.create(); const plumbing = new GitPlumbing({ cwd: repoPath, runner }); - const persistence = new GitGraphAdapter({ plumbing }); + const persistence = /** @type {Persistence} */ (/** @type {unknown} */ (new GitGraphAdapter({ plumbing }))); const ping = await persistence.ping(); if (!ping.ok) { throw usageError(`Repository not accessible: ${repoPath}`); @@ -161,7 +162,7 @@ export async function readActiveCursor(persistence, graphName) { export async function writeActiveCursor(persistence, graphName, cursor) { const ref = buildCursorActiveRef(graphName); const json = JSON.stringify(cursor); - const oid = await persistence.writeBlob(Buffer.from(json, 'utf8')); + const oid = await persistence.writeBlob(textEncode(json)); await 
persistence.updateRef(ref, oid); } diff --git a/bin/cli/types.js b/bin/cli/types.js index 54144106..c8550d05 100644 --- a/bin/cli/types.js +++ b/bin/cli/types.js @@ -4,8 +4,8 @@ * @property {(ref: string) => Promise} readRef * @property {(ref: string, oid: string) => Promise} updateRef * @property {(ref: string) => Promise} deleteRef - * @property {(oid: string) => Promise} readBlob - * @property {(buf: Buffer) => Promise} writeBlob + * @property {(oid: string) => Promise} readBlob + * @property {(buf: Uint8Array) => Promise} writeBlob * @property {(sha: string) => Promise<{date?: string|null}>} getNodeInfo * @property {(sha: string) => Promise} nodeExists * @property {(sha: string, coverageSha: string) => Promise} isAncestor diff --git a/bin/presenters/index.js b/bin/presenters/index.js index 277b06d4..6de413e0 100644 --- a/bin/presenters/index.js +++ b/bin/presenters/index.js @@ -106,7 +106,7 @@ const VIEW_RENDERERS = new Map(/** @type {[string, function(unknown): string][]} ['info', renderInfoView], ['check', renderCheckView], ['history', renderHistoryView], - ['path', renderPathView], + ['path', (/** @type {Parameters[0]} */ payload) => renderPathView(payload, { terminalWidth: process.stdout.columns })], ['materialize', renderMaterializeView], ['seek', renderSeekView], ])); diff --git a/bin/warp-graph.js b/bin/warp-graph.js index 39c5ee86..2f49da32 100755 --- a/bin/warp-graph.js +++ b/bin/warp-graph.js @@ -70,6 +70,26 @@ async function main() { const format = options.ndjson ? 'ndjson' : options.json ? 'json' : 'text'; present(/** @type {Record} */ (normalized.payload), { format, command, view: /** @type {string | null | boolean} */ (options.view ?? null) }); } + + // Long-running commands (e.g. serve) return a `close` function. + // Wait for SIGINT/SIGTERM instead of exiting immediately. + const close = result && typeof result === 'object' && 'close' in /** @type {Record} */ (result) + ? 
/** @type {() => Promise} */ (/** @type {Record} */ (result).close) + : null; + + if (close) { + let closing = false; + const shutdown = async () => { + if (closing) { return; } + closing = true; + await close(); + process.exit(EXIT_CODES.OK); + }; + process.on('SIGINT', () => { shutdown().catch(() => process.exit(1)); }); + process.on('SIGTERM', () => { shutdown().catch(() => process.exit(1)); }); + return; // Keep the process alive + } + // Use process.exit() to avoid waiting for fire-and-forget I/O (e.g. seek cache writes). process.exit(normalized.exitCode ?? EXIT_CODES.OK); } diff --git a/browser.d.ts b/browser.d.ts new file mode 100644 index 00000000..2faea164 --- /dev/null +++ b/browser.d.ts @@ -0,0 +1,42 @@ +/** + * Browser entry point for @git-stunts/git-warp. + * + * Re-exports only browser-safe code — no node:crypto, node:stream, + * or @git-stunts/plumbing imports. + */ + +export { default as WarpGraph } from './index'; +export { + GraphNode, + InMemoryGraphAdapter, + WebCryptoAdapter, + EncryptionError, + ForkError, + QueryError, + StorageError, + TraversalError, + SyncError, +} from './index'; + +/** + * Base error class for all WARP domain errors. + */ +export class WarpError extends Error { + readonly name: string; + readonly code: string; + readonly context: Record; + constructor(message: string, options?: { code?: string; context?: Record }); +} + +/** + * Creates an empty VersionVector (Map). + */ +export function createVersionVector(): Map; + +/** + * Generates a new canonical writer ID. + * + * @param options - Options with optional custom RNG for testing + * @returns A canonical writer ID (e.g., 'w_0123456789abcdefghjkmnpqrs') + */ +export function generateWriterId(options?: { randomBytes?: (n: number) => Uint8Array }): string; diff --git a/browser.js b/browser.js new file mode 100644 index 00000000..2d2094ef --- /dev/null +++ b/browser.js @@ -0,0 +1,54 @@ +/** + * Browser entry point for @git-stunts/git-warp. 
+ * + * Re-exports only browser-safe code — no node:crypto, node:stream, + * or @git-stunts/plumbing imports. Use with InMemoryGraphAdapter + * and WebCryptoAdapter for fully in-browser WARP graph operation. + * + * @module browser + * + * @example + * ```js + * import { + * WarpGraph, + * InMemoryGraphAdapter, + * WebCryptoAdapter, + * generateWriterId, + * } from '@git-stunts/git-warp/browser'; + * import { sha1sync } from '@git-stunts/git-warp/sha1sync'; + * + * const adapter = new InMemoryGraphAdapter({ hash: sha1sync }); + * const crypto = new WebCryptoAdapter(); + * const graph = await WarpGraph.open({ + * persistence: adapter, + * graphName: 'demo', + * writerId: generateWriterId(), + * crypto, + * }); + * ``` + */ + +// Core API +export { default as WarpGraph } from './src/domain/WarpGraph.js'; +export { default as GraphNode } from './src/domain/entities/GraphNode.js'; + +// Browser-compatible adapters +export { default as InMemoryGraphAdapter } from './src/infrastructure/adapters/InMemoryGraphAdapter.js'; +export { default as WebCryptoAdapter } from './src/infrastructure/adapters/WebCryptoAdapter.js'; + +// CRDT primitives +export { createVersionVector } from './src/domain/crdt/VersionVector.js'; + +// Errors +export { default as WarpError } from './src/domain/errors/WarpError.js'; +export { + EncryptionError, + ForkError, + QueryError, + StorageError, + TraversalError, + SyncError, +} from './src/domain/errors/index.js'; + +// Utilities +export { generateWriterId } from './src/domain/utils/WriterId.js'; diff --git a/contracts/type-surface.m8.json b/contracts/type-surface.m8.json index eb89c7b4..06507b8b 100644 --- a/contracts/type-surface.m8.json +++ b/contracts/type-surface.m8.json @@ -1,8 +1,781 @@ { "$schema": "https://json-schema.org/draft/2020-12/schema", - "$comment": "M8 IRONCLAD type surface manifest — source of truth for T3 (index.d.ts) and T9 (consumer test)", + "$comment": "M8 IRONCLAD type surface manifest \u2014 source of truth for T3 
(index.d.ts) and T9 (consumer test)", "version": 1, "exports": { + "AggregateResult": { + "kind": "interface" + }, + "AggregateSpec": { + "kind": "interface" + }, + "AncestorOptions": { + "kind": "interface" + }, + "ApplySyncResult": { + "kind": "interface" + }, + "BTR": { + "kind": "interface" + }, + "BTRVerificationResult": { + "kind": "interface" + }, + "BisectResult": { + "kind": "type" + }, + "BisectService": { + "kind": "class" + }, + "BitmapIndexBuilder": { + "kind": "class" + }, + "BitmapIndexReader": { + "kind": "class" + }, + "BlobStoragePort": { + "kind": "class" + }, + "BulkNodeSpec": { + "kind": "interface" + }, + "BunHttpAdapter": { + "kind": "class" + }, + "CONTENT_PROPERTY_KEY": { + "kind": "const" + }, + "ClockAdapter": { + "kind": "class" + }, + "ClockPort": { + "kind": "abstract-class" + }, + "CommitDagTraversalService": { + "kind": "class" + }, + "CommonAncestorsOptions": { + "kind": "interface" + }, + "ComposeWormholesOptions": { + "kind": "interface" + }, + "ConsoleLogger": { + "kind": "class" + }, + "CreateBTROptions": { + "kind": "interface" + }, + "CreateNodeOptions": { + "kind": "interface" + }, + "CreateWormholeOptions": { + "kind": "interface" + }, + "CryptoPort": { + "kind": "abstract-class" + }, + "DenoHttpAdapter": { + "kind": "class" + }, + "EdgeChange": { + "kind": "interface" + }, + "EdgeWeightFn": { + "kind": "type" + }, + "EncryptionError": { + "kind": "class" + }, + "EventId": { + "kind": "interface" + }, + "ForkError": { + "kind": "class" + }, + "GCExecuteResult": { + "kind": "interface" + }, + "GCMetrics": { + "kind": "interface" + }, + "GCPolicyConfig": { + "kind": "interface" + }, + "GitGraphAdapter": { + "kind": "class" + }, + "GitPlumbing": { + "kind": "interface" + }, + "GraphNode": { + "kind": "class" + }, + "GraphPersistencePort": { + "kind": "abstract-class" + }, + "HealthCheckService": { + "kind": "class" + }, + "HealthResult": { + "kind": "interface" + }, + "HealthStatus": { + "kind": "const" + }, + "HopOptions": { + 
"kind": "interface" + }, + "HttpServerPort": { + "kind": "abstract-class" + }, + "InMemoryGraphAdapter": { + "kind": "class" + }, + "IndexError": { + "kind": "class" + }, + "IndexHealth": { + "kind": "interface" + }, + "IndexRebuildService": { + "kind": "class" + }, + "IndexStoragePort": { + "kind": "abstract-class" + }, + "IterateNodesOptions": { + "kind": "interface" + }, + "JoinReceipt": { + "kind": "interface" + }, + "ListNodesOptions": { + "kind": "interface" + }, + "LoadOptions": { + "kind": "interface" + }, + "LogLevel": { + "kind": "const" + }, + "LogLevelValue": { + "kind": "type" + }, + "LoggerPort": { + "kind": "abstract-class" + }, + "LogicalTraversal": { + "kind": "interface" + }, + "MaybeGCResult": { + "kind": "interface" + }, + "NoOpLogger": { + "kind": "class" + }, + "NodeCryptoAdapter": { + "kind": "class" + }, + "NodeInfo": { + "kind": "interface" + }, + "NodeWeightFn": { + "kind": "type" + }, + "ObserverConfig": { + "kind": "interface" + }, + "ObserverView": { + "kind": "class", + "instance": { + "hasNode": { + "async": true, + "params": [ + { + "name": "nodeId", + "type": "string" + } + ], + "returns": "Promise" + }, + "getNodes": { + "async": true, + "params": [], + "returns": "Promise" + }, + "getNodeProps": { + "async": true, + "params": [ + { + "name": "nodeId", + "type": "string" + } + ], + "returns": "Promise | null>" + }, + "getEdges": { + "async": true, + "params": [], + "returns": "Promise }>>" + }, + "query": { + "params": [], + "returns": "QueryBuilder" + } + }, + "properties": { + "name": { + "type": "string", + "readonly": true + }, + "traverse": { + "type": "LogicalTraversal" + } + } + }, + "OpEdgeAdd": { + "kind": "interface" + }, + "OpEdgeTombstone": { + "kind": "interface" + }, + "OpNodeAdd": { + "kind": "interface" + }, + "OpNodeTombstone": { + "kind": "interface" + }, + "OpOutcome": { + "kind": "interface" + }, + "OpPropSet": { + "kind": "interface" + }, + "OperationAbortedError": { + "kind": "class" + }, + "PatchBuilderV2": { 
+ "kind": "class", + "note": "Not directly exported from index.js but referenced as return type of createPatch()", + "instance": { + "addNode": { + "params": [ + { + "name": "nodeId", + "type": "string" + } + ], + "returns": "PatchBuilderV2" + }, + "removeNode": { + "params": [ + { + "name": "nodeId", + "type": "string" + } + ], + "returns": "PatchBuilderV2" + }, + "addEdge": { + "params": [ + { + "name": "from", + "type": "string" + }, + { + "name": "to", + "type": "string" + }, + { + "name": "label", + "type": "string" + } + ], + "returns": "PatchBuilderV2" + }, + "removeEdge": { + "params": [ + { + "name": "from", + "type": "string" + }, + { + "name": "to", + "type": "string" + }, + { + "name": "label", + "type": "string" + } + ], + "returns": "PatchBuilderV2" + }, + "setProperty": { + "params": [ + { + "name": "nodeId", + "type": "string" + }, + { + "name": "key", + "type": "string" + }, + { + "name": "value", + "type": "unknown" + } + ], + "returns": "PatchBuilderV2" + }, + "setEdgeProperty": { + "params": [ + { + "name": "from", + "type": "string" + }, + { + "name": "to", + "type": "string" + }, + { + "name": "label", + "type": "string" + }, + { + "name": "key", + "type": "string" + }, + { + "name": "value", + "type": "unknown" + } + ], + "returns": "PatchBuilderV2" + }, + "attachContent": { + "async": true, + "params": [ + { + "name": "nodeId", + "type": "string" + }, + { + "name": "content", + "type": "Buffer | string" + } + ], + "returns": "Promise" + }, + "attachEdgeContent": { + "async": true, + "params": [ + { + "name": "from", + "type": "string" + }, + { + "name": "to", + "type": "string" + }, + { + "name": "label", + "type": "string" + }, + { + "name": "content", + "type": "Buffer | string" + } + ], + "returns": "Promise" + }, + "build": { + "params": [], + "returns": "PatchV2" + }, + "commit": { + "async": true, + "params": [], + "returns": "Promise" + } + }, + "properties": { + "opCount": { + "type": "number", + "readonly": true + } + } + }, + 
"PatchEntry": { + "kind": "interface" + }, + "PatchError": { + "kind": "class" + }, + "PatchSession": { + "kind": "class", + "instance": { + "addNode": { + "params": [ + { + "name": "nodeId", + "type": "string" + } + ], + "returns": "this" + }, + "removeNode": { + "params": [ + { + "name": "nodeId", + "type": "string" + } + ], + "returns": "this" + }, + "addEdge": { + "params": [ + { + "name": "from", + "type": "string" + }, + { + "name": "to", + "type": "string" + }, + { + "name": "label", + "type": "string" + } + ], + "returns": "this" + }, + "removeEdge": { + "params": [ + { + "name": "from", + "type": "string" + }, + { + "name": "to", + "type": "string" + }, + { + "name": "label", + "type": "string" + } + ], + "returns": "this" + }, + "setProperty": { + "params": [ + { + "name": "nodeId", + "type": "string" + }, + { + "name": "key", + "type": "string" + }, + { + "name": "value", + "type": "unknown" + } + ], + "returns": "this" + }, + "setEdgeProperty": { + "params": [ + { + "name": "from", + "type": "string" + }, + { + "name": "to", + "type": "string" + }, + { + "name": "label", + "type": "string" + }, + { + "name": "key", + "type": "string" + }, + { + "name": "value", + "type": "unknown" + } + ], + "returns": "this" + }, + "attachContent": { + "async": true, + "params": [ + { + "name": "nodeId", + "type": "string" + }, + { + "name": "content", + "type": "Buffer | string" + } + ], + "returns": "Promise" + }, + "attachEdgeContent": { + "async": true, + "params": [ + { + "name": "from", + "type": "string" + }, + { + "name": "to", + "type": "string" + }, + { + "name": "label", + "type": "string" + }, + { + "name": "content", + "type": "Buffer | string" + } + ], + "returns": "Promise" + }, + "build": { + "params": [], + "returns": "PatchV2" + }, + "commit": { + "async": true, + "params": [], + "returns": "Promise" + } + }, + "properties": { + "opCount": { + "type": "number", + "readonly": true + } + } + }, + "PatchV2": { + "kind": "interface" + }, + "PathOptions": 
{ + "kind": "interface" + }, + "PathResult": { + "kind": "interface" + }, + "PingResult": { + "kind": "interface" + }, + "PropRemoved": { + "kind": "interface" + }, + "PropSet": { + "kind": "interface" + }, + "ProvenanceIndex": { + "kind": "class" + }, + "ProvenancePayload": { + "kind": "class" + }, + "QueryBuilder": { + "kind": "class", + "instance": { + "match": { + "params": [ + { + "name": "pattern", + "type": "string" + } + ], + "returns": "QueryBuilder" + }, + "where": { + "params": [ + { + "name": "fn", + "type": "((node: QueryNodeSnapshot) => boolean) | Record" + } + ], + "returns": "QueryBuilder" + }, + "outgoing": { + "params": [ + { + "name": "label", + "type": "string", + "optional": true + }, + { + "name": "options", + "type": "HopOptions", + "optional": true + } + ], + "returns": "QueryBuilder" + }, + "incoming": { + "params": [ + { + "name": "label", + "type": "string", + "optional": true + }, + { + "name": "options", + "type": "HopOptions", + "optional": true + } + ], + "returns": "QueryBuilder" + }, + "select": { + "params": [ + { + "name": "fields", + "type": "Array<'id' | 'props'>", + "optional": true + } + ], + "returns": "QueryBuilder" + }, + "aggregate": { + "params": [ + { + "name": "spec", + "type": "AggregateSpec" + } + ], + "returns": "QueryBuilder" + }, + "run": { + "async": true, + "params": [], + "returns": "Promise" + } + } + }, + "QueryError": { + "kind": "class" + }, + "QueryNodeSnapshot": { + "kind": "interface" + }, + "QueryResultV1": { + "kind": "interface" + }, + "RebuildOptions": { + "kind": "interface" + }, + "RepositoryHealth": { + "kind": "interface" + }, + "SchemaUnsupportedError": { + "kind": "class" + }, + "SeekCachePort": { + "kind": "abstract-class" + }, + "ShardCorruptionError": { + "kind": "class" + }, + "ShardLoadError": { + "kind": "class" + }, + "ShardValidationError": { + "kind": "class" + }, + "StateDiffResult": { + "kind": "interface" + }, + "StorageError": { + "kind": "class" + }, + "SyncAuthClientOptions": { + 
"kind": "interface" + }, + "SyncAuthServerOptions": { + "kind": "interface" + }, + "SyncError": { + "kind": "class" + }, + "SyncRequest": { + "kind": "interface" + }, + "SyncResponse": { + "kind": "interface" + }, + "TICK_RECEIPT_OP_TYPES": { + "kind": "const" + }, + "TICK_RECEIPT_RESULT_TYPES": { + "kind": "const" + }, + "TemporalNodeSnapshot": { + "kind": "interface" + }, + "TemporalQuery": { + "kind": "interface" + }, + "TickReceipt": { + "kind": "interface" + }, + "TickReceiptOpType": { + "kind": "type" + }, + "TickReceiptResult": { + "kind": "type" + }, + "TopologicalSortOptions": { + "kind": "interface" + }, + "TranslationCostBreakdown": { + "kind": "interface" + }, + "TranslationCostResult": { + "kind": "interface" + }, + "TraversalDirection": { + "kind": "type" + }, + "TraversalError": { + "kind": "class" + }, + "TraversalNode": { + "kind": "interface" + }, + "TraversalOptions": { + "kind": "interface" + }, + "TraversalService": { + "kind": "class", + "deprecated": true, + "alias": "CommitDagTraversalService" + }, + "TraverseFacadeOptions": { + "kind": "interface" + }, + "ValueRef": { + "kind": "type" + }, + "ValueRefBlob": { + "kind": "interface" + }, + "ValueRefInline": { + "kind": "interface" + }, + "VerifyBTROptions": { + "kind": "interface" + }, "WarpGraph": { "kind": "class", "default": true, @@ -27,85 +800,166 @@ "patch": { "async": true, "params": [ - { "name": "build", "type": "(patch: PatchBuilderV2) => void | Promise" } + { + "name": "build", + "type": "(patch: PatchBuilderV2) => void | Promise" + } ], "returns": "Promise" }, "materialize": { "async": true, "params": [ - { "name": "options", "type": "{ receipts?: boolean; ceiling?: number }", "optional": true } + { + "name": "options", + "type": "{ receipts?: boolean; ceiling?: number }", + "optional": true + } ], "returns": "Promise", "overloads": [ { - "params": [{ "name": "options", "type": "{ receipts: true; ceiling?: number }" }], + "params": [ + { + "name": "options", + "type": "{ receipts: 
true; ceiling?: number }" + } + ], "returns": "Promise<{ state: WarpStateV5; receipts: TickReceipt[] }>" }, { - "params": [{ "name": "options", "type": "{ receipts?: false; ceiling?: number }", "optional": true }], + "params": [ + { + "name": "options", + "type": "{ receipts?: false; ceiling?: number }", + "optional": true + } + ], "returns": "Promise" } ] }, "materializeAt": { "async": true, - "params": [{ "name": "checkpointSha", "type": "string" }], + "params": [ + { + "name": "checkpointSha", + "type": "string" + } + ], "returns": "Promise" }, "hasNode": { "async": true, - "params": [{ "name": "nodeId", "type": "string" }], + "params": [ + { + "name": "nodeId", + "type": "string" + } + ], "returns": "Promise" }, "getNodeProps": { "async": true, - "params": [{ "name": "nodeId", "type": "string" }], + "params": [ + { + "name": "nodeId", + "type": "string" + } + ], "returns": "Promise | null>" }, "getEdgeProps": { "async": true, "params": [ - { "name": "from", "type": "string" }, - { "name": "to", "type": "string" }, - { "name": "label", "type": "string" } + { + "name": "from", + "type": "string" + }, + { + "name": "to", + "type": "string" + }, + { + "name": "label", + "type": "string" + } ], "returns": "Promise | null>" }, "getContentOid": { "async": true, - "params": [{ "name": "nodeId", "type": "string" }], + "params": [ + { + "name": "nodeId", + "type": "string" + } + ], "returns": "Promise" }, "getContent": { "async": true, - "params": [{ "name": "nodeId", "type": "string" }], + "params": [ + { + "name": "nodeId", + "type": "string" + } + ], "returns": "Promise" }, "getEdgeContentOid": { "async": true, "params": [ - { "name": "from", "type": "string" }, - { "name": "to", "type": "string" }, - { "name": "label", "type": "string" } + { + "name": "from", + "type": "string" + }, + { + "name": "to", + "type": "string" + }, + { + "name": "label", + "type": "string" + } ], "returns": "Promise" }, "getEdgeContent": { "async": true, "params": [ - { "name": "from", 
"type": "string" }, - { "name": "to", "type": "string" }, - { "name": "label", "type": "string" } + { + "name": "from", + "type": "string" + }, + { + "name": "to", + "type": "string" + }, + { + "name": "label", + "type": "string" + } ], "returns": "Promise" }, "neighbors": { "async": true, "params": [ - { "name": "nodeId", "type": "string" }, - { "name": "direction", "type": "'outgoing' | 'incoming' | 'both'", "optional": true }, - { "name": "edgeLabel", "type": "string", "optional": true } + { + "name": "nodeId", + "type": "string" + }, + { + "name": "direction", + "type": "'outgoing' | 'incoming' | 'both'", + "optional": true + }, + { + "name": "edgeLabel", + "type": "string", + "optional": true + } ], "returns": "Promise>" }, @@ -157,16 +1011,28 @@ "observer": { "async": true, "params": [ - { "name": "name", "type": "string" }, - { "name": "config", "type": "ObserverConfig" } + { + "name": "name", + "type": "string" + }, + { + "name": "config", + "type": "ObserverConfig" + } ], "returns": "Promise" }, "translationCost": { "async": true, "params": [ - { "name": "configA", "type": "ObserverConfig" }, - { "name": "configB", "type": "ObserverConfig" } + { + "name": "configA", + "type": "ObserverConfig" + }, + { + "name": "configB", + "type": "ObserverConfig" + } ], "returns": "Promise" }, @@ -183,8 +1049,15 @@ "syncWith": { "async": true, "params": [ - { "name": "remote", "type": "string | WarpGraph" }, - { "name": "options", "type": "SyncWithOptions", "optional": true } + { + "name": "remote", + "type": "string | WarpGraph" + }, + { + "name": "options", + "type": "SyncWithOptions", + "optional": true + } ], "returns": "Promise<{ applied: number; attempts: number; skippedWriters: Array<{ writerId: string; reason: string; localSha: string; remoteSha: string | null }>; state?: WarpStateV5 }>" }, @@ -201,35 +1074,65 @@ "createWormhole": { "async": true, "params": [ - { "name": "fromSha", "type": "string" }, - { "name": "toSha", "type": "string" } + { + "name": 
"fromSha", + "type": "string" + }, + { + "name": "toSha", + "type": "string" + } ], "returns": "Promise" }, "patchesFor": { "async": true, - "params": [{ "name": "entityId", "type": "string" }], + "params": [ + { + "name": "entityId", + "type": "string" + } + ], "returns": "Promise" }, "materializeSlice": { "async": true, "params": [ - { "name": "nodeId", "type": "string" }, - { "name": "options", "type": "{ receipts?: boolean }", "optional": true } + { + "name": "nodeId", + "type": "string" + }, + { + "name": "options", + "type": "{ receipts?: boolean }", + "optional": true + } ], "returns": "Promise<{ state: WarpStateV5; patchCount: number; receipts?: TickReceipt[] }>" }, "getWriterPatches": { "async": true, "params": [ - { "name": "writerId", "type": "string" }, - { "name": "stopAtSha", "type": "string | null", "optional": true } + { + "name": "writerId", + "type": "string" + }, + { + "name": "stopAtSha", + "type": "string | null", + "optional": true + } ], "returns": "Promise>" }, "join": { "async": false, - "params": [{ "name": "otherState", "type": "WarpStateV5" }], + "params": [ + { + "name": "otherState", + "type": "WarpStateV5" + } + ], "returns": "{ state: WarpStateV5; receipt: JoinReceipt }" }, "subscribe": { @@ -245,7 +1148,10 @@ "watch": { "async": false, "params": [ - { "name": "pattern", "type": "string" }, + { + "name": "pattern", + "type": "string" + }, { "name": "options", "type": "{ onChange: (diff: StateDiffResult) => void; onError?: (error: Error) => void; poll?: number }" @@ -260,29 +1166,54 @@ }, "processSyncRequest": { "async": true, - "params": [{ "name": "request", "type": "SyncRequest" }], + "params": [ + { + "name": "request", + "type": "SyncRequest" + } + ], "returns": "Promise" }, "applySyncResponse": { "async": false, - "params": [{ "name": "response", "type": "SyncResponse" }], + "params": [ + { + "name": "response", + "type": "SyncResponse" + } + ], "returns": "ApplySyncResult" }, "syncNeeded": { "async": true, - "params": [{ 
"name": "remoteFrontier", "type": "Map" }], + "params": [ + { + "name": "remoteFrontier", + "type": "Map" + } + ], "returns": "Promise" }, "writer": { "async": true, - "params": [{ "name": "writerId", "type": "string", "optional": true }], + "params": [ + { + "name": "writerId", + "type": "string", + "optional": true + } + ], "returns": "Promise" }, "createWriter": { "async": true, "deprecated": true, "params": [ - { "name": "opts", "type": "{ persist?: 'config' | 'none'; alias?: string }", "optional": true } + { + "name": "opts", + "type": "{ persist?: 'config' | 'none'; alias?: string }", + "optional": true + } ], "returns": "Promise" }, @@ -313,255 +1244,261 @@ } }, "properties": { - "graphName": { "type": "string", "readonly": true }, - "writerId": { "type": "string", "readonly": true }, - "seekCache": { "type": "SeekCachePort | null", "readonly": true }, - "provenanceIndex": { "type": "ProvenanceIndex | null", "readonly": true }, - "persistence": { "type": "GraphPersistencePort", "readonly": true, "getter": true }, - "onDeleteWithData": { "type": "'reject' | 'cascade' | 'warn'", "readonly": true, "getter": true }, - "gcPolicy": { "type": "GCPolicyConfig", "readonly": true, "getter": true }, - "temporal": { "type": "TemporalQuery", "readonly": true, "getter": true }, - "traverse": { "type": "LogicalTraversal" }, + "graphName": { + "type": "string", + "readonly": true + }, + "writerId": { + "type": "string", + "readonly": true + }, + "seekCache": { + "type": "SeekCachePort | null", + "readonly": true + }, + "provenanceIndex": { + "type": "ProvenanceIndex | null", + "readonly": true + }, + "persistence": { + "type": "GraphPersistencePort", + "readonly": true, + "getter": true + }, + "onDeleteWithData": { + "type": "'reject' | 'cascade' | 'warn'", + "readonly": true, + "getter": true + }, + "gcPolicy": { + "type": "GCPolicyConfig", + "readonly": true, + "getter": true + }, + "temporal": { + "type": "TemporalQuery", + "readonly": true, + "getter": true + }, + 
"traverse": { + "type": "LogicalTraversal" + }, "setSeekCache": { - "params": [{ "name": "cache", "type": "SeekCachePort | null" }], + "params": [ + { + "name": "cache", + "type": "SeekCachePort | null" + } + ], "returns": "void" } } }, - "PatchBuilderV2": { - "kind": "class", - "note": "Not directly exported from index.js but referenced as return type of createPatch()", - "instance": { - "addNode": { "params": [{ "name": "nodeId", "type": "string" }], "returns": "PatchBuilderV2" }, - "removeNode": { "params": [{ "name": "nodeId", "type": "string" }], "returns": "PatchBuilderV2" }, - "addEdge": { "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }], "returns": "PatchBuilderV2" }, - "removeEdge": { "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }], "returns": "PatchBuilderV2" }, - "setProperty": { "params": [{ "name": "nodeId", "type": "string" }, { "name": "key", "type": "string" }, { "name": "value", "type": "unknown" }], "returns": "PatchBuilderV2" }, - "setEdgeProperty": { "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }, { "name": "key", "type": "string" }, { "name": "value", "type": "unknown" }], "returns": "PatchBuilderV2" }, - "attachContent": { "async": true, "params": [{ "name": "nodeId", "type": "string" }, { "name": "content", "type": "Buffer | string" }], "returns": "Promise" }, - "attachEdgeContent": { "async": true, "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }, { "name": "content", "type": "Buffer | string" }], "returns": "Promise" }, - "build": { "params": [], "returns": "PatchV2" }, - "commit": { "async": true, "params": [], "returns": "Promise" } - }, - "properties": { - "opCount": { "type": "number", "readonly": true } - } + "WarpGraphStatus": { + "kind": 
"interface" }, - "PatchSession": { - "kind": "class", - "instance": { - "addNode": { "params": [{ "name": "nodeId", "type": "string" }], "returns": "this" }, - "removeNode": { "params": [{ "name": "nodeId", "type": "string" }], "returns": "this" }, - "addEdge": { "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }], "returns": "this" }, - "removeEdge": { "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }], "returns": "this" }, - "setProperty": { "params": [{ "name": "nodeId", "type": "string" }, { "name": "key", "type": "string" }, { "name": "value", "type": "unknown" }], "returns": "this" }, - "setEdgeProperty": { "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }, { "name": "key", "type": "string" }, { "name": "value", "type": "unknown" }], "returns": "this" }, - "attachContent": { "async": true, "params": [{ "name": "nodeId", "type": "string" }, { "name": "content", "type": "Buffer | string" }], "returns": "Promise" }, - "attachEdgeContent": { "async": true, "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }, { "name": "content", "type": "Buffer | string" }], "returns": "Promise" }, - "build": { "params": [], "returns": "PatchV2" }, - "commit": { "async": true, "params": [], "returns": "Promise" } - }, - "properties": { - "opCount": { "type": "number", "readonly": true } - } + "WarpStateIndexBuilder": { + "kind": "class" + }, + "WarpStateV5": { + "kind": "interface" + }, + "WebCryptoAdapter": { + "kind": "class" + }, + "WeightedCostSelector": { + "kind": "type" + }, + "WormholeEdge": { + "kind": "interface" + }, + "WormholeError": { + "kind": "class" }, "Writer": { "kind": "class", "instance": { - "head": { "async": true, "params": [], "returns": "Promise" }, - "beginPatch": { 
"async": true, "params": [], "returns": "Promise" }, - "commitPatch": { "async": true, "params": [{ "name": "build", "type": "(p: PatchSession) => void | Promise" }], "returns": "Promise" } + "head": { + "async": true, + "params": [], + "returns": "Promise" + }, + "beginPatch": { + "async": true, + "params": [], + "returns": "Promise" + }, + "commitPatch": { + "async": true, + "params": [ + { + "name": "build", + "type": "(p: PatchSession) => void | Promise" + } + ], + "returns": "Promise" + } }, "properties": { - "writerId": { "type": "string", "readonly": true }, - "graphName": { "type": "string", "readonly": true } + "writerId": { + "type": "string", + "readonly": true + }, + "graphName": { + "type": "string", + "readonly": true + } } }, - "QueryBuilder": { - "kind": "class", - "instance": { - "match": { "params": [{ "name": "pattern", "type": "string" }], "returns": "QueryBuilder" }, - "where": { "params": [{ "name": "fn", "type": "((node: QueryNodeSnapshot) => boolean) | Record" }], "returns": "QueryBuilder" }, - "outgoing": { "params": [{ "name": "label", "type": "string", "optional": true }, { "name": "options", "type": "HopOptions", "optional": true }], "returns": "QueryBuilder" }, - "incoming": { "params": [{ "name": "label", "type": "string", "optional": true }, { "name": "options", "type": "HopOptions", "optional": true }], "returns": "QueryBuilder" }, - "select": { "params": [{ "name": "fields", "type": "Array<'id' | 'props'>", "optional": true }], "returns": "QueryBuilder" }, - "aggregate": { "params": [{ "name": "spec", "type": "AggregateSpec" }], "returns": "QueryBuilder" }, - "run": { "async": true, "params": [], "returns": "Promise" } - } + "WriterError": { + "kind": "class" }, - "ObserverView": { - "kind": "class", - "instance": { - "hasNode": { "async": true, "params": [{ "name": "nodeId", "type": "string" }], "returns": "Promise" }, - "getNodes": { "async": true, "params": [], "returns": "Promise" }, - "getNodeProps": { "async": true, "params": 
[{ "name": "nodeId", "type": "string" }], "returns": "Promise | null>" }, - "getEdges": { "async": true, "params": [], "returns": "Promise }>>" }, - "query": { "params": [], "returns": "QueryBuilder" } - }, - "properties": { - "name": { "type": "string", "readonly": true }, - "traverse": { "type": "LogicalTraversal" } - } + "buildWarpStateIndex": { + "kind": "function", + "async": true + }, + "checkAborted": { + "kind": "function", + "params": [ + { + "name": "signal", + "type": "AbortSignal", + "optional": true + }, + { + "name": "operation", + "type": "string", + "optional": true + } + ], + "returns": "void" + }, + "composeWormholes": { + "kind": "function", + "async": true + }, + "computeStateHashV5": { + "kind": "function", + "async": true + }, + "computeTranslationCost": { + "kind": "function" + }, + "createBTR": { + "kind": "function", + "async": true + }, + "createBlobValue": { + "kind": "function" + }, + "createEdgeAdd": { + "kind": "function" + }, + "createEdgeTombstone": { + "kind": "function" + }, + "createEventId": { + "kind": "function" + }, + "createInlineValue": { + "kind": "function" + }, + "createNodeAdd": { + "kind": "function" + }, + "createNodeTombstone": { + "kind": "function" + }, + "createPropSet": { + "kind": "function" + }, + "createTickReceipt": { + "kind": "function" + }, + "createTimeoutSignal": { + "kind": "function", + "params": [ + { + "name": "ms", + "type": "number" + } + ], + "returns": "AbortSignal" + }, + "createWormhole": { + "kind": "function", + "async": true + }, + "decodeEdgePropKey": { + "kind": "function", + "params": [ + { + "name": "encoded", + "type": "string" + } + ], + "returns": "{ from: string; to: string; label: string; propKey: string }" + }, + "deserializeBTR": { + "kind": "function" + }, + "deserializeWormhole": { + "kind": "function" + }, + "encodeEdgePropKey": { + "kind": "function", + "params": [ + { + "name": "from", + "type": "string" + }, + { + "name": "to", + "type": "string" + }, + { + "name": "label", + 
"type": "string" + }, + { + "name": "propKey", + "type": "string" + } + ], + "returns": "string" + }, + "isEdgePropKey": { + "kind": "function", + "params": [ + { + "name": "key", + "type": "string" + } + ], + "returns": "boolean" + }, + "migrateV4toV5": { + "kind": "function" + }, + "replayBTR": { + "kind": "function", + "async": true + }, + "replayWormhole": { + "kind": "function" + }, + "serializeBTR": { + "kind": "function" + }, + "serializeWormhole": { + "kind": "function" + }, + "tickReceiptCanonicalJson": { + "kind": "function" }, - "ProvenancePayload": { "kind": "class" }, - "ProvenanceIndex": { "kind": "class" }, - "GitGraphAdapter": { "kind": "class" }, - "InMemoryGraphAdapter": { "kind": "class" }, - "GraphNode": { "kind": "class" }, - "BitmapIndexBuilder": { "kind": "class" }, - "BitmapIndexReader": { "kind": "class" }, - "IndexRebuildService": { "kind": "class" }, - "HealthCheckService": { "kind": "class" }, - "BisectService": { "kind": "class" }, - "CommitDagTraversalService": { "kind": "class" }, - "GraphPersistencePort": { "kind": "abstract-class" }, - "IndexStoragePort": { "kind": "abstract-class" }, - "LoggerPort": { "kind": "abstract-class" }, - "ClockPort": { "kind": "abstract-class" }, - "SeekCachePort": { "kind": "abstract-class" }, - "NoOpLogger": { "kind": "class" }, - "ConsoleLogger": { "kind": "class" }, - "ClockAdapter": { "kind": "class" }, - "HealthStatus": { "kind": "const" }, - "LogLevel": { "kind": "const" }, - "TraversalService": { "kind": "class", "deprecated": true, "alias": "CommitDagTraversalService" }, - - "ForkError": { "kind": "class" }, - "IndexError": { "kind": "class" }, - "QueryError": { "kind": "class" }, - "SchemaUnsupportedError": { "kind": "class" }, - "ShardLoadError": { "kind": "class" }, - "ShardCorruptionError": { "kind": "class" }, - "ShardValidationError": { "kind": "class" }, - "StorageError": { "kind": "class" }, - "TraversalError": { "kind": "class" }, - "OperationAbortedError": { "kind": "class" }, - 
"SyncError": { "kind": "class" }, - "WormholeError": { "kind": "class" }, - - "checkAborted": { "kind": "function", "params": [{ "name": "signal", "type": "AbortSignal", "optional": true }, { "name": "operation", "type": "string", "optional": true }], "returns": "void" }, - "createTimeoutSignal": { "kind": "function", "params": [{ "name": "ms", "type": "number" }], "returns": "AbortSignal" }, - "encodeEdgePropKey": { "kind": "function", "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }, { "name": "propKey", "type": "string" }], "returns": "string" }, - "decodeEdgePropKey": { "kind": "function", "params": [{ "name": "encoded", "type": "string" }], "returns": "{ from: string; to: string; label: string; propKey: string }" }, - "isEdgePropKey": { "kind": "function", "params": [{ "name": "key", "type": "string" }], "returns": "boolean" }, - "CONTENT_PROPERTY_KEY": { "kind": "const" }, - "computeTranslationCost": { "kind": "function" }, - "migrateV4toV5": { "kind": "function" }, - - "createNodeAdd": { "kind": "function" }, - "createNodeTombstone": { "kind": "function" }, - "createEdgeAdd": { "kind": "function" }, - "createEdgeTombstone": { "kind": "function" }, - "createPropSet": { "kind": "function" }, - "createInlineValue": { "kind": "function" }, - "createBlobValue": { "kind": "function" }, - "createEventId": { "kind": "function" }, - - "createTickReceipt": { "kind": "function" }, - "tickReceiptCanonicalJson": { "kind": "function" }, - "TICK_RECEIPT_OP_TYPES": { "kind": "const" }, - "TICK_RECEIPT_RESULT_TYPES": { "kind": "const" }, - - "createBTR": { "kind": "function", "async": true }, - "verifyBTR": { "kind": "function", "async": true }, - "replayBTR": { "kind": "function", "async": true }, - "serializeBTR": { "kind": "function" }, - "deserializeBTR": { "kind": "function" }, - - "createWormhole": { "kind": "function", "async": true }, - "composeWormholes": { "kind": "function", "async": true }, 
- "replayWormhole": { "kind": "function" }, - "serializeWormhole": { "kind": "function" }, - "deserializeWormhole": { "kind": "function" }, - - "CryptoPort": { "kind": "abstract-class" }, - "NodeCryptoAdapter": { "kind": "class" }, - "WebCryptoAdapter": { "kind": "class" }, - "HttpServerPort": { "kind": "abstract-class" }, - "BunHttpAdapter": { "kind": "class" }, - "DenoHttpAdapter": { "kind": "class" }, - "WarpStateIndexBuilder": { "kind": "class" }, - "PatchError": { "kind": "class" }, - "WriterError": { "kind": "class" }, - - "buildWarpStateIndex": { "kind": "function", "async": true }, - "computeStateHashV5": { "kind": "function", "async": true }, - - "PingResult": { "kind": "interface" }, - "RepositoryHealth": { "kind": "interface" }, - "IndexHealth": { "kind": "interface" }, - "HealthResult": { "kind": "interface" }, - "CreateNodeOptions": { "kind": "interface" }, - "BulkNodeSpec": { "kind": "interface" }, - "ListNodesOptions": { "kind": "interface" }, - "IterateNodesOptions": { "kind": "interface" }, - "RebuildOptions": { "kind": "interface" }, - "LoadOptions": { "kind": "interface" }, - "TraversalNode": { "kind": "interface" }, - "PathResult": { "kind": "interface" }, - "QueryNodeSnapshot": { "kind": "interface" }, - "QueryResultV1": { "kind": "interface" }, - "AggregateSpec": { "kind": "interface" }, - "AggregateResult": { "kind": "interface" }, - "HopOptions": { "kind": "interface" }, - "TraverseFacadeOptions": { "kind": "interface" }, - "LogicalTraversal": { "kind": "interface" }, - "TraversalOptions": { "kind": "interface" }, - "AncestorOptions": { "kind": "interface" }, - "PathOptions": { "kind": "interface" }, - "CommonAncestorsOptions": { "kind": "interface" }, - "TopologicalSortOptions": { "kind": "interface" }, - "NodeInfo": { "kind": "interface" }, - "GitPlumbing": { "kind": "interface" }, - "ObserverConfig": { "kind": "interface" }, - "TranslationCostBreakdown": { "kind": "interface" }, - "TranslationCostResult": { "kind": "interface" }, - 
"EdgeChange": { "kind": "interface" }, - "PropSet": { "kind": "interface" }, - "PropRemoved": { "kind": "interface" }, - "StateDiffResult": { "kind": "interface" }, - "TemporalNodeSnapshot": { "kind": "interface" }, - "TemporalQuery": { "kind": "interface" }, - "PatchV2": { "kind": "interface" }, - "GCPolicyConfig": { "kind": "interface" }, - "GCExecuteResult": { "kind": "interface" }, - "GCMetrics": { "kind": "interface" }, - "MaybeGCResult": { "kind": "interface" }, - "SyncRequest": { "kind": "interface" }, - "SyncResponse": { "kind": "interface" }, - "ApplySyncResult": { "kind": "interface" }, - "SyncAuthServerOptions": { "kind": "interface" }, - "SyncAuthClientOptions": { "kind": "interface" }, - "WarpGraphStatus": { "kind": "interface" }, - "JoinReceipt": { "kind": "interface" }, - "OpOutcome": { "kind": "interface" }, - "TickReceipt": { "kind": "interface" }, - "OpNodeAdd": { "kind": "interface" }, - "OpNodeTombstone": { "kind": "interface" }, - "OpEdgeAdd": { "kind": "interface" }, - "OpEdgeTombstone": { "kind": "interface" }, - "OpPropSet": { "kind": "interface" }, - "ValueRefInline": { "kind": "interface" }, - "ValueRefBlob": { "kind": "interface" }, - "EventId": { "kind": "interface" }, - "PatchEntry": { "kind": "interface" }, - "WarpStateV5": { "kind": "interface" }, - "BTR": { "kind": "interface" }, - "BisectResult": { "kind": "type" }, - "BTRVerificationResult": { "kind": "interface" }, - "CreateBTROptions": { "kind": "interface" }, - "VerifyBTROptions": { "kind": "interface" }, - "WormholeEdge": { "kind": "interface" }, - "CreateWormholeOptions": { "kind": "interface" }, - "ComposeWormholesOptions": { "kind": "interface" }, - - "TraversalDirection": { "kind": "type" }, - "EdgeWeightFn": { "kind": "type" }, - "NodeWeightFn": { "kind": "type" }, - "WeightedCostSelector": { "kind": "type" }, - "LogLevelValue": { "kind": "type" }, - "TickReceiptOpType": { "kind": "type" }, - "TickReceiptResult": { "kind": "type" }, - "ValueRef": { "kind": "type" } + 
"verifyBTR": { + "kind": "function", + "async": true + } } } diff --git a/eslint.config.js b/eslint.config.js index 0521617e..1c400569 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -49,6 +49,7 @@ export default tseslint.config( AbortSignal: "readonly", performance: "readonly", global: "readonly", + WebSocket: "readonly", }, }, plugins: { @@ -253,6 +254,8 @@ export default tseslint.config( "src/domain/services/LogicalBitmapIndexBuilder.js", "src/domain/services/LogicalIndexBuildService.js", "src/domain/services/IncrementalIndexUpdater.js", + "src/domain/services/WormholeService.js", + "src/domain/services/WarpServeService.js", ], rules: { "complexity": ["error", 35], @@ -270,6 +273,28 @@ export default tseslint.config( }, }, + // ── Domain purity: ban Buffer — use Uint8Array + helpers from domain/utils/bytes.js ── + { + files: ["src/domain/**/*.js"], + rules: { + "no-restricted-globals": ["error", + { "name": "Buffer", "message": "Use Uint8Array + helpers from domain/utils/bytes.js. Buffer is confined to infrastructure adapters." }, + ], + "no-restricted-imports": ["error", { + "paths": [ + { + "name": "node:buffer", + "message": "Use Uint8Array + helpers from domain/utils/bytes.js. Buffer is confined to infrastructure adapters.", + }, + { + "name": "buffer", + "message": "Use Uint8Array + helpers from domain/utils/bytes.js. 
Buffer is confined to infrastructure adapters.", + }, + ], + }], + }, + }, + // ── Domain purity: ban Date.now(), new Date(), and Date() — use ClockPort ── { files: ["src/domain/**/*.js"], @@ -323,6 +348,12 @@ export default tseslint.config( TextEncoder: "readonly", performance: "readonly", global: "readonly", + Headers: "readonly", + ReadableStream: "readonly", + Request: "readonly", + Response: "readonly", + WebSocket: "readonly", + queueMicrotask: "readonly", describe: "readonly", it: "readonly", expect: "readonly", diff --git a/index.d.ts b/index.d.ts index 8ee27844..969d0d36 100644 --- a/index.d.ts +++ b/index.d.ts @@ -452,11 +452,11 @@ export abstract class GraphPersistencePort { */ export abstract class IndexStoragePort { /** Writes a blob and returns its OID */ - abstract writeBlob(content: Buffer | string): Promise; + abstract writeBlob(content: Uint8Array | string): Promise; /** Writes a tree from entries and returns its OID */ abstract writeTree(entries: string[]): Promise; /** Reads a blob by OID */ - abstract readBlob(oid: string): Promise; + abstract readBlob(oid: string): Promise; /** Reads a tree and returns a map of path to blob OID */ abstract readTreeOids(treeOid: string): Promise>; /** Updates a ref to point to an OID */ @@ -484,11 +484,11 @@ export type LogLevelValue = 0 | 1 | 2 | 3 | 4; */ export abstract class CryptoPort { /** Computes a hash digest of the given data */ - abstract hash(algorithm: string, data: string | Buffer | Uint8Array): Promise; + abstract hash(algorithm: string, data: string | Uint8Array): Promise; /** Computes an HMAC of the given data */ - abstract hmac(algorithm: string, key: string | Buffer | Uint8Array, data: string | Buffer | Uint8Array): Promise; - /** Constant-time comparison of two buffers */ - abstract timingSafeEqual(a: Buffer | Uint8Array, b: Buffer | Uint8Array): boolean; + abstract hmac(algorithm: string, key: string | Uint8Array, data: string | Uint8Array): Promise; + /** Constant-time comparison of two 
byte arrays */ + abstract timingSafeEqual(a: Uint8Array, b: Uint8Array): boolean; } /** @@ -528,9 +528,9 @@ export class ClockAdapter extends ClockPort { */ export abstract class SeekCachePort { /** Retrieves a cached state buffer by key, or null on miss. */ - abstract get(key: string): Promise<{ buffer: Buffer | Uint8Array; indexTreeOid?: string } | null>; + abstract get(key: string): Promise<{ buffer: Uint8Array; indexTreeOid?: string } | null>; /** Stores a state buffer under the given key. */ - abstract set(key: string, buffer: Buffer | Uint8Array, options?: { indexTreeOid?: string }): Promise; + abstract set(key: string, buffer: Uint8Array, options?: { indexTreeOid?: string }): Promise; /** Checks whether a key exists in the cache index. */ abstract has(key: string): Promise; /** Lists all keys currently in the cache index. */ @@ -541,6 +541,18 @@ export abstract class SeekCachePort { abstract clear(): Promise; } +/** + * Port interface for content blob storage operations. + * Abstracts how large binary content is stored and retrieved. + * @abstract + */ +export abstract class BlobStoragePort { + /** Stores content and returns a storage identifier (e.g. CAS tree OID). */ + abstract store(content: Uint8Array | string, options?: { slug?: string }): Promise; + /** Retrieves content by its storage identifier. */ + abstract retrieve(oid: string): Promise; +} + /** * Port interface for structured logging operations. 
* @abstract @@ -596,8 +608,8 @@ export class ConsoleLogger extends LoggerPort { */ export interface GitPlumbing { readonly emptyTree: string; - execute(options: { args: string[]; input?: string | Buffer }): Promise; - executeStream(options: { args: string[] }): Promise & { collect(opts?: { asString?: boolean }): Promise }>; + execute(options: { args: string[]; input?: string | Uint8Array }): Promise; + executeStream(options: { args: string[] }): Promise & { collect(opts?: { asString?: boolean }): Promise }>; } /** @@ -607,7 +619,11 @@ export interface GitPlumbing { * but stores all data in Maps — no real Git I/O required. */ export class InMemoryGraphAdapter extends GraphPersistencePort { - constructor(); + constructor(options?: { + author?: string; + clock?: { now: () => number }; + hash?: (data: Uint8Array) => string; + }); get emptyTree(): string; commitNode(options: CreateNodeOptions): Promise; @@ -631,11 +647,11 @@ export class GitGraphAdapter extends GraphPersistencePort implements IndexStorag getNodeInfo(sha: string): Promise; logNodesStream(options: ListNodesOptions & { format: string }): Promise>; logNodes(options: ListNodesOptions & { format: string }): Promise; - writeBlob(content: Buffer | string): Promise; + writeBlob(content: Uint8Array | string): Promise; writeTree(entries: string[]): Promise; - readTree(treeOid: string): Promise>; + readTree(treeOid: string): Promise>; readTreeOids(treeOid: string): Promise>; - readBlob(oid: string): Promise; + readBlob(oid: string): Promise; updateRef(ref: string, oid: string): Promise; readRef(ref: string): Promise; deleteRef(ref: string): Promise; @@ -657,9 +673,9 @@ export class GitGraphAdapter extends GraphPersistencePort implements IndexStorag */ export class NodeCryptoAdapter extends CryptoPort { constructor(); - hash(algorithm: string, data: string | Buffer | Uint8Array): Promise; - hmac(algorithm: string, key: string | Buffer | Uint8Array, data: string | Buffer | Uint8Array): Promise; - timingSafeEqual(a: 
Buffer | Uint8Array, b: Buffer | Uint8Array): boolean; + hash(algorithm: string, data: string | Uint8Array): Promise; + hmac(algorithm: string, key: string | Uint8Array, data: string | Uint8Array): Promise; + timingSafeEqual(a: Uint8Array, b: Uint8Array): boolean; } /** @@ -670,9 +686,9 @@ export class NodeCryptoAdapter extends CryptoPort { */ export class WebCryptoAdapter extends CryptoPort { constructor(options?: { subtle?: SubtleCrypto }); - hash(algorithm: string, data: string | Buffer | Uint8Array): Promise; - hmac(algorithm: string, key: string | Buffer | Uint8Array, data: string | Buffer | Uint8Array): Promise; - timingSafeEqual(a: Buffer | Uint8Array, b: Buffer | Uint8Array): boolean; + hash(algorithm: string, data: string | Uint8Array): Promise; + hmac(algorithm: string, key: string | Uint8Array, data: string | Uint8Array): Promise; + timingSafeEqual(a: Uint8Array, b: Uint8Array): boolean; } /** @@ -684,7 +700,7 @@ export abstract class HttpServerPort { method: string; url: string; headers: Record; - body?: Buffer | Uint8Array; + body?: Uint8Array; }) => Promise<{ status?: number; headers?: Record; body?: string | Uint8Array }>): { listen(port: number, callback?: (err?: Error | null) => void): void; listen(port: number, host: string, callback?: (err?: Error | null) => void): void; @@ -704,7 +720,7 @@ export class BunHttpAdapter extends HttpServerPort { method: string; url: string; headers: Record; - body?: Buffer | Uint8Array; + body?: Uint8Array; }) => Promise<{ status?: number; headers?: Record; body?: string | Uint8Array }>): { listen(port: number, callback?: (err?: Error | null) => void): void; listen(port: number, host: string, callback?: (err?: Error | null) => void): void; @@ -724,7 +740,7 @@ export class DenoHttpAdapter extends HttpServerPort { method: string; url: string; headers: Record; - body?: Buffer | Uint8Array; + body?: Uint8Array; }) => Promise<{ status?: number; headers?: Record; body?: string | Uint8Array }>): { listen(port: number, 
callback?: (err?: Error | null) => void): void; listen(port: number, host: string, callback?: (err?: Error | null) => void): void; @@ -753,7 +769,7 @@ export class BitmapIndexBuilder { addEdge(srcSha: string, tgtSha: string): void; /** Serializes the index to a tree structure of buffers */ - serialize(options?: { frontier?: Map }): Promise>; + serialize(options?: { frontier?: Map }): Promise>; } /** @@ -773,7 +789,7 @@ export class WarpStateIndexBuilder { /** * Serializes the index to a tree structure of buffers. */ - serialize(): Promise>; + serialize(): Promise>; } /** @@ -782,7 +798,7 @@ export class WarpStateIndexBuilder { * Convenience function that creates a WarpStateIndexBuilder, builds from state, * and returns the serialized tree and stats. */ -export function buildWarpStateIndex(state: WarpStateV5, options?: { crypto?: CryptoPort }): Promise<{ tree: Record; stats: { nodes: number; edges: number } }>; +export function buildWarpStateIndex(state: WarpStateV5, options?: { crypto?: CryptoPort }): Promise<{ tree: Record; stats: { nodes: number; edges: number } }>; /** * Computes a deterministic hash of a WarpStateV5 state. @@ -1438,9 +1454,9 @@ export class PatchBuilderV2 { /** Sets a property on an edge. */ setEdgeProperty(from: string, to: string, label: string, key: string, value: unknown): PatchBuilderV2; /** Attaches content to a node (writes blob + sets _content property). */ - attachContent(nodeId: string, content: Buffer | string): Promise; + attachContent(nodeId: string, content: Uint8Array | string): Promise; /** Attaches content to an edge (writes blob + sets _content edge property). */ - attachEdgeContent(from: string, to: string, label: string, content: Buffer | string): Promise; + attachEdgeContent(from: string, to: string, label: string, content: Uint8Array | string): Promise; /** Builds the PatchV2 object without committing. */ build(): PatchV2; /** Commits the patch to the graph and returns the commit SHA. 
*/ @@ -1472,9 +1488,9 @@ export class PatchSession { /** Sets a property on an edge. */ setEdgeProperty(from: string, to: string, label: string, key: string, value: unknown): this; /** Attaches content to a node (writes blob + sets _content property). */ - attachContent(nodeId: string, content: Buffer | string): Promise; + attachContent(nodeId: string, content: Uint8Array | string): Promise; /** Attaches content to an edge (writes blob + sets _content edge property). */ - attachEdgeContent(from: string, to: string, label: string, content: Buffer | string): Promise; + attachEdgeContent(from: string, to: string, label: string, content: Uint8Array | string): Promise; /** Builds the PatchV2 object without committing. */ build(): PatchV2; /** Commits the patch with CAS protection. */ @@ -1513,6 +1529,18 @@ export class WriterError extends Error { constructor(code: string, message: string, cause?: Error); } +/** + * Error thrown when a patch requires decryption but no patchBlobStorage + * (with encryption key) is configured. + */ +export class EncryptionError extends Error { + readonly name: 'EncryptionError'; + readonly code: string; + readonly context: Record; + + constructor(message: string, options?: { context?: Record }); +} + // ============================================================================ // GC Types // ============================================================================ @@ -1672,6 +1700,10 @@ export default class WarpGraph { crypto?: CryptoPort; codec?: unknown; seekCache?: SeekCachePort; + /** Content blob storage (for attachContent/attachEdgeContent). */ + blobStorage?: BlobStoragePort; + /** Patch blob storage — when set, patch CBOR is encrypted via this port. */ + patchBlobStorage?: BlobStoragePort; }): Promise; /** @@ -1763,9 +1795,9 @@ export default class WarpGraph { /** * Gets the content blob for a node, or null if none is attached. - * Returns raw Buffer; call `.toString('utf8')` for text. 
+ * Returns raw bytes; use `new TextDecoder().decode(result)` for text. */ - getContent(nodeId: string): Promise; + getContent(nodeId: string): Promise; /** * Gets the content blob OID for an edge, or null if none is attached. @@ -1774,9 +1806,9 @@ export default class WarpGraph { /** * Gets the content blob for an edge, or null if none is attached. - * Returns raw Buffer; call `.toString('utf8')` for text. + * Returns raw bytes; use `new TextDecoder().decode(result)` for text. */ - getEdgeContent(from: string, to: string, label: string): Promise; + getEdgeContent(from: string, to: string, label: string): Promise; /** * Checks if a node exists in the materialized state. @@ -2099,14 +2131,14 @@ export class ProvenanceIndex { /** * Serializes the index to CBOR format for checkpoint storage. */ - serialize(): Buffer; + serialize(): Uint8Array; /** * Deserializes an index from CBOR format. * * @throws Error if the buffer contains an unsupported version */ - static deserialize(buffer: Buffer): ProvenanceIndex; + static deserialize(buffer: Uint8Array): ProvenanceIndex; /** * Returns a JSON-serializable representation of this index. @@ -2650,3 +2682,68 @@ export function deserializeWormhole(json: { patchCount: number; payload: PatchEntry[]; }): WormholeEdge; + +// ── WebSocket Server Port ──────────────────────────────────────────── + +/** + * A single WebSocket connection. + */ +export interface WsConnection { + /** Send a text message to the client. */ + send(message: string): void; + /** Register a handler for incoming messages. */ + onMessage(handler: (message: string) => void): void; + /** Register a handler for connection close events. */ + onClose(handler: (code?: number, reason?: string) => void): void; + /** Close the connection. */ + close(): void; +} + +/** + * Handle returned by WebSocketServerPort.createServer(). + */ +export interface WsServerHandle { + /** Start listening on the given port and optional host. 
*/ + listen(port: number, host?: string): Promise<{ port: number; host: string }>; + /** Shut down the server. */ + close(): Promise; +} + +/** + * Port for WebSocket server creation. + * + * Abstracts platform-specific WebSocket server APIs (Node ws, Bun.serve, + * Deno.upgradeWebSocket) so domain code doesn't depend on any runtime. + */ +export class WebSocketServerPort { + /** Creates a WebSocket server. */ + createServer( + onConnection: (connection: WsConnection) => void + ): WsServerHandle; +} + +// ── WarpServeService ───────────────────────────────────────────────── + +/** + * Domain service that bridges WarpGraph instances to browser clients + * over a WebSocketServerPort. + */ +export class WarpServeService { + constructor(options: { + wsPort: WebSocketServerPort; + graphs: Array<{ + graphName: string; + materialize: Function; + subscribe: Function; + getNodeProps: Function; + createPatch: Function; + query: Function; + }>; + }); + + /** Start listening for WebSocket connections. */ + listen(port: number, host?: string): Promise<{ port: number; host: string }>; + + /** Shut down the server and clean up subscriptions. 
*/ + close(): Promise; +} diff --git a/index.js b/index.js index 4483b0ee..2d92f571 100644 --- a/index.js +++ b/index.js @@ -38,9 +38,11 @@ import NoOpLogger from './src/infrastructure/adapters/NoOpLogger.js'; import ConsoleLogger, { LogLevel } from './src/infrastructure/adapters/ConsoleLogger.js'; import ClockAdapter from './src/infrastructure/adapters/ClockAdapter.js'; import { + EncryptionError, ForkError, IndexError, QueryError, + PatchError, SchemaUnsupportedError, ShardLoadError, ShardCorruptionError, @@ -51,6 +53,14 @@ import { SyncError, WormholeError, } from './src/domain/errors/index.js'; +import WriterError from './src/domain/errors/WriterError.js'; +import BlobStoragePort from './src/ports/BlobStoragePort.js'; +import CryptoPort from './src/ports/CryptoPort.js'; +import HttpServerPort from './src/ports/HttpServerPort.js'; +import NodeCryptoAdapter from './src/infrastructure/adapters/NodeCryptoAdapter.js'; +import WebCryptoAdapter from './src/infrastructure/adapters/WebCryptoAdapter.js'; +import BunHttpAdapter from './src/infrastructure/adapters/BunHttpAdapter.js'; +import DenoHttpAdapter from './src/infrastructure/adapters/DenoHttpAdapter.js'; import { checkAborted, createTimeoutSignal } from './src/domain/utils/cancellation.js'; // Multi-writer graph support (WARP) @@ -104,6 +114,12 @@ import { } from './src/domain/services/WormholeService.js'; import BisectService from './src/domain/services/BisectService.js'; +import { PatchBuilderV2 } from './src/domain/services/PatchBuilderV2.js'; +import { PatchSession } from './src/domain/warp/PatchSession.js'; +import { Writer } from './src/domain/warp/Writer.js'; +import { ProvenanceIndex } from './src/domain/services/ProvenanceIndex.js'; +import WarpStateIndexBuilder, { buildWarpStateIndex } from './src/domain/services/WarpStateIndexBuilder.js'; +import { computeStateHashV5 } from './src/domain/services/StateSerializerV5.js'; const TraversalService = CommitDagTraversalService; @@ -135,7 +151,22 @@ export { 
SeekCachePort, ClockAdapter, + // Port contracts + BlobStoragePort, + CryptoPort, + HttpServerPort, + + // Crypto adapters + NodeCryptoAdapter, + WebCryptoAdapter, + + // HTTP adapters + BunHttpAdapter, + DenoHttpAdapter, + // Error types for integrity failure handling + EncryptionError, + PatchError, ForkError, IndexError, QueryError, @@ -148,6 +179,7 @@ export { OperationAbortedError, SyncError, WormholeError, + WriterError, // Cancellation utilities checkAborted, @@ -157,6 +189,10 @@ export { WarpGraph, QueryBuilder, ObserverView, + PatchBuilderV2, + PatchSession, + Writer, + ProvenanceIndex, computeTranslationCost, // WARP type creators @@ -175,6 +211,11 @@ export { isEdgePropKey, CONTENT_PROPERTY_KEY, + // State indexing & hashing + WarpStateIndexBuilder, + buildWarpStateIndex, + computeStateHashV5, + // WARP migration migrateV4toV5, diff --git a/jsr.json b/jsr.json index 812529a9..4cd74de0 100644 --- a/jsr.json +++ b/jsr.json @@ -1,18 +1,23 @@ { "name": "@git-stunts/git-warp", - "version": "13.1.0", + "version": "14.0.0", "imports": { "roaring": "npm:roaring@^2.7.0" }, "exports": { ".": "./index.js", "./node": "./src/domain/entities/GraphNode.js", - "./visualization": "./src/visualization/index.js" + "./visualization": "./src/visualization/index.js", + "./browser": "./browser.js", + "./sha1sync": "./src/infrastructure/adapters/sha1sync.js" }, "publish": { "include": [ "index.js", "index.d.ts", + "browser.js", + "browser.d.ts", + "sha1sync.d.ts", "src/**/*.js", "src/**/*.d.ts", "README.md", diff --git a/package-lock.json b/package-lock.json index 69c7d148..c3df96f7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,16 +1,16 @@ { "name": "@git-stunts/git-warp", - "version": "13.1.0", + "version": "14.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@git-stunts/git-warp", - "version": "13.1.0", + "version": "14.0.0", "license": "Apache-2.0", "dependencies": { "@git-stunts/alfred": "^0.4.0", - "@git-stunts/git-cas": "^3.0.0", + 
"@git-stunts/git-cas": "^5.2.4", "@git-stunts/plumbing": "^2.8.0", "@git-stunts/trailer-codec": "^2.1.1", "boxen": "^7.1.1", @@ -23,6 +23,7 @@ "roaring-wasm": "^1.1.0", "string-width": "^7.1.0", "wrap-ansi": "^9.0.0", + "ws": "^8.19.0", "zod": "3.24.1" }, "bin": { @@ -33,6 +34,7 @@ "@eslint/js": "^9.17.0", "@git-stunts/docker-guard": "^0.1.0", "@types/node": "^22.15.29", + "@types/ws": "^8.18.1", "@typescript-eslint/eslint-plugin": "^8.54.0", "@typescript-eslint/parser": "^8.54.0", "eslint": "^9.17.0", @@ -721,6 +723,40 @@ "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, + "node_modules/@flyingrobots/bijou": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/@flyingrobots/bijou/-/bijou-0.2.0.tgz", + "integrity": "sha512-Oix2Kqq4w87KCkyK2W+8u4E4aGVQiraUy8BF3Bk/NRtT+UlUI0ETs+E7GwpwOyOvHvt0cIOjcMmVPxzKa52P4A==", + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/@flyingrobots/bijou-node": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/@flyingrobots/bijou-node/-/bijou-node-0.2.0.tgz", + "integrity": "sha512-QaIaoBF0OMRHGtLsga1knplfFEmAeC6Lt4SxWkCKIJahMdNqXatCWM3RdzXcbjfcXqRIXyeEpm1agmmwi4gneQ==", + "license": "MIT", + "dependencies": { + "@flyingrobots/bijou": "0.2.0", + "chalk": "^5.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@flyingrobots/bijou-tui": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/@flyingrobots/bijou-tui/-/bijou-tui-0.2.0.tgz", + "integrity": "sha512-pXEo/Am6svRIKvez7926avdGUbfVndlSOpidBPc42YjCQHU5ZQrEuJpjI7niJb63N0ruxu0VXHci8N0wzBYSow==", + "license": "MIT", + "dependencies": { + "@flyingrobots/bijou": "0.2.0" + }, + "engines": { + "node": ">=18" + } + }, "node_modules/@git-stunts/alfred": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/@git-stunts/alfred/-/alfred-0.4.0.tgz", @@ -738,11 +774,14 @@ "license": "Apache-2.0" }, "node_modules/@git-stunts/git-cas": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/@git-stunts/git-cas/-/git-cas-3.0.0.tgz", - "integrity": "sha512-5uqIsTukE+8f1h317ZmGneYpTJ1ecBxg16QJxvF3kNrfQR3/DcAH4fQyMRkCIQtSHEz2p6UpOwpM10R9dEQm/w==", + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/@git-stunts/git-cas/-/git-cas-5.2.4.tgz", + "integrity": "sha512-8jxOLbeGOsJmCLLxybQ3KRvnCzxhHnrbicBAZOlSgIQBVe0VO5RDZgNDcw/Fk0zIOf9TOb1F8YuArwDVc/jM/A==", "license": "Apache-2.0", "dependencies": { + "@flyingrobots/bijou": "^0.2.0", + "@flyingrobots/bijou-node": "^0.2.0", + "@flyingrobots/bijou-tui": "^0.2.0", "@git-stunts/alfred": "^0.10.0", "@git-stunts/plumbing": "^2.8.0", "cbor-x": "^1.6.0", @@ -1321,6 +1360,16 @@ "undici-types": "~6.21.0" } }, + "node_modules/@types/ws": { + "version": "8.18.1", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz", + "integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "8.54.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.54.0.tgz", @@ -4472,9 +4521,9 @@ } }, "node_modules/tar": { - "version": "7.5.9", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.9.tgz", - "integrity": "sha512-BTLcK0xsDh2+PUe9F6c2TlRp4zOOBMTkoQHQIWSIzI0R7KG46uEwq4OPk2W7bZcprBMsuaeFsqwYr7pjh6CuHg==", + "version": "7.5.10", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.10.tgz", + "integrity": "sha512-8mOPs1//5q/rlkNSPcCegA6hiHJYDmSLEI8aMH/CdSQJNWztHC9WHNam5zdQlfpTwB9Xp7IBEsHfV5LKMJGVAw==", "license": "BlueOak-1.0.0", "dependencies": { "@isaacs/fs-minipass": "^4.0.0", @@ -4956,6 +5005,27 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, + "node_modules/ws": { + "version": "8.19.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz", + "integrity": 
"sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/yallist": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", diff --git a/package.json b/package.json index 938d6af9..418d7048 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@git-stunts/git-warp", - "version": "13.1.0", + "version": "14.0.0", "description": "Deterministic WARP graph over Git: graph-native storage, traversal, and tooling.", "type": "module", "license": "Apache-2.0", @@ -38,6 +38,16 @@ "import": "./src/visualization/index.js", "default": "./src/visualization/index.js" }, + "./browser": { + "types": "./browser.d.ts", + "import": "./browser.js", + "default": "./browser.js" + }, + "./sha1sync": { + "types": "./sha1sync.d.ts", + "import": "./src/infrastructure/adapters/sha1sync.js", + "default": "./src/infrastructure/adapters/sha1sync.js" + }, "./package.json": "./package.json" }, "files": [ @@ -46,7 +56,10 @@ "bin/cli", "bin/git-warp", "src", + "browser.js", "index.js", + "browser.d.ts", + "sha1sync.d.ts", "index.d.ts", "README.md", "LICENSE", @@ -96,7 +109,7 @@ }, "dependencies": { "@git-stunts/alfred": "^0.4.0", - "@git-stunts/git-cas": "^3.0.0", + "@git-stunts/git-cas": "^5.2.4", "@git-stunts/plumbing": "^2.8.0", "@git-stunts/trailer-codec": "^2.1.1", "boxen": "^7.1.1", @@ -109,12 +122,14 @@ "roaring-wasm": "^1.1.0", "string-width": "^7.1.0", "wrap-ansi": "^9.0.0", + "ws": "^8.19.0", "zod": "3.24.1" }, "devDependencies": { "@eslint/js": "^9.17.0", "@git-stunts/docker-guard": "^0.1.0", "@types/node": "^22.15.29", + "@types/ws": "^8.18.1", "@typescript-eslint/eslint-plugin": "^8.54.0", 
"@typescript-eslint/parser": "^8.54.0", "eslint": "^9.17.0", diff --git a/sha1sync.d.ts b/sha1sync.d.ts new file mode 100644 index 00000000..02d287ef --- /dev/null +++ b/sha1sync.d.ts @@ -0,0 +1,14 @@ +/** + * Synchronous SHA-1 for browser use with InMemoryGraphAdapter. + * + * NOT used for security — only for Git content addressing. + */ + +/** + * Computes a SHA-1 hash of the given data, returning a 40-character + * lowercase hex string. + * + * @param data - The data to hash + * @returns 40-character lowercase hex SHA-1 digest + */ +export function sha1sync(data: Uint8Array): string; diff --git a/src/domain/WarpGraph.js b/src/domain/WarpGraph.js index 1f978bd1..92930883 100644 --- a/src/domain/WarpGraph.js +++ b/src/domain/WarpGraph.js @@ -48,9 +48,9 @@ const DEFAULT_ADJACENCY_CACHE_SIZE = 3; export default class WarpGraph { /** * @private - * @param {{ persistence: CorePersistence, graphName: string, writerId: string, gcPolicy?: Record, adjacencyCacheSize?: number, checkpointPolicy?: {every: number}, autoMaterialize?: boolean, onDeleteWithData?: 'reject'|'cascade'|'warn', logger?: import('../ports/LoggerPort.js').default, clock?: import('../ports/ClockPort.js').default, crypto?: import('../ports/CryptoPort.js').default, codec?: import('../ports/CodecPort.js').default, seekCache?: import('../ports/SeekCachePort.js').default, audit?: boolean }} options + * @param {{ persistence: CorePersistence, graphName: string, writerId: string, gcPolicy?: Record, adjacencyCacheSize?: number, checkpointPolicy?: {every: number}, autoMaterialize?: boolean, onDeleteWithData?: 'reject'|'cascade'|'warn', logger?: import('../ports/LoggerPort.js').default, clock?: import('../ports/ClockPort.js').default, crypto?: import('../ports/CryptoPort.js').default, codec?: import('../ports/CodecPort.js').default, seekCache?: import('../ports/SeekCachePort.js').default, audit?: boolean, blobStorage?: import('../ports/BlobStoragePort.js').default, patchBlobStorage?: 
import('../ports/BlobStoragePort.js').default }} options */ - constructor({ persistence, graphName, writerId, gcPolicy = {}, adjacencyCacheSize = DEFAULT_ADJACENCY_CACHE_SIZE, checkpointPolicy, autoMaterialize = true, onDeleteWithData = 'warn', logger, clock, crypto, codec, seekCache, audit = false }) { + constructor({ persistence, graphName, writerId, gcPolicy = {}, adjacencyCacheSize = DEFAULT_ADJACENCY_CACHE_SIZE, checkpointPolicy, autoMaterialize = true, onDeleteWithData = 'warn', logger, clock, crypto, codec, seekCache, audit = false, blobStorage, patchBlobStorage }) { /** @type {CorePersistence} */ this._persistence = /** @type {CorePersistence} */ (persistence); @@ -144,6 +144,12 @@ export default class WarpGraph { /** @type {import('../ports/SeekCachePort.js').default|null} */ this._seekCache = seekCache || null; + /** @type {import('../ports/BlobStoragePort.js').default|null} */ + this._blobStorage = blobStorage || null; + + /** @type {import('../ports/BlobStoragePort.js').default|null} */ + this._patchBlobStorage = patchBlobStorage || null; + /** @type {boolean} */ this._patchInProgress = false; @@ -241,7 +247,7 @@ export default class WarpGraph { /** * Opens a multi-writer graph. 
* - * @param {{ persistence: CorePersistence, graphName: string, writerId: string, gcPolicy?: Record, adjacencyCacheSize?: number, checkpointPolicy?: {every: number}, autoMaterialize?: boolean, onDeleteWithData?: 'reject'|'cascade'|'warn', logger?: import('../ports/LoggerPort.js').default, clock?: import('../ports/ClockPort.js').default, crypto?: import('../ports/CryptoPort.js').default, codec?: import('../ports/CodecPort.js').default, seekCache?: import('../ports/SeekCachePort.js').default, audit?: boolean }} options + * @param {{ persistence: CorePersistence, graphName: string, writerId: string, gcPolicy?: Record, adjacencyCacheSize?: number, checkpointPolicy?: {every: number}, autoMaterialize?: boolean, onDeleteWithData?: 'reject'|'cascade'|'warn', logger?: import('../ports/LoggerPort.js').default, clock?: import('../ports/ClockPort.js').default, crypto?: import('../ports/CryptoPort.js').default, codec?: import('../ports/CodecPort.js').default, seekCache?: import('../ports/SeekCachePort.js').default, audit?: boolean, blobStorage?: import('../ports/BlobStoragePort.js').default, patchBlobStorage?: import('../ports/BlobStoragePort.js').default }} options * @returns {Promise} The opened graph instance * @throws {Error} If graphName, writerId, checkpointPolicy, or onDeleteWithData is invalid * @@ -252,7 +258,7 @@ export default class WarpGraph { * writerId: 'node-1' * }); */ - static async open({ persistence, graphName, writerId, gcPolicy = {}, adjacencyCacheSize, checkpointPolicy, autoMaterialize, onDeleteWithData, logger, clock, crypto, codec, seekCache, audit }) { + static async open({ persistence, graphName, writerId, gcPolicy = {}, adjacencyCacheSize, checkpointPolicy, autoMaterialize, onDeleteWithData, logger, clock, crypto, codec, seekCache, audit, blobStorage, patchBlobStorage }) { // Validate inputs validateGraphName(graphName); validateWriterId(writerId); @@ -289,7 +295,7 @@ export default class WarpGraph { } } - const graph = new WarpGraph({ persistence, 
graphName, writerId, gcPolicy, adjacencyCacheSize, checkpointPolicy, autoMaterialize, onDeleteWithData, logger, clock, crypto, codec, seekCache, audit }); + const graph = new WarpGraph({ persistence, graphName, writerId, gcPolicy, adjacencyCacheSize, checkpointPolicy, autoMaterialize, onDeleteWithData, logger, clock, crypto, codec, seekCache, audit, blobStorage, patchBlobStorage }); // Validate migration boundary await graph._validateMigrationBoundary(); diff --git a/src/domain/errors/EncryptionError.js b/src/domain/errors/EncryptionError.js new file mode 100644 index 00000000..0fb981b7 --- /dev/null +++ b/src/domain/errors/EncryptionError.js @@ -0,0 +1,24 @@ +import WarpError from './WarpError.js'; + +/** + * Error thrown when a patch requires decryption but no patchBlobStorage + * (with encryption key) is configured. + * + * ## Error Codes + * + * | Code | Description | + * |------|-------------| + * | `E_ENCRYPTED_PATCH` | Patch is encrypted but no decryption key is available | + * + * @class EncryptionError + * @extends WarpError + */ +export default class EncryptionError extends WarpError { + /** + * @param {string} message + * @param {{ context?: Record }} [options={}] + */ + constructor(message, options = {}) { + super(message, 'E_ENCRYPTED_PATCH', options); + } +} diff --git a/src/domain/errors/index.js b/src/domain/errors/index.js index 91a63cce..32c0a44b 100644 --- a/src/domain/errors/index.js +++ b/src/domain/errors/index.js @@ -5,6 +5,7 @@ */ export { default as EmptyMessageError } from './EmptyMessageError.js'; +export { default as EncryptionError } from './EncryptionError.js'; export { default as PersistenceError } from './PersistenceError.js'; export { default as WarpError } from './WarpError.js'; export { default as ForkError } from './ForkError.js'; diff --git a/src/domain/services/AuditReceiptService.js b/src/domain/services/AuditReceiptService.js index c6c1a073..3c004b48 100644 --- a/src/domain/services/AuditReceiptService.js +++ 
b/src/domain/services/AuditReceiptService.js @@ -351,7 +351,7 @@ export class AuditReceiptService { // Write blob let blobOid; try { - blobOid = await this._persistence.writeBlob(Buffer.from(cborBytes)); + blobOid = await this._persistence.writeBlob(cborBytes); } catch (err) { this._logger?.warn('[warp:audit]', { code: 'AUDIT_WRITE_BLOB_FAILED', diff --git a/src/domain/services/BitmapIndexBuilder.js b/src/domain/services/BitmapIndexBuilder.js index f6dc57ff..62e25fe5 100644 --- a/src/domain/services/BitmapIndexBuilder.js +++ b/src/domain/services/BitmapIndexBuilder.js @@ -4,6 +4,7 @@ import { computeChecksum } from '../utils/checksumUtils.js'; import { getRoaringBitmap32, getNativeRoaringAvailable } from '../utils/roaring.js'; import { canonicalStringify } from '../utils/canonicalStringify.js'; import { SHARD_VERSION } from '../utils/shardVersion.js'; +import { textEncode, base64Encode } from '../utils/bytes.js'; // Re-export for backwards compatibility export { SHARD_VERSION }; @@ -42,7 +43,7 @@ const wrapShard = async (data, crypto) => ({ /** * Serializes a frontier Map into CBOR and JSON blobs in the given tree. 
* @param {Map} frontier - Writer→tip SHA map - * @param {Record} tree - Target tree to add entries to + * @param {Record} tree - Target tree to add entries to * @param {import('../../ports/CodecPort.js').default} codec - Codec for CBOR serialization */ function serializeFrontierToTree(frontier, tree, codec) { @@ -52,8 +53,8 @@ function serializeFrontierToTree(frontier, tree, codec) { sorted[key] = frontier.get(key); } const envelope = { version: 1, writerCount: frontier.size, frontier: sorted }; - tree['frontier.cbor'] = Buffer.from(codec.encode(envelope)); - tree['frontier.json'] = Buffer.from(canonicalStringify(envelope)); + tree['frontier.cbor'] = codec.encode(envelope); + tree['frontier.json'] = textEncode(canonicalStringify(envelope)); } /** @@ -135,10 +136,10 @@ export default class BitmapIndexBuilder { * Each shard is wrapped in a version/checksum envelope for integrity verification. * * @param {{ frontier?: Map }} [options] - Serialization options - * @returns {Promise>} Map of path → serialized content + * @returns {Promise>} Map of path → serialized content */ async serialize({ frontier } = {}) { - /** @type {Record} */ + /** @type {Record} */ const tree = {}; // Serialize ID mappings (sharded by prefix) @@ -152,7 +153,7 @@ export default class BitmapIndexBuilder { idShards[prefix][sha] = id; } for (const [prefix, map] of Object.entries(idShards)) { - tree[`meta_${prefix}.json`] = Buffer.from(JSON.stringify(await wrapShard(map, this._crypto))); + tree[`meta_${prefix}.json`] = textEncode(JSON.stringify(await wrapShard(map, this._crypto))); } // Serialize bitmaps (sharded by prefix, per-node within shard) @@ -167,12 +168,12 @@ export default class BitmapIndexBuilder { bitmapShards[type][prefix] = {}; } // Encode bitmap as base64 for JSON storage - bitmapShards[type][prefix][sha] = Buffer.from(bitmap.serialize(true)).toString('base64'); + bitmapShards[type][prefix][sha] = base64Encode(new Uint8Array(bitmap.serialize(true))); } for (const type of ['fwd', 
'rev']) { for (const [prefix, shardData] of Object.entries(bitmapShards[type])) { - tree[`shards_${type}_${prefix}.json`] = Buffer.from(JSON.stringify(await wrapShard(shardData, this._crypto))); + tree[`shards_${type}_${prefix}.json`] = textEncode(JSON.stringify(await wrapShard(shardData, this._crypto))); } } diff --git a/src/domain/services/BitmapIndexReader.js b/src/domain/services/BitmapIndexReader.js index 28062511..54011d26 100644 --- a/src/domain/services/BitmapIndexReader.js +++ b/src/domain/services/BitmapIndexReader.js @@ -5,6 +5,7 @@ import LRUCache from '../utils/LRUCache.js'; import { getRoaringBitmap32 } from '../utils/roaring.js'; import { canonicalStringify } from '../utils/canonicalStringify.js'; import { isValidShardOid } from '../utils/validateShardOid.js'; +import { base64Decode } from '../utils/bytes.js'; /** @typedef {import('../../ports/IndexStoragePort.js').default} IndexStoragePort */ /** @typedef {import('../types/WarpPersistence.js').IndexStorage} IndexStorage */ @@ -202,7 +203,7 @@ export default class BitmapIndexReader { } // Decode base64 bitmap and extract IDs - const buffer = Buffer.from(encoded, 'base64'); + const buffer = base64Decode(encoded); let ids; try { const RoaringBitmap32 = getRoaringBitmap32(); diff --git a/src/domain/services/GitLogParser.js b/src/domain/services/GitLogParser.js index d0bda085..7a3e7bd5 100644 --- a/src/domain/services/GitLogParser.js +++ b/src/domain/services/GitLogParser.js @@ -1,5 +1,6 @@ import GraphNode from '../entities/GraphNode.js'; import { checkAborted } from '../utils/cancellation.js'; +import { concatBytes, textEncode, textDecode } from '../utils/bytes.js'; /** * NUL byte (0x00) - Delimits commit records in git log output. @@ -35,10 +36,8 @@ export const RECORD_SEPARATOR = '\x00'; * testing and alternative implementations. * * **Binary-First Processing**: The parser works directly with binary data for - * performance. 
Buffer.indexOf(0) is faster than string indexOf('\0') because: - * - No UTF-8 decoding overhead during scanning - * - Native C++ implementation in Node.js Buffer - * - Byte-level comparison vs character-level + * performance. Uint8Array.indexOf(0) scans bytes without UTF-8 decoding + * overhead, and byte-level comparison is faster than character-level. * * UTF-8 decoding only happens once per complete record, not during scanning. * This is especially beneficial for large commit histories where most of the @@ -74,9 +73,8 @@ export default class GitLogParser { * Parses a stream of git log output and yields GraphNode instances. * * **Binary-first processing for performance**: - * - Accepts Buffer, Uint8Array, or string chunks - * - Finds NUL bytes (0x00) directly in binary using Buffer.indexOf(0) - * - Buffer.indexOf(0) is faster than string indexOf('\0') - native C++ vs JS + * - Accepts Uint8Array or string chunks + * - Finds NUL bytes (0x00) directly in binary using Uint8Array.indexOf(0) * - UTF-8 decoding only happens for complete records, not during scanning * * Handles: @@ -86,8 +84,8 @@ export default class GitLogParser { * - Backwards compatibility with string chunks * - Cancellation via AbortSignal * - * @param {AsyncIterable} stream - The git log output stream. - * May yield Buffer, Uint8Array, or string chunks. + * @param {AsyncIterable} stream - The git log output stream. + * May yield Uint8Array or string chunks. * @param {{ signal?: AbortSignal }} [options] - Parse options * @yields {GraphNode} Parsed graph nodes. Invalid records are silently skipped. 
* @throws {OperationAbortedError} If signal is aborted during parsing @@ -106,23 +104,24 @@ export default class GitLogParser { * } */ async *parse(stream, { signal } = {}) { - let buffer = Buffer.alloc(0); // Binary buffer accumulator + /** @type {Uint8Array} */ + let buffer = new Uint8Array(0); // Binary buffer accumulator for await (const chunk of stream) { checkAborted(signal, 'GitLogParser.parse'); - // Convert string chunks to Buffer, keep Buffer chunks as-is - const chunkBuffer = + // Convert string chunks to Uint8Array, keep Uint8Array chunks as-is + const chunkBytes = typeof chunk === 'string' - ? Buffer.from(chunk, 'utf-8') - : Buffer.isBuffer(chunk) + ? textEncode(chunk) + : chunk instanceof Uint8Array ? chunk - : Buffer.from(chunk); // Uint8Array + : Uint8Array.from(chunk); // Append to accumulator - buffer = Buffer.concat([buffer, chunkBuffer]); + buffer = concatBytes(buffer, chunkBytes); - // Find NUL bytes (0x00) in binary - faster than string indexOf + // Find NUL bytes (0x00) in binary let nullIndex; while ((nullIndex = buffer.indexOf(0)) !== -1) { checkAborted(signal, 'GitLogParser.parse'); @@ -132,7 +131,7 @@ export default class GitLogParser { buffer = buffer.subarray(nullIndex + 1); // Only decode UTF-8 for complete records - const block = recordBytes.toString('utf-8'); + const block = textDecode(recordBytes); const node = this.parseNode(block); if (node) { yield node; @@ -142,7 +141,7 @@ export default class GitLogParser { // Process any remaining data (final record without trailing NUL) if (buffer.length > 0) { - const block = buffer.toString('utf-8'); + const block = textDecode(buffer); if (block) { const node = this.parseNode(block); if (node) { diff --git a/src/domain/services/HookInstaller.js b/src/domain/services/HookInstaller.js index 8692d015..9677ff02 100644 --- a/src/domain/services/HookInstaller.js +++ b/src/domain/services/HookInstaller.js @@ -9,7 +9,7 @@ /** * @typedef {Object} FsAdapter - * @property {(path: string, content: 
string | Buffer, options?: Object) => void} writeFileSync + * @property {(path: string, content: string | Uint8Array, options?: Object) => void} writeFileSync * @property {(path: string, mode: number) => void} chmodSync * @property {(path: string, encoding?: string) => string} readFileSync * @property {(path: string) => boolean} existsSync diff --git a/src/domain/services/HttpSyncServer.js b/src/domain/services/HttpSyncServer.js index 569be9cf..9b853a05 100644 --- a/src/domain/services/HttpSyncServer.js +++ b/src/domain/services/HttpSyncServer.js @@ -168,7 +168,7 @@ function validateRoute(request, expectedPath, defaultHost) { /** * Checks if the request body exceeds the maximum allowed size. * - * @param {Buffer | Uint8Array | undefined} body + * @param {Uint8Array | undefined} body * @param {number} maxBytes * @returns {{ status: number, headers: Record, body: string }|null} Error response or null if within limits * @private @@ -184,7 +184,7 @@ function checkBodySize(body, maxBytes) { * Parses and validates the request body as a sync request. * Uses Zod-based SyncPayloadSchema for shape + resource limit validation. * - * @param {Buffer | Uint8Array | undefined} body + * @param {Uint8Array | undefined} body * @returns {{ error: { status: number, headers: Record, body: string }, parsed: null } | { error: null, parsed: import('./SyncProtocol.js').SyncRequest }} * @private */ @@ -256,7 +256,7 @@ export default class HttpSyncServer { * In log-only mode both checks record metrics/logs but always return * null so the request proceeds. 
* - * @param {{ method: string, url: string, headers: Record, body: Buffer | Uint8Array | undefined }} request + * @param {{ method: string, url: string, headers: Record, body: Uint8Array | undefined }} request * @param {Record} parsed - Parsed sync request body * @returns {Promise<{ status: number, headers: Record, body: string }|null>} * @private @@ -300,7 +300,7 @@ export default class HttpSyncServer { * @private */ async _handleRequest(request) { - /** @type {{ method: string, url: string, headers: Record, body: Buffer | Uint8Array | undefined }} */ + /** @type {{ method: string, url: string, headers: Record, body: Uint8Array | undefined }} */ const req = { ...request, headers: /** @type {Record} */ (request.headers) }; const contentTypeError = checkContentType(req.headers); if (contentTypeError) { diff --git a/src/domain/services/MessageCodecInternal.js b/src/domain/services/MessageCodecInternal.js index ff81f88a..114dc156 100644 --- a/src/domain/services/MessageCodecInternal.js +++ b/src/domain/services/MessageCodecInternal.js @@ -47,6 +47,7 @@ export const TRAILER_KEYS = { checkpointVersion: 'eg-checkpoint', dataCommit: 'eg-data-commit', opsDigest: 'eg-ops-digest', + encrypted: 'eg-encrypted', }; /** diff --git a/src/domain/services/PatchBuilderV2.js b/src/domain/services/PatchBuilderV2.js index 9ba09cb8..ea6d3b43 100644 --- a/src/domain/services/PatchBuilderV2.js +++ b/src/domain/services/PatchBuilderV2.js @@ -99,9 +99,9 @@ export class PatchBuilderV2 { /** * Creates a new PatchBuilderV2. 
* - * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default & import('../../ports/TreePort.js').default & import('../../ports/RefPort.js').default, graphName: string, writerId: string, lamport: number, versionVector: import('../crdt/VersionVector.js').VersionVector, getCurrentState: () => import('../services/JoinReducer.js').WarpStateV5 | null, expectedParentSha?: string|null, onCommitSuccess?: ((result: {patch: import('../types/WarpTypesV2.js').PatchV2, sha: string}) => void | Promise)|null, onDeleteWithData?: 'reject'|'cascade'|'warn', codec?: import('../../ports/CodecPort.js').default, logger?: import('../../ports/LoggerPort.js').default }} options + * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default & import('../../ports/TreePort.js').default & import('../../ports/RefPort.js').default, graphName: string, writerId: string, lamport: number, versionVector: import('../crdt/VersionVector.js').VersionVector, getCurrentState: () => import('../services/JoinReducer.js').WarpStateV5 | null, expectedParentSha?: string|null, onCommitSuccess?: ((result: {patch: import('../types/WarpTypesV2.js').PatchV2, sha: string}) => void | Promise)|null, onDeleteWithData?: 'reject'|'cascade'|'warn', codec?: import('../../ports/CodecPort.js').default, logger?: import('../../ports/LoggerPort.js').default, blobStorage?: import('../../ports/BlobStoragePort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} options */ - constructor({ persistence, graphName, writerId, lamport, versionVector, getCurrentState, expectedParentSha = null, onCommitSuccess = null, onDeleteWithData = 'warn', codec, logger }) { + constructor({ persistence, graphName, writerId, lamport, versionVector, getCurrentState, expectedParentSha = null, onCommitSuccess = null, onDeleteWithData = 'warn', codec, logger, blobStorage, patchBlobStorage }) { /** @type 
{import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default & import('../../ports/TreePort.js').default & import('../../ports/RefPort.js').default} */ this._persistence = /** @type {import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default & import('../../ports/TreePort.js').default & import('../../ports/RefPort.js').default} */ (persistence); @@ -157,6 +157,12 @@ export class PatchBuilderV2 { */ this._contentBlobs = []; + /** @type {import('../../ports/BlobStoragePort.js').default|null} */ + this._blobStorage = blobStorage || null; + + /** @type {import('../../ports/BlobStoragePort.js').default|null} */ + this._patchBlobStorage = patchBlobStorage || null; + /** * Observed operands — entities whose current state was consulted to build * this patch. @@ -536,7 +542,9 @@ export class PatchBuilderV2 { // Validate identifiers before writing blob to avoid orphaned blobs _assertNoReservedBytes(nodeId, 'nodeId'); _assertNoReservedBytes(CONTENT_PROPERTY_KEY, 'key'); - const oid = await this._persistence.writeBlob(content); + const oid = this._blobStorage + ? await this._blobStorage.store(content, { slug: `${this._graphName}/${nodeId}` }) + : await this._persistence.writeBlob(content); this.setProperty(nodeId, CONTENT_PROPERTY_KEY, oid); this._contentBlobs.push(oid); return this; @@ -559,7 +567,9 @@ export class PatchBuilderV2 { _assertNoReservedBytes(to, 'to'); _assertNoReservedBytes(label, 'label'); _assertNoReservedBytes(CONTENT_PROPERTY_KEY, 'key'); - const oid = await this._persistence.writeBlob(content); + const oid = this._blobStorage + ? await this._blobStorage.store(content, { slug: `${this._graphName}/${from}/${to}/${label}` }) + : await this._persistence.writeBlob(content); this.setEdgeProperty(from, to, label, CONTENT_PROPERTY_KEY, oid); this._contentBlobs.push(oid); return this; @@ -718,9 +728,11 @@ export class PatchBuilderV2 { writes: [...this._writes].sort(), }); - // 6. 
Encode patch as CBOR and write as a Git blob + // 6. Encode patch as CBOR and write as a Git blob (or encrypted CAS asset) const patchCbor = this._codec.encode(patch); - const patchBlobOid = await this._persistence.writeBlob(patchCbor); + const patchBlobOid = this._patchBlobStorage + ? await this._patchBlobStorage.store(patchCbor, { slug: `${this._graphName}/${this._writerId}/patch` }) + : await this._persistence.writeBlob(patchCbor); // 7. Create tree with the patch blob + any content blobs (deduplicated) // Format for mktree: "mode type oid\tpath" @@ -738,6 +750,10 @@ export class PatchBuilderV2 { lamport, patchOid: patchBlobOid, schema, + // "encrypted" is a legacy wire name meaning "patch blob stored externally + // via patchBlobStorage" (see ADR-0002). The flag tells readers to retrieve + // the blob via BlobStoragePort instead of reading it directly from Git. + encrypted: !!this._patchBlobStorage, }); const parents = parentCommit ? [parentCommit] : []; const newCommitSha = await this._persistence.commitNodeWithTree({ diff --git a/src/domain/services/PatchMessageCodec.js b/src/domain/services/PatchMessageCodec.js index 4afa8c43..6718deaa 100644 --- a/src/domain/services/PatchMessageCodec.js +++ b/src/domain/services/PatchMessageCodec.js @@ -30,7 +30,7 @@ import { /** * Encodes a patch commit message. * - * @param {{ graph: string, writer: string, lamport: number, patchOid: string, schema?: number }} options - The patch message options + * @param {{ graph: string, writer: string, lamport: number, patchOid: string, schema?: number, encrypted?: boolean }} options - The patch message options * @returns {string} The encoded commit message * @throws {Error} If any validation fails * @@ -42,7 +42,7 @@ import { * patchOid: 'abc123...' 
// 40-char hex * }); */ -export function encodePatchMessage({ graph, writer, lamport, patchOid, schema = 2 }) { +export function encodePatchMessage({ graph, writer, lamport, patchOid, schema = 2, encrypted = false }) { // Validate inputs validateGraphName(graph); validateWriterId(writer); @@ -51,16 +51,21 @@ export function encodePatchMessage({ graph, writer, lamport, patchOid, schema = validateSchema(schema); const codec = getCodec(); + /** @type {Record} */ + const trailers = { + [TRAILER_KEYS.kind]: 'patch', + [TRAILER_KEYS.graph]: graph, + [TRAILER_KEYS.writer]: writer, + [TRAILER_KEYS.lamport]: String(lamport), + [TRAILER_KEYS.patchOid]: patchOid, + [TRAILER_KEYS.schema]: String(schema), + }; + if (encrypted) { + trailers[TRAILER_KEYS.encrypted] = 'true'; + } return codec.encode({ title: MESSAGE_TITLES.patch, - trailers: { - [TRAILER_KEYS.kind]: 'patch', - [TRAILER_KEYS.graph]: graph, - [TRAILER_KEYS.writer]: writer, - [TRAILER_KEYS.lamport]: String(lamport), - [TRAILER_KEYS.patchOid]: patchOid, - [TRAILER_KEYS.schema]: String(schema), - }, + trailers, }); } @@ -72,7 +77,7 @@ export function encodePatchMessage({ graph, writer, lamport, patchOid, schema = * Decodes a patch commit message. 
* * @param {string} message - The raw commit message - * @returns {{ kind: 'patch', graph: string, writer: string, lamport: number, patchOid: string, schema: number }} The decoded patch message + * @returns {{ kind: 'patch', graph: string, writer: string, lamport: number, patchOid: string, schema: number, encrypted: boolean }} The decoded patch message * @throws {Error} If the message is not a valid patch message * * @example @@ -93,6 +98,8 @@ export function decodePatchMessage(message) { validateOid(patchOid, 'patchOid'); const schema = parsePositiveIntTrailer(trailers, 'schema', 'patch'); + const encrypted = trailers[TRAILER_KEYS.encrypted] === 'true'; + return { kind: 'patch', graph, @@ -100,5 +107,6 @@ export function decodePatchMessage(message) { lamport, patchOid, schema, + encrypted, }; } diff --git a/src/domain/services/PropertyIndexReader.js b/src/domain/services/PropertyIndexReader.js index cc89c976..e95bfc46 100644 --- a/src/domain/services/PropertyIndexReader.js +++ b/src/domain/services/PropertyIndexReader.js @@ -85,7 +85,7 @@ export default class PropertyIndexReader { return null; } - const buffer = await /** @type {{ readBlob(oid: string): Promise }} */ (this._storage).readBlob(oid); + const buffer = await /** @type {{ readBlob(oid: string): Promise }} */ (this._storage).readBlob(oid); if (buffer === null || buffer === undefined) { throw new Error(`PropertyIndexReader: missing blob for OID '${oid}' (${path})`); } diff --git a/src/domain/services/StreamingBitmapIndexBuilder.js b/src/domain/services/StreamingBitmapIndexBuilder.js index 07322a55..6deca61d 100644 --- a/src/domain/services/StreamingBitmapIndexBuilder.js +++ b/src/domain/services/StreamingBitmapIndexBuilder.js @@ -8,6 +8,7 @@ import { checkAborted } from '../utils/cancellation.js'; import { getRoaringBitmap32 } from '../utils/roaring.js'; import { canonicalStringify } from '../utils/canonicalStringify.js'; import { SHARD_VERSION } from '../utils/shardVersion.js'; +import { textEncode, 
base64Encode, base64Decode } from '../utils/bytes.js'; /** @typedef {import('../types/WarpPersistence.js').IndexStorage} IndexStorage */ @@ -185,7 +186,7 @@ export default class StreamingBitmapIndexBuilder { if (!bitmapShards[type][prefix]) { bitmapShards[type][prefix] = {}; } - bitmapShards[type][prefix][sha] = Buffer.from(bitmap.serialize(true)).toString('base64'); + bitmapShards[type][prefix][sha] = base64Encode(new Uint8Array(bitmap.serialize(true))); } return bitmapShards; } @@ -216,7 +217,7 @@ export default class StreamingBitmapIndexBuilder { checksum, data: shardData, }; - const buffer = Buffer.from(JSON.stringify(envelope)); + const buffer = textEncode(JSON.stringify(envelope)); const oid = await this.storage.writeBlob(buffer); if (!this.flushedChunks.has(path)) { this.flushedChunks.set(path, []); @@ -326,7 +327,7 @@ export default class StreamingBitmapIndexBuilder { checksum: await computeChecksum(map, this._crypto), data: map, }; - const buffer = Buffer.from(JSON.stringify(envelope)); + const buffer = textEncode(JSON.stringify(envelope)); const oid = await this.storage.writeBlob(buffer); return `100644 blob ${oid}\t${path}`; }) @@ -418,9 +419,9 @@ export default class StreamingBitmapIndexBuilder { sorted[key] = frontier.get(key); } const envelope = { version: 1, writerCount: frontier.size, frontier: sorted }; - const cborOid = await this.storage.writeBlob(Buffer.from(this._codec.encode(envelope))); + const cborOid = await this.storage.writeBlob(this._codec.encode(envelope)); flatEntries.push(`100644 blob ${cborOid}\tfrontier.cbor`); - const jsonOid = await this.storage.writeBlob(Buffer.from(canonicalStringify(envelope))); + const jsonOid = await this.storage.writeBlob(textEncode(canonicalStringify(envelope))); flatEntries.push(`100644 blob ${jsonOid}\tfrontier.json`); } @@ -589,7 +590,7 @@ export default class StreamingBitmapIndexBuilder { _mergeDeserializedBitmap({ merged, sha, base64Bitmap, oid }) { let bitmap; try { - bitmap = 
this._RoaringBitmap32.deserialize(Buffer.from(base64Bitmap, 'base64'), true); + bitmap = this._RoaringBitmap32.deserialize(base64Decode(base64Bitmap), true); } catch (err) { throw new ShardCorruptionError('Failed to deserialize bitmap', { oid, @@ -652,7 +653,7 @@ export default class StreamingBitmapIndexBuilder { /** @type {Record} */ const result = {}; for (const [sha, bitmap] of Object.entries(merged)) { - result[sha] = Buffer.from(bitmap.serialize(true)).toString('base64'); + result[sha] = base64Encode(new Uint8Array(bitmap.serialize(true))); } // Wrap merged result in envelope with version and checksum @@ -664,7 +665,7 @@ export default class StreamingBitmapIndexBuilder { let serialized; try { - serialized = Buffer.from(JSON.stringify(mergedEnvelope)); + serialized = textEncode(JSON.stringify(mergedEnvelope)); } catch (err) { throw new ShardCorruptionError('Failed to serialize merged shard', { reason: 'serialization_error', diff --git a/src/domain/services/SyncAuthService.js b/src/domain/services/SyncAuthService.js index 865469b6..ebbf484c 100644 --- a/src/domain/services/SyncAuthService.js +++ b/src/domain/services/SyncAuthService.js @@ -13,6 +13,7 @@ import LRUCache from '../utils/LRUCache.js'; import defaultCrypto from '../utils/defaultCrypto.js'; import nullLogger from '../utils/nullLogger.js'; import { validateWriterId } from '../utils/RefLayout.js'; +import { hexEncode, hexDecode } from '../utils/bytes.js'; const SIG_VERSION = '1'; const SIG_PREFIX = 'warp-v1'; @@ -48,7 +49,7 @@ export function buildCanonicalPayload({ keyId, method, path, timestamp, nonce, c /** * Signs an outgoing sync request. 
* - * @param {{ method: string, path: string, contentType: string, body: Buffer|Uint8Array, secret: string, keyId: string }} params + * @param {{ method: string, path: string, contentType: string, body: Uint8Array, secret: string, keyId: string }} params * @param {{ crypto?: import('../../ports/CryptoPort.js').default }} [deps] * @returns {Promise>} Auth headers */ @@ -71,7 +72,7 @@ export async function signSyncRequest({ method, path, contentType, body, secret, }); const hmacBuf = await c.hmac(HMAC_ALGO, secret, canonical); - const signature = Buffer.from(hmacBuf).toString('hex'); + const signature = hexEncode(hmacBuf); return { 'x-warp-sig-version': SIG_VERSION, @@ -267,7 +268,7 @@ export default class SyncAuthService { /** * Verifies the HMAC signature against the canonical payload. * - * @param {{ request: { method: string, url: string, headers: Record, body?: Buffer|Uint8Array }, secret: string, keyId: string, timestamp: string, nonce: string }} params + * @param {{ request: { method: string, url: string, headers: Record, body?: Uint8Array }, secret: string, keyId: string, timestamp: string, nonce: string }} params * @returns {Promise<{ ok: false, reason: string, status: number } | { ok: true }>} * @private */ @@ -290,9 +291,10 @@ export default class SyncAuthService { const expectedBuf = await this._crypto.hmac(HMAC_ALGO, secret, canonical); const receivedHex = request.headers['x-warp-signature']; + /** @type {Uint8Array} */ let receivedBuf; try { - receivedBuf = Buffer.from(receivedHex, 'hex'); + receivedBuf = hexDecode(receivedHex); } catch { return fail('INVALID_SIGNATURE', 401); } @@ -304,7 +306,7 @@ export default class SyncAuthService { let equal; try { equal = this._crypto.timingSafeEqual( - Buffer.from(expectedBuf), + expectedBuf, receivedBuf, ); } catch { @@ -321,7 +323,7 @@ export default class SyncAuthService { /** * Verifies an incoming sync request. 
* - * @param {{ method: string, url: string, headers: Record, body?: Buffer|Uint8Array }} request + * @param {{ method: string, url: string, headers: Record, body?: Uint8Array }} request * @returns {Promise<{ ok: true } | { ok: false, reason: string, status: number }>} */ async verify(request) { diff --git a/src/domain/services/SyncController.js b/src/domain/services/SyncController.js index 789f4034..1072816e 100644 --- a/src/domain/services/SyncController.js +++ b/src/domain/services/SyncController.js @@ -48,6 +48,7 @@ import SyncTrustGate from './SyncTrustGate.js'; * @property {import('../../ports/CodecPort.js').default} _codec * @property {import('../../ports/CryptoPort.js').default} _crypto * @property {import('../../ports/LoggerPort.js').default|null} _logger + * @property {import('../../ports/BlobStoragePort.js').default|null} [_patchBlobStorage] * @property {number} _patchesSinceCheckpoint * @property {(op: string, t0: number, opts?: {metrics?: string, error?: Error}) => void} _logTiming * @property {(options?: Record) => Promise} materialize @@ -270,7 +271,7 @@ export default class SyncController { localFrontier, persistence, this._host._graphName, - { codec: this._host._codec, logger: this._host._logger || undefined } + { codec: this._host._codec, logger: this._host._logger || undefined, patchBlobStorage: this._host._patchBlobStorage || undefined } ); } diff --git a/src/domain/services/SyncProtocol.js b/src/domain/services/SyncProtocol.js index 945563b6..e89af441 100644 --- a/src/domain/services/SyncProtocol.js +++ b/src/domain/services/SyncProtocol.js @@ -41,6 +41,8 @@ import nullLogger from '../utils/nullLogger.js'; import { decodePatchMessage, assertOpsCompatible, SCHEMA_V3 } from './WarpMessageCodec.js'; import { join, cloneStateV5, isKnownRawOp } from './JoinReducer.js'; import SchemaUnsupportedError from '../errors/SchemaUnsupportedError.js'; +import EncryptionError from '../errors/EncryptionError.js'; +import PersistenceError from 
'../errors/PersistenceError.js'; import { cloneFrontier, updateFrontier } from './Frontier.js'; import { vvDeserialize } from '../crdt/VersionVector.js'; @@ -125,7 +127,7 @@ function objectToFrontier(obj) { * @param {import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default} persistence - Git persistence layer * (uses CommitPort.showNode() + BlobPort.readBlob() methods) * @param {string} sha - The 40-character commit SHA to load the patch from - * @param {{ codec?: import('../../ports/CodecPort.js').default }} [options] + * @param {{ codec?: import('../../ports/CodecPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} [options] * @returns {Promise} The decoded and normalized patch object containing: * - `ops`: Array of patch operations * - `context`: VersionVector (Map) of causal dependencies @@ -135,16 +137,35 @@ function objectToFrontier(obj) { * @throws {Error} If the commit message cannot be decoded (malformed, wrong schema) * @throws {Error} If the patch blob cannot be read (blob not found, I/O error) * @throws {Error} If the patch blob cannot be CBOR-decoded (corrupted data) + * @throws {EncryptionError} If the patch is encrypted but no patchBlobStorage is provided * @private */ -async function loadPatchFromCommit(persistence, sha, { codec: codecOpt } = /** @type {{ codec?: import('../../ports/CodecPort.js').default }} */ ({})) { +async function loadPatchFromCommit(persistence, sha, { codec: codecOpt, patchBlobStorage } = /** @type {{ codec?: import('../../ports/CodecPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} */ ({})) { const codec = codecOpt || defaultCodec; // Read commit message to extract patch OID const message = await persistence.showNode(sha); const decoded = decodePatchMessage(message); - // Read and decode the patch blob - const patchBuffer = await persistence.readBlob(decoded.patchOid); + // Read the patch blob (encrypted or plain) + /** 
@type {Uint8Array} */ + let patchBuffer; + if (decoded.encrypted) { + if (!patchBlobStorage) { + throw new EncryptionError( + 'This graph contains encrypted patches; provide patchBlobStorage with an encryption key', + ); + } + patchBuffer = await patchBlobStorage.retrieve(decoded.patchOid); + } else { + patchBuffer = await persistence.readBlob(decoded.patchOid); + } + if (!patchBuffer) { + throw new PersistenceError( + `Patch blob not found: ${decoded.patchOid}`, + PersistenceError.E_MISSING_OBJECT, + { context: { oid: decoded.patchOid } }, + ); + } const patch = /** @type {DecodedPatch} */ (codec.decode(patchBuffer)); // Normalize the patch (convert context from object to Map) @@ -172,7 +193,7 @@ async function loadPatchFromCommit(persistence, sha, { codec: codecOpt } = /** @ * @param {string|null} fromSha - Start SHA (exclusive). Pass null to load ALL patches * for this writer from the beginning of their chain. * @param {string} toSha - End SHA (inclusive). This is typically the writer's current tip. - * @param {{ codec?: import('../../ports/CodecPort.js').default }} [options] + * @param {{ codec?: import('../../ports/CodecPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} [options] * @returns {Promise>} Array of patch objects in * chronological order (oldest first). 
Each entry contains: * - `patch`: The decoded patch object @@ -191,7 +212,7 @@ async function loadPatchFromCommit(persistence, sha, { codec: codecOpt } = /** @ * // Load ALL patches for a new writer * const patches = await loadPatchRange(persistence, 'events', 'new-writer', null, tipSha); */ -export async function loadPatchRange(persistence, graphName, writerId, fromSha, toSha, { codec } = /** @type {{ codec?: import('../../ports/CodecPort.js').default }} */ ({})) { +export async function loadPatchRange(persistence, graphName, writerId, fromSha, toSha, { codec, patchBlobStorage } = /** @type {{ codec?: import('../../ports/CodecPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} */ ({})) { const patches = []; let cur = toSha; @@ -200,7 +221,7 @@ export async function loadPatchRange(persistence, graphName, writerId, fromSha, const commitInfo = await persistence.getNodeInfo(cur); // Load patch from commit - const patch = await loadPatchFromCommit(persistence, cur, { codec }); + const patch = await loadPatchFromCommit(persistence, cur, { codec, patchBlobStorage }); patches.unshift({ patch, sha: cur }); // Prepend for chronological order // Move to parent (first parent in linear chain) @@ -394,7 +415,7 @@ export function createSyncRequest(frontier) { * @param {import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default} persistence - Git persistence * layer for loading patches (uses CommitPort + BlobPort methods) * @param {string} graphName - Graph name for error messages and logging - * @param {{ codec?: import('../../ports/CodecPort.js').default, logger?: import('../../ports/LoggerPort.js').default }} [options] + * @param {{ codec?: import('../../ports/CodecPort.js').default, logger?: import('../../ports/LoggerPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} [options] * @returns {Promise} Response containing local frontier and patches. 
* Patches are ordered chronologically within each writer. * @throws {Error} If patch loading fails for reasons other than divergence @@ -408,7 +429,7 @@ export function createSyncRequest(frontier) { * res.json(response); * }); */ -export async function processSyncRequest(request, localFrontier, persistence, graphName, { codec, logger } = /** @type {{ codec?: import('../../ports/CodecPort.js').default, logger?: import('../../ports/LoggerPort.js').default }} */ ({})) { +export async function processSyncRequest(request, localFrontier, persistence, graphName, { codec, logger, patchBlobStorage } = /** @type {{ codec?: import('../../ports/CodecPort.js').default, logger?: import('../../ports/LoggerPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} */ ({})) { const log = logger || nullLogger; const remoteFrontier = objectToFrontier(request.frontier); @@ -452,7 +473,7 @@ export async function processSyncRequest(request, localFrontier, persistence, gr writerId, range.from, range.to, - { codec } + { codec, patchBlobStorage } ); for (const { patch, sha } of writerPatches) { diff --git a/src/domain/services/WarpServeService.js b/src/domain/services/WarpServeService.js new file mode 100644 index 00000000..217eb2c9 --- /dev/null +++ b/src/domain/services/WarpServeService.js @@ -0,0 +1,647 @@ +/** + * WarpServeService — domain service that bridges WarpGraph instances + * to browser clients over a WebSocketServerPort. + * + * Responsibilities: + * - Accepts WebSocket connections and manages client lifecycle + * - Sends materialized state on `open` requests + * - Forwards graph diffs to subscribed clients in real-time + * - Applies mutations (addNode, removeNode, addEdge, etc.) 
from clients + * - Supports time-travel (seek) and node inspection + * + * @module domain/services/WarpServeService + */ + +import { orsetElements } from '../crdt/ORSet.js'; +import { lwwValue } from '../crdt/LWW.js'; +import { decodePropKey, isEdgePropKey, decodeEdgeKey } from './KeyCodec.js'; + +const PROTOCOL_VERSION = 1; + +/** + * Allowlist of PatchBuilderV2 methods that WebSocket clients may invoke. + * Prevents arbitrary method calls via untrusted `op` strings. + * @type {Set} + */ +const ALLOWED_MUTATE_OPS = new Set([ + 'addNode', + 'removeNode', + 'addEdge', + 'removeEdge', + 'setProperty', + 'setEdgeProperty', + 'attachContent', + 'attachEdgeContent', +]); + +/** + * Expected argument signatures for each mutation op. + * Each entry specifies the count and types of required arguments. + * @type {Record} + */ +const MUTATE_OP_SIGNATURES = { + addNode: ['string'], + removeNode: ['string'], + addEdge: ['string', 'string', 'string'], + removeEdge: ['string', 'string', 'string'], + setProperty: ['string', 'string', '*'], + setEdgeProperty: ['string', 'string', 'string', 'string', '*'], + // Binary content (Uint8Array) cannot survive JSON serialisation — these + // require string input over the WebSocket JSON protocol. A future binary + // protocol could lift this limitation. + attachContent: ['string', 'string'], + attachEdgeContent: ['string', 'string', 'string', 'string'], +}; + +/** Maximum serialized size for wildcard property values (64 KiB). */ +const MAX_WILDCARD_SIZE = 65_536; + +/** + * Validates a single wildcard-typed argument against size limits. 
+ * + * @param {string} op + * @param {number} i + * @param {unknown} arg + * @returns {string|null} + */ +function validateWildcardArg(op, i, arg) { + if (typeof arg === 'string' && arg.length > MAX_WILDCARD_SIZE) { + return `${op}: arg[${i}] exceeds 64 KiB string limit`; + } + if (typeof arg === 'object' && arg !== null && JSON.stringify(arg).length > MAX_WILDCARD_SIZE) { + return `${op}: arg[${i}] exceeds 64 KiB serialized limit`; + } + return null; +} + +/** + * Validates that args match the expected signature for an op. + * + * @param {string} op + * @param {unknown[]} args + * @returns {string|null} Error message if invalid, null if valid + */ +function validateMutateArgs(op, args) { + const sig = MUTATE_OP_SIGNATURES[op]; + if (!sig) { + return `Unknown op: ${op}`; + } + if (!Array.isArray(args)) { + return `${op}: args must be an array`; + } + if (args.length !== sig.length) { + return `${op}: expected ${sig.length} args, got ${args.length}`; + } + for (let i = 0; i < sig.length; i++) { + if (sig[i] === '*') { + const err = validateWildcardArg(op, i, args[i]); + if (err) { return err; } + } else if (typeof args[i] !== sig[i]) { + return `${op}: arg[${i}] must be ${sig[i]}, got ${typeof args[i]}`; + } + } + return null; +} + +/** + * @typedef {import('../../ports/WebSocketServerPort.js').WsConnection} WsConnection + * @typedef {import('../../ports/WebSocketServerPort.js').WsServerHandle} WsServerHandle + */ + +/** + * @typedef {Object} ClientSession + * @property {WsConnection} conn + * @property {Set} openGraphs - Graph names this client has opened + */ + +/** + * Minimal duck-typed shape of a WarpGraph instance as consumed by + * WarpServeService. Uses the `import()` type directly so tsc can + * structurally match WarpGraph without re-declaring its overloaded + * signatures. + * + * @typedef {import('../WarpGraph.js').default} GraphHandle + */ + +/** + * Envelope shape for all protocol messages. 
+ * @typedef {Object} Envelope + * @property {number} v - Protocol version + * @property {string} type - Message type + * @property {string} [id] - Request correlation ID + * @property {unknown} payload - Message-specific data + */ + +/** + * Serializes materialized state into a plain object suitable for JSON. + * + * @param {string} graphName + * @param {import('./JoinReducer.js').WarpStateV5} state + * @returns {{ graph: string, nodes: Array<{ id: string, props: Record }>, edges: Array<{ from: string, to: string, label: string }>, frontier: Record }} + */ +function serializeState(graphName, state) { + // Build node-to-props index to avoid O(nodes × props) scan + /** @type {Map>} */ + const nodePropsMap = new Map(); + for (const [key, reg] of state.prop) { + // Edge properties are intentionally omitted in the MVP wire format. + // Edges are serialized as {from, to, label} only. A future protocol + // version should include edge props alongside node props. + // TODO: serialize edge properties when protocol supports them + if (isEdgePropKey(key)) { continue; } + const decoded = decodePropKey(key); + let props = nodePropsMap.get(decoded.nodeId); + if (!props) { + props = {}; + nodePropsMap.set(decoded.nodeId, props); + } + props[decoded.propKey] = lwwValue(reg); + } + + const nodes = []; + for (const nodeId of orsetElements(state.nodeAlive)) { + nodes.push({ id: nodeId, props: nodePropsMap.get(nodeId) || {} }); + } + + const edges = []; + for (const edgeKey of orsetElements(state.edgeAlive)) { + const decoded = decodeEdgeKey(edgeKey); + edges.push({ from: decoded.from, to: decoded.to, label: decoded.label }); + } + + /** @type {Record} */ + const frontier = {}; + if (state.observedFrontier) { + for (const [writer, ts] of state.observedFrontier) { + frontier[writer] = ts; + } + } + + return { graph: graphName, nodes, edges, frontier }; +} + +/** + * Builds a protocol envelope. 
+ * + * @param {string} type + * @param {unknown} payload + * @param {string} [id] + * @returns {string} + */ +function envelope(type, payload, id) { + /** @type {Envelope} */ + const msg = { v: PROTOCOL_VERSION, type, payload }; + if (id !== undefined) { msg.id = id; } + return JSON.stringify(msg); +} + +/** + * Builds an error envelope. + * + * @param {string} code + * @param {string} message + * @param {string} [id] + * @returns {string} + */ +function errorEnvelope(code, message, id) { + return envelope('error', { code, message }, id); +} + +/** + * Validates payload graph name and resolves the graph object. + * Sends an error envelope and returns null on failure. + * + * @param {ClientSession} session + * @param {Envelope} msg + * @param {{ graphs: Map, requireOpen?: boolean }} opts + * @returns {{ graphName: string, graph: GraphHandle }|null} + */ +function resolveGraph(session, msg, { graphs, requireOpen = true }) { + const { payload } = msg; + const graphName = /** @type {Record} */ (payload)?.graph; + + if (typeof graphName !== 'string' || graphName.length === 0) { + session.conn.send(errorEnvelope('E_INVALID_PAYLOAD', `${msg.type}: graph must be a non-empty string`, msg.id)); + return null; + } + if (requireOpen && !session.openGraphs.has(graphName)) { + session.conn.send(errorEnvelope('E_NOT_OPENED', `Graph not opened: ${graphName}`, msg.id)); + return null; + } + const graph = graphs.get(graphName); + if (!graph) { + session.conn.send(errorEnvelope('E_UNKNOWN_GRAPH', `Unknown graph: ${graphName}`, msg.id)); + return null; + } + return { graphName, graph }; +} + +export default class WarpServeService { + /** + * @param {{ wsPort: import('../../ports/WebSocketServerPort.js').default, graphs: GraphHandle[] }} options + */ + constructor({ wsPort, graphs }) { + if (!wsPort || typeof wsPort.createServer !== 'function') { + throw new Error('wsPort must be a WebSocketServerPort'); + } + if (!Array.isArray(graphs) || graphs.length === 0) { + throw new Error('At 
least one graph is required'); + } + + /** @type {import('../../ports/WebSocketServerPort.js').default} */ + this._wsPort = wsPort; + + /** @type {Map} */ + this._graphs = new Map(); + for (const g of graphs) { + this._graphs.set(g.graphName, g); + } + + /** @type {Set} */ + this._clients = new Set(); + + /** @type {Map void }>} */ + this._subscriptions = new Map(); + + /** @type {WsServerHandle|null} */ + this._server = null; + } + + /** + * Start listening for WebSocket connections. + * + * @param {number} port + * @param {string} [host] + * @returns {Promise<{ port: number, host: string }>} + */ + async listen(port, host) { + if (this._server) { + throw new Error('Server is already listening'); + } + + const server = this._wsPort.createServer((conn) => this._onConnection(conn)); + + // Subscribe to each graph for live diff push. + // Subscriptions are created before bind so diffs aren't missed between + // bind and subscribe — but we must clean up if bind fails. + /** @type {Map void }>} */ + const subs = new Map(); + for (const [graphName, graph] of this._graphs) { + const sub = graph.subscribe({ + onChange: (/** @type {unknown} */ diff) => this._broadcastDiff(graphName, diff), + }); + subs.set(graphName, sub); + } + + try { + const result = await server.listen(port, host); + // Bind succeeded — commit state mutations + this._server = server; + this._subscriptions = subs; + return result; + } catch (err) { + // Bind failed — clean up subscriptions to prevent leaked broadcast handlers + for (const [, sub] of subs) { + sub.unsubscribe(); + } + throw err; + } + } + + /** + * Shut down the server and clean up subscriptions. + * + * @returns {Promise} + */ + async close() { + for (const [, sub] of this._subscriptions) { + sub.unsubscribe(); + } + this._subscriptions.clear(); + + for (const client of this._clients) { + try { + client.conn.close(); + } catch { + // Best-effort — connection may already be dead. 
+ } + } + this._clients.clear(); + + if (this._server) { + await this._server.close(); + this._server = null; + } + } + + /** + * Handle a new WebSocket connection. + * + * @param {WsConnection} conn + * @private + */ + _onConnection(conn) { + /** @type {ClientSession} */ + const session = { + conn, + openGraphs: new Set(), + }; + this._clients.add(session); + + // Send hello + conn.send(envelope('hello', { + protocol: PROTOCOL_VERSION, + graphs: [...this._graphs.keys()], + })); + + conn.onMessage((raw) => { + // Extract correlation ID before the async call so the catch handler + // can correlate the error without re-parsing the raw message. + /** @type {string|undefined} */ + let id; + try { id = JSON.parse(raw).id; } catch { /* unparseable — no id */ } + + this._onMessage(session, raw).catch(() => { + // Errors are caught and sent as error envelopes inside _onMessage handlers. + // This catch prevents unhandled rejection for truly unexpected failures. + // Send a generic message to avoid leaking internal details (file paths, + // stack traces, etc.) to untrusted WebSocket clients. + session.conn.send(errorEnvelope( + 'E_INTERNAL', + 'Internal error', + id, + )); + }); + }); + conn.onClose(() => this._clients.delete(session)); + } + + /** + * Handle an incoming message from a client. + * + * @param {ClientSession} session + * @param {string} raw + * @private + */ + async _onMessage(session, raw) { + // Approximate check: String.length counts UTF-16 code units, not bytes. + // For ASCII-heavy JSON this is close enough; multi-byte characters could + // make the actual byte count higher than the code-unit count. 
+ if (raw.length > 1_048_576) { + session.conn.send(errorEnvelope('E_MESSAGE_TOO_LARGE', 'Message exceeds 1 MiB limit')); + return; + } + + /** @type {Envelope} */ + let msg; + try { + msg = JSON.parse(raw); + } catch { + session.conn.send(errorEnvelope('E_INVALID_MESSAGE', 'Invalid JSON')); + return; + } + + if (!msg || typeof msg.type !== 'string') { + session.conn.send(errorEnvelope('E_INVALID_MESSAGE', 'Missing type field')); + return; + } + + if (msg.v !== PROTOCOL_VERSION) { + session.conn.send(errorEnvelope( + 'E_UNSUPPORTED_VERSION', + `Unsupported protocol version: ${msg.v}. Expected: ${PROTOCOL_VERSION}`, + msg.id, + )); + return; + } + + switch (msg.type) { + case 'open': + await this._handleOpen(session, msg); + break; + case 'mutate': + await this._handleMutate(session, msg); + break; + case 'inspect': + await this._handleInspect(session, msg); + break; + case 'seek': + await this._handleSeek(session, msg); + break; + default: + session.conn.send(errorEnvelope( + 'E_UNKNOWN_TYPE', + `Unknown message type: ${msg.type}`, + msg.id, + )); + } + } + + /** + * Handle 'open' — client subscribes to a graph. + * + * `materialize()` is called without `receipts: true`, so the return is + * always a plain `WarpStateV5` (not a `MaterializeResult` with receipts). + * + * @param {ClientSession} session + * @param {Envelope} msg + * @private + */ + async _handleOpen(session, msg) { + const resolved = resolveGraph(session, msg, { graphs: this._graphs, requireOpen: false }); + if (!resolved) { return; } + const { graphName, graph } = resolved; + + let state; + try { + state = await graph.materialize(); + } catch (err) { + session.conn.send(errorEnvelope( + 'E_MATERIALIZE_FAILED', + err instanceof Error ? 
err.message : 'Materialization failed', + msg.id, + )); + return; + } + + session.openGraphs.add(graphName); + const serialized = serializeState(graphName, state); + session.conn.send(envelope('state', serialized, msg.id)); + } + + /** + * Handle 'mutate' — client sends graph mutations. + * + * @param {ClientSession} session + * @param {Envelope} msg + * @private + */ + async _handleMutate(session, msg) { + const { payload } = msg; + const ops = /** @type {Array<{ op: string, args: unknown[] }>|undefined} */ ( + /** @type {Record} */ (payload)?.ops + ); + + if (!Array.isArray(ops)) { + session.conn.send(errorEnvelope('E_INVALID_PAYLOAD', 'mutate: ops must be an array', msg.id)); + return; + } + + const resolved = resolveGraph(session, msg, { graphs: this._graphs }); + if (!resolved) { return; } + + await this._applyMutateOps(session, msg, { graph: resolved.graph, ops }); + } + + /** + * Validate and apply mutation ops for _handleMutate. + * + * @param {ClientSession} session + * @param {Envelope} msg + * @param {{ graph: GraphHandle, ops: Array<{ op: string, args: unknown[] }> }} ctx + * @private + */ + async _applyMutateOps(session, msg, { graph, ops }) { + // Pre-validate ALL ops before creating a patch + for (const { op, args } of ops) { + if (!ALLOWED_MUTATE_OPS.has(op)) { + session.conn.send(errorEnvelope('E_INVALID_OP', `Unknown mutation op: ${op}`, msg.id)); + return; + } + const argError = validateMutateArgs(op, args); + if (argError) { + session.conn.send(errorEnvelope('E_INVALID_ARGS', argError, msg.id)); + return; + } + } + + try { + const patch = await graph.createPatch(); + for (const { op, args } of ops) { + await /** @type {Record Promise>} */ (/** @type {unknown} */ (patch))[op](...args); + } + const sha = await patch.commit(); + session.conn.send(envelope('ack', { sha }, msg.id)); + } catch (err) { + session.conn.send(errorEnvelope( + 'E_MUTATE_FAILED', + err instanceof Error ? 
err.message : 'Mutation failed', + msg.id, + )); + } + } + + /** + * Handle 'inspect' — client requests node properties. + * + * @param {ClientSession} session + * @param {Envelope} msg + * @private + */ + async _handleInspect(session, msg) { + const { payload } = msg; + const nodeId = /** @type {string} */ (/** @type {Record} */ (payload)?.nodeId); + + if (typeof nodeId !== 'string' || nodeId.length === 0) { + session.conn.send(errorEnvelope('E_INVALID_PAYLOAD', 'inspect: nodeId must be a non-empty string', msg.id)); + return; + } + + const resolved = resolveGraph(session, msg, { graphs: this._graphs }); + if (!resolved) { return; } + const { graphName, graph } = resolved; + + try { + const props = await graph.getNodeProps(nodeId); + session.conn.send(envelope('inspect', { graph: graphName, nodeId, props }, msg.id)); + } catch (err) { + session.conn.send(errorEnvelope( + 'E_INSPECT_FAILED', + err instanceof Error ? err.message : 'Inspect failed', + msg.id, + )); + } + } + + /** + * Validates a seek ceiling value. Returns an error message or null. + * Infinity is intentionally accepted (treated as "materialize at head"). + * + * @param {unknown} ceiling + * @returns {string|null} + * @private + */ + _validateCeiling(ceiling) { + if (typeof ceiling !== 'number' || ceiling < 0 || Number.isNaN(ceiling)) { + return 'seek: ceiling must be a non-negative number'; + } + if (Number.isFinite(ceiling) && !Number.isInteger(ceiling)) { + return 'seek: ceiling must be an integer'; + } + return null; + } + + /** + * Handle 'seek' — client requests time-travel materialization. 
+ * + * @param {ClientSession} session + * @param {Envelope} msg + * @private + */ + async _handleSeek(session, msg) { + const { payload } = msg; + const ceiling = /** @type {number} */ (/** @type {Record} */ (payload)?.ceiling); + + const ceilingError = this._validateCeiling(ceiling); + if (ceilingError) { + session.conn.send(errorEnvelope('E_INVALID_PAYLOAD', ceilingError, msg.id)); + return; + } + + const resolved = resolveGraph(session, msg, { graphs: this._graphs }); + if (!resolved) { return; } + const { graphName, graph } = resolved; + + try { + const opts = Number.isFinite(ceiling) ? { ceiling } : {}; + const state = await graph.materialize(opts); + const serialized = serializeState(graphName, state); + session.conn.send(envelope('state', serialized, msg.id)); + } catch (err) { + session.conn.send(errorEnvelope( + 'E_SEEK_FAILED', + err instanceof Error ? err.message : 'Seek failed', + msg.id, + )); + } + } + + /** + * Broadcast a diff to all clients subscribed to the given graph. + * + * @param {string} graphName + * @param {unknown} diff + * @private + */ + _broadcastDiff(graphName, diff) { + const msg = envelope('diff', { graph: graphName, diff }); + /** @type {ClientSession[]} */ + const dead = []; + for (const client of this._clients) { + if (client.openGraphs.has(graphName)) { + try { + client.conn.send(msg); + } catch { + // Dead connection — evict after iteration. No logger is + // available at this layer; the `onClose` handler also evicts, + // but `send()` can throw before `close` fires on a reset + // connection. We must not delete from the Set mid-iteration. 
+ dead.push(client); + } + } + } + for (const client of dead) { + this._clients.delete(client); + } + } +} diff --git a/src/domain/services/WarpStateIndexBuilder.js b/src/domain/services/WarpStateIndexBuilder.js index 3b8cd08d..394032f4 100644 --- a/src/domain/services/WarpStateIndexBuilder.js +++ b/src/domain/services/WarpStateIndexBuilder.js @@ -88,7 +88,7 @@ export default class WarpStateIndexBuilder { /** * Serializes the index to a tree structure of buffers. * - * @returns {Promise>} Map of path → serialized content + * @returns {Promise>} Map of path → serialized content */ async serialize() { return await this._builder.serialize(); @@ -109,7 +109,7 @@ export default class WarpStateIndexBuilder { * * @param {import('./JoinReducer.js').WarpStateV5} state - The materialized state * @param {{ crypto?: import('../../ports/CryptoPort.js').default }} [options] - Configuration - * @returns {Promise<{tree: Record, stats: {nodes: number, edges: number}}>} Serialized index and stats + * @returns {Promise<{tree: Record, stats: {nodes: number, edges: number}}>} Serialized index and stats * * @example * import { buildWarpStateIndex } from './WarpStateIndexBuilder.js'; diff --git a/src/domain/services/WormholeService.js b/src/domain/services/WormholeService.js index 186a162a..bbfc0c61 100644 --- a/src/domain/services/WormholeService.js +++ b/src/domain/services/WormholeService.js @@ -23,6 +23,8 @@ import defaultCodec from '../utils/defaultCodec.js'; import ProvenancePayload from './ProvenancePayload.js'; import WormholeError from '../errors/WormholeError.js'; +import EncryptionError from '../errors/EncryptionError.js'; +import PersistenceError from '../errors/PersistenceError.js'; import { detectMessageKind, decodePatchMessage } from './WarpMessageCodec.js'; /** @@ -61,12 +63,13 @@ async function verifyShaExists(persistence, sha, paramName) { /** * Processes a single commit in the wormhole chain. 
- * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default, sha: string, graphName: string, expectedWriter: string|null, codec?: import('../../ports/CodecPort.js').default }} opts - Options + * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default, sha: string, graphName: string, expectedWriter: string|null, codec?: import('../../ports/CodecPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} opts - Options * @returns {Promise<{patch: import('../types/WarpTypesV2.js').PatchV2, sha: string, writerId: string, parentSha: string|null}>} * @throws {WormholeError} On validation errors + * @throws {EncryptionError} If the patch is encrypted but no patchBlobStorage is provided * @private */ -async function processCommit({ persistence, sha, graphName, expectedWriter, codec: codecOpt }) { +async function processCommit({ persistence, sha, graphName, expectedWriter, codec: codecOpt, patchBlobStorage }) { const codec = codecOpt || defaultCodec; const nodeInfo = await persistence.getNodeInfo(sha); const { message, parents } = nodeInfo; @@ -95,7 +98,25 @@ async function processCommit({ persistence, sha, graphName, expectedWriter, code }); } - const patchBuffer = await persistence.readBlob(patchMeta.patchOid); + /** @type {Uint8Array} */ + let patchBuffer; + if (patchMeta.encrypted) { + if (!patchBlobStorage) { + throw new EncryptionError( + 'This graph contains encrypted patches; provide patchBlobStorage with an encryption key', + ); + } + patchBuffer = await patchBlobStorage.retrieve(patchMeta.patchOid); + } else { + patchBuffer = await persistence.readBlob(patchMeta.patchOid); + } + if (!patchBuffer) { + throw new PersistenceError( + `Patch blob not found: ${patchMeta.patchOid}`, + PersistenceError.E_MISSING_OBJECT, + { context: { oid: patchMeta.patchOid } }, + ); + } const patch = /** @type 
{import('../types/WarpTypesV2.js').PatchV2} */ (codec.decode(patchBuffer)); return { @@ -130,20 +151,21 @@ async function processCommit({ persistence, sha, graphName, expectedWriter, code * must be an ancestor of `toSha` in the writer's patch chain. Both endpoints * are inclusive in the wormhole. * - * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default, graphName: string, fromSha: string, toSha: string, codec?: import('../../ports/CodecPort.js').default }} options - Wormhole creation options + * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default, graphName: string, fromSha: string, toSha: string, codec?: import('../../ports/CodecPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} options - Wormhole creation options * @returns {Promise} The created wormhole * @throws {WormholeError} If fromSha or toSha doesn't exist (E_WORMHOLE_SHA_NOT_FOUND) * @throws {WormholeError} If fromSha is not an ancestor of toSha (E_WORMHOLE_INVALID_RANGE) * @throws {WormholeError} If commits span multiple writers (E_WORMHOLE_MULTI_WRITER) * @throws {WormholeError} If a commit is not a patch commit (E_WORMHOLE_NOT_PATCH) + * @throws {EncryptionError} If patches are encrypted but no patchBlobStorage is provided */ -export async function createWormhole({ persistence, graphName, fromSha, toSha, codec }) { +export async function createWormhole({ persistence, graphName, fromSha, toSha, codec, patchBlobStorage }) { validateSha(fromSha, 'fromSha'); validateSha(toSha, 'toSha'); await verifyShaExists(persistence, fromSha, 'fromSha'); await verifyShaExists(persistence, toSha, 'toSha'); - const patches = await collectPatchRange({ persistence, graphName, fromSha, toSha, codec }); + const patches = await collectPatchRange({ persistence, graphName, fromSha, toSha, codec, patchBlobStorage }); // Reverse to get oldest-first order (as required by 
ProvenancePayload) patches.reverse(); @@ -161,18 +183,18 @@ export async function createWormhole({ persistence, graphName, fromSha, toSha, c * Walks the parent chain from toSha towards fromSha, collecting and * validating each commit along the way. * - * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default, graphName: string, fromSha: string, toSha: string, codec?: import('../../ports/CodecPort.js').default }} options + * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default, graphName: string, fromSha: string, toSha: string, codec?: import('../../ports/CodecPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} options * @returns {Promise>} Patches in newest-first order * @throws {WormholeError} If fromSha is not an ancestor of toSha or range is empty * @private */ -async function collectPatchRange({ persistence, graphName, fromSha, toSha, codec }) { +async function collectPatchRange({ persistence, graphName, fromSha, toSha, codec, patchBlobStorage }) { const patches = []; let currentSha = toSha; let writerId = null; while (currentSha) { - const result = await processCommit({ persistence, sha: currentSha, graphName, expectedWriter: writerId, codec }); + const result = await processCommit({ persistence, sha: currentSha, graphName, expectedWriter: writerId, codec, patchBlobStorage }); writerId = result.writerId; patches.push({ patch: result.patch, sha: result.sha, writerId: result.writerId }); diff --git a/src/domain/trust/TrustCanonical.js b/src/domain/trust/TrustCanonical.js index 10433f6d..b5072c43 100644 --- a/src/domain/trust/TrustCanonical.js +++ b/src/domain/trust/TrustCanonical.js @@ -8,35 +8,39 @@ * @see docs/specs/TRUST_V1_CRYPTO.md */ -import { createHash } from 'node:crypto'; import { recordIdPayload, signaturePayload } from './canonical.js'; +import defaultCrypto from '../utils/defaultCrypto.js'; +import { 
textEncode } from '../utils/bytes.js'; /** * Computes the record ID (SHA-256 hex digest) for a trust record. * * @param {Record} record - Full trust record - * @returns {string} 64-character lowercase hex string + * @param {{ crypto?: import('../../ports/CryptoPort.js').default }} [deps] - Optional dependency injection + * @returns {Promise} 64-character lowercase hex string */ -export function computeRecordId(record) { - return createHash('sha256').update(recordIdPayload(record)).digest('hex'); +export async function computeRecordId(record, { crypto } = {}) { + const c = crypto || defaultCrypto; + return await c.hash('sha256', recordIdPayload(record)); } /** - * Computes the signature payload as a Buffer (UTF-8 bytes). + * Computes the signature payload as UTF-8 bytes. * * @param {Record} record - Full trust record (signature will be stripped) - * @returns {Buffer} UTF-8 encoded bytes of the domain-separated canonical string + * @returns {Uint8Array} UTF-8 encoded bytes of the domain-separated canonical string */ export function computeSignaturePayload(record) { - return Buffer.from(signaturePayload(record), 'utf8'); + return textEncode(signaturePayload(record)); } /** * Verifies that a record's recordId matches its content. 
* * @param {Record} record - Trust record with `recordId` field - * @returns {boolean} true if recordId matches computed value + * @param {{ crypto?: import('../../ports/CryptoPort.js').default }} [deps] - Optional dependency injection + * @returns {Promise} true if recordId matches computed value */ -export function verifyRecordId(record) { - return record.recordId === computeRecordId(record); +export async function verifyRecordId(record, { crypto } = {}) { + return record.recordId === await computeRecordId(record, { crypto }); } diff --git a/src/domain/trust/TrustRecordService.js b/src/domain/trust/TrustRecordService.js index cb62a23f..7bde1316 100644 --- a/src/domain/trust/TrustRecordService.js +++ b/src/domain/trust/TrustRecordService.js @@ -71,7 +71,7 @@ export class TrustRecordService { } // 2. RecordId integrity - if (!verifyRecordId(record)) { + if (!await verifyRecordId(record)) { throw new TrustError( 'Trust record recordId does not match content', { code: 'E_TRUST_RECORD_ID_MISMATCH' }, @@ -175,9 +175,9 @@ export class TrustRecordService { * - First record has prev=null * * @param {Array>} records - Records in chain order (oldest first) - * @returns {{valid: boolean, errors: Array<{index: number, error: string}>}} + * @returns {Promise<{valid: boolean, errors: Array<{index: number, error: string}>}>} */ - verifyChain(records) { + async verifyChain(records) { /** @type {Array<{index: number, error: string}>} */ const errors = []; const seenIds = new Set(); @@ -193,7 +193,7 @@ export class TrustRecordService { } // RecordId integrity - if (!verifyRecordId(record)) { + if (!await verifyRecordId(record)) { errors.push({ index: i, error: 'RecordId does not match content' }); } @@ -343,8 +343,7 @@ export class TrustRecordService { async _persistRecord(ref, record, parentSha) { // Encode record as CBOR blob const encoded = this._codec.encode(record); - // Buffer.from() ensures Uint8Array from codec is accepted by writeBlob - const blobOid = await 
this._persistence.writeBlob(Buffer.from(encoded)); + const blobOid = await this._persistence.writeBlob(encoded); // Create tree with single entry (mktree format) const treeOid = await this._persistence.writeTree([`100644 blob ${blobOid}\trecord.cbor`]); diff --git a/src/domain/utils/bytes.js b/src/domain/utils/bytes.js new file mode 100644 index 00000000..eb2411be --- /dev/null +++ b/src/domain/utils/bytes.js @@ -0,0 +1,202 @@ +/** + * Pure byte-manipulation utilities for the domain layer. + * + * These functions replace Node.js Buffer methods with portable + * Uint8Array-based equivalents that work identically on Node, + * Bun, Deno, and browsers. + * + * @module domain/utils/bytes + */ + +const _encoder = new TextEncoder(); +const _decoder = new TextDecoder(); + +/** @type {readonly string[]} */ +const HEX_TABLE = Object.freeze( + Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0')) +); + +/** + * Encodes a Uint8Array to a lowercase hex string. + * + * @param {Uint8Array} bytes + * @returns {string} Lowercase hex string + */ +export function hexEncode(bytes) { + let hex = ''; + for (let i = 0; i < bytes.length; i++) { + hex += HEX_TABLE[bytes[i]]; + } + return hex; +} + +/** + * Returns the numeric value of a hex character code, or -1 if invalid. + * + * @param {number} cc - Character code + * @returns {number} 0–15 or -1 + */ +function hexCharValue(cc) { + // 0-9: 0x30–0x39 + if (cc >= 0x30 && cc <= 0x39) { return cc - 0x30; } + // A-F: 0x41–0x46 + if (cc >= 0x41 && cc <= 0x46) { return cc - 0x41 + 10; } + // a-f: 0x61–0x66 + if (cc >= 0x61 && cc <= 0x66) { return cc - 0x61 + 10; } + return -1; +} + +/** + * Decodes a hex string to a Uint8Array. + * + * @param {string} hex - Even-length hex string + * @returns {Uint8Array} + */ +export function hexDecode(hex) { + if (hex.length % 2 !== 0) { + throw new RangeError(`Invalid hex string (odd length ${hex.length}): ${hex.length > 20 ? 
`${hex.slice(0, 20)}…` : hex}`); + } + const len = hex.length >>> 1; + const bytes = new Uint8Array(len); + for (let i = 0; i < len; i++) { + const hi = hexCharValue(hex.charCodeAt(i * 2)); + const lo = hexCharValue(hex.charCodeAt(i * 2 + 1)); + if (hi === -1 || lo === -1) { + throw new RangeError(`Invalid hex string (length ${hex.length}): ${hex.length > 20 ? `${hex.slice(0, 20)}…` : hex}`); + } + bytes[i] = (hi << 4) | lo; + } + return bytes; +} + +const B64_CHARS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; + +const B64_LOOKUP = new Uint8Array(128); +for (let i = 0; i < B64_CHARS.length; i++) { + B64_LOOKUP[B64_CHARS.charCodeAt(i)] = i; +} + +/** + * Encodes a Uint8Array to a base64 string. + * + * Uses a direct table-based implementation that avoids intermediate binary + * strings, preventing memory spikes on large buffers. + * + * @param {Uint8Array} bytes + * @returns {string} Base64-encoded string + */ +export function base64Encode(bytes) { + let result = ''; + const len = bytes.length; + const remainder = len % 3; + const mainLen = len - remainder; + + for (let i = 0; i < mainLen; i += 3) { + const n = (bytes[i] << 16) | (bytes[i + 1] << 8) | bytes[i + 2]; + result += B64_CHARS[(n >>> 18) & 0x3f] + + B64_CHARS[(n >>> 12) & 0x3f] + + B64_CHARS[(n >>> 6) & 0x3f] + + B64_CHARS[n & 0x3f]; + } + + if (remainder === 1) { + const n = bytes[mainLen]; + result += `${B64_CHARS[(n >>> 2) & 0x3f]}${B64_CHARS[(n << 4) & 0x3f]}==`; + } else if (remainder === 2) { + const n = (bytes[mainLen] << 8) | bytes[mainLen + 1]; + result += `${B64_CHARS[(n >>> 10) & 0x3f]}${B64_CHARS[(n >>> 4) & 0x3f]}${B64_CHARS[(n << 2) & 0x3f]}=`; + } + + return result; +} + +/** + * Validates a base64 string's character set and length. + * + * @param {string} b64 - Base64-encoded string to validate + * @throws {RangeError} If the string contains invalid characters or has an + * impossible length (length % 4 === 1 can never represent whole bytes). 
+ */ +function validateBase64(b64) { + if (!/^[A-Za-z0-9+/]*={0,2}$/.test(b64)) { + throw new RangeError(`Invalid base64 string: ${b64.length > 20 ? `${b64.slice(0, 20)}…` : b64}`); + } + // Length % 4 === 1 is always invalid (a single base64 char encodes only 6 bits, + // which cannot form a complete byte). Accept 0, 2, 3 (unpadded) and 0 (padded). + if (b64.length % 4 === 1) { + throw new RangeError(`Invalid base64 string (bad length ${b64.length}): ${b64.length > 20 ? `${b64.slice(0, 20)}…` : b64}`); + } +} + +/** + * Decodes a base64 string to a Uint8Array. + * + * Uses a direct table-based implementation that avoids intermediate binary + * strings, preventing memory spikes on large buffers. + * + * @param {string} b64 - Base64-encoded string + * @returns {Uint8Array} + */ +export function base64Decode(b64) { + validateBase64(b64); + let len = b64.length; + if (b64[len - 1] === '=') { len--; } + if (b64[len - 1] === '=') { len--; } + + const outLen = (len * 3) >>> 2; + const bytes = new Uint8Array(outLen); + let j = 0; + + for (let i = 0; i < len; i += 4) { + const a = B64_LOOKUP[b64.charCodeAt(i)]; + const b = B64_LOOKUP[b64.charCodeAt(i + 1)]; + const c = i + 2 < len ? B64_LOOKUP[b64.charCodeAt(i + 2)] : 0; + const d = i + 3 < len ? B64_LOOKUP[b64.charCodeAt(i + 3)] : 0; + + bytes[j++] = (a << 2) | (b >>> 4); + if (j < outLen) { bytes[j++] = ((b << 4) | (c >>> 2)) & 0xff; } + if (j < outLen) { bytes[j++] = ((c << 6) | d) & 0xff; } + } + + return bytes; +} + +/** + * Concatenates multiple Uint8Arrays into a single Uint8Array. 
+ * + * @param {...Uint8Array} arrays + * @returns {Uint8Array} + */ +export function concatBytes(...arrays) { + let totalLength = 0; + for (let i = 0; i < arrays.length; i++) { + totalLength += arrays[i].length; + } + const result = new Uint8Array(totalLength); + let offset = 0; + for (let i = 0; i < arrays.length; i++) { + result.set(arrays[i], offset); + offset += arrays[i].length; + } + return result; +} + +/** + * Encodes a string to UTF-8 bytes. + * + * @param {string} str + * @returns {Uint8Array} + */ +export function textEncode(str) { + return _encoder.encode(str); +} + +/** + * Decodes UTF-8 bytes to a string. + * + * @param {Uint8Array} bytes + * @returns {string} + */ +export function textDecode(bytes) { + return _decoder.decode(bytes); +} diff --git a/src/domain/utils/defaultCrypto.js b/src/domain/utils/defaultCrypto.js index d199cc5c..f0487ae5 100644 --- a/src/domain/utils/defaultCrypto.js +++ b/src/domain/utils/defaultCrypto.js @@ -6,30 +6,52 @@ * the infrastructure layer. This follows the same pattern as * defaultCodec.js and defaultClock.js. * - * Since git-warp requires Git (and therefore Node 22+, Deno, or Bun), - * node:crypto is always available. + * In Node/Bun/Deno, node:crypto loads normally. When the import + * fails (e.g., Vite stubs `node:crypto` in browser bundles), + * callers must inject a CryptoPort explicitly. * * @module domain/utils/defaultCrypto */ -import { - createHash, - createHmac, - timingSafeEqual as nodeTimingSafeEqual, -} from 'node:crypto'; +/** @type {Function|null} */ +let _createHash = null; +/** @type {Function|null} */ +let _createHmac = null; +/** @type {Function|null} */ +let _timingSafeEqual = null; + +try { + const nodeCrypto = await import('node:crypto'); + _createHash = nodeCrypto.createHash; + _createHmac = nodeCrypto.createHmac; + _timingSafeEqual = nodeCrypto.timingSafeEqual; +} catch { + // Import failed (bundler stub, unsupported runtime, etc.) — + // caller must inject a CryptoPort explicitly. 
+} /** @type {import('../../ports/CryptoPort.js').default} */ const defaultCrypto = { // eslint-disable-next-line @typescript-eslint/require-await -- async matches CryptoPort contract async hash(algorithm, data) { - return createHash(algorithm).update(data).digest('hex'); + if (!_createHash) { + throw new Error('No crypto available. Inject a CryptoPort explicitly.'); + } + return _createHash(algorithm).update(data).digest('hex'); }, // eslint-disable-next-line @typescript-eslint/require-await -- async matches CryptoPort contract async hmac(algorithm, key, data) { - return createHmac(algorithm, key).update(data).digest(); + if (!_createHmac) { + throw new Error('No crypto available. Inject a CryptoPort explicitly.'); + } + const result = _createHmac(algorithm, key).update(data).digest(); + return new Uint8Array(result); }, timingSafeEqual(a, b) { - return nodeTimingSafeEqual(a, b); + if (!_timingSafeEqual) { + throw new Error('No crypto available. Inject a CryptoPort explicitly.'); + } + return _timingSafeEqual(a, b); }, }; diff --git a/src/domain/utils/seekCacheKey.js b/src/domain/utils/seekCacheKey.js index a4c03385..201664b2 100644 --- a/src/domain/utils/seekCacheKey.js +++ b/src/domain/utils/seekCacheKey.js @@ -10,7 +10,7 @@ * @module domain/utils/seekCacheKey */ -import { createHash } from 'node:crypto'; +import defaultCrypto from './defaultCrypto.js'; const KEY_VERSION = 'v1'; @@ -18,15 +18,18 @@ const KEY_VERSION = 'v1'; * Builds a deterministic, collision-resistant cache key from a ceiling tick * and writer frontier snapshot. * + * This function is intentionally async — WebCrypto's `digest()` is async-only, + * and `defaultCrypto.hash()` uses it. Both call sites are already async. + * * @param {number} ceiling - Lamport ceiling tick * @param {Map} frontier - Map of writerId → tip SHA - * @returns {string} Cache key, e.g. `v1:t42-a1b2c3d4...` (32+ hex chars in hash) + * @returns {Promise} Cache key, e.g. 
`v1:t42-a1b2c3d4...` (32+ hex chars in hash) */ -export function buildSeekCacheKey(ceiling, frontier) { +export async function buildSeekCacheKey(ceiling, frontier) { const sorted = [...frontier.entries()].sort((a, b) => a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0 ); const payload = sorted.map(([w, sha]) => `${w}:${sha}`).join('\n'); - const hash = createHash('sha256').update(payload).digest('hex'); + const hash = await defaultCrypto.hash('sha256', payload); return `${KEY_VERSION}:t${ceiling}-${hash}`; } diff --git a/src/domain/warp/Writer.js b/src/domain/warp/Writer.js index 7f9f9b19..be2e8e2b 100644 --- a/src/domain/warp/Writer.js +++ b/src/domain/warp/Writer.js @@ -36,9 +36,9 @@ export class Writer { /** * Creates a new Writer instance. * - * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default & import('../../ports/TreePort.js').default & import('../../ports/RefPort.js').default, graphName: string, writerId: string, versionVector: import('../crdt/VersionVector.js').VersionVector, getCurrentState: () => import('../services/JoinReducer.js').WarpStateV5 | null, onCommitSuccess?: (result: {patch: import('../types/WarpTypesV2.js').PatchV2, sha: string}) => void | Promise, onDeleteWithData?: 'reject'|'cascade'|'warn', codec?: import('../../ports/CodecPort.js').default, logger?: import('../../ports/LoggerPort.js').default }} options + * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default & import('../../ports/TreePort.js').default & import('../../ports/RefPort.js').default, graphName: string, writerId: string, versionVector: import('../crdt/VersionVector.js').VersionVector, getCurrentState: () => import('../services/JoinReducer.js').WarpStateV5 | null, onCommitSuccess?: (result: {patch: import('../types/WarpTypesV2.js').PatchV2, sha: string}) => void | Promise, onDeleteWithData?: 'reject'|'cascade'|'warn', codec?: import('../../ports/CodecPort.js').default, 
logger?: import('../../ports/LoggerPort.js').default, blobStorage?: import('../../ports/BlobStoragePort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} options */ - constructor({ persistence, graphName, writerId, versionVector, getCurrentState, onCommitSuccess, onDeleteWithData = 'warn', codec, logger }) { + constructor({ persistence, graphName, writerId, versionVector, getCurrentState, onCommitSuccess, onDeleteWithData = 'warn', codec, logger, blobStorage, patchBlobStorage }) { validateWriterId(writerId); /** @type {import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default & import('../../ports/TreePort.js').default & import('../../ports/RefPort.js').default} Wider than Writer's own calls; satisfies PatchBuilderV2 constructor. */ @@ -68,6 +68,12 @@ export class Writer { /** @type {import('../../ports/LoggerPort.js').default} */ this._logger = logger || nullLogger; + /** @type {import('../../ports/BlobStoragePort.js').default|null} */ + this._blobStorage = blobStorage || null; + + /** @type {import('../../ports/BlobStoragePort.js').default|null} */ + this._patchBlobStorage = patchBlobStorage || null; + /** @type {boolean} */ this._commitInProgress = false; } @@ -148,6 +154,8 @@ export class Writer { onDeleteWithData: this._onDeleteWithData, codec: this._codec, logger: this._logger, + blobStorage: this._blobStorage || undefined, + patchBlobStorage: this._patchBlobStorage || undefined, }); // Return PatchSession wrapping the builder diff --git a/src/domain/warp/_internal.js b/src/domain/warp/_internal.js index 6a7d65cd..6fe2c4e4 100644 --- a/src/domain/warp/_internal.js +++ b/src/domain/warp/_internal.js @@ -11,6 +11,13 @@ export { default as QueryError } from '../errors/QueryError.js'; export { default as ForkError } from '../errors/ForkError.js'; +/** + * Extended WarpGraph type that includes mixin methods wired at runtime. 
+ * Use this as the `@this` type in method files that call other mixin methods. + * + * @typedef {import('../WarpGraph.js').default & { _readPatchBlob(patchMeta: { patchOid: string, encrypted: boolean }): Promise }} WarpGraphWithMixins + */ + // ── Shared constants ──────────────────────────────────────────────────────── export const DEFAULT_ADJACENCY_CACHE_SIZE = 3; export const E_NO_STATE_MSG = 'No materialized state. Call materialize() before querying, or use autoMaterialize: true (the default). See https://github.com/git-stunts/git-warp#materialization'; diff --git a/src/domain/warp/checkpoint.methods.js b/src/domain/warp/checkpoint.methods.js index 7cc59ac1..92040a63 100644 --- a/src/domain/warp/checkpoint.methods.js +++ b/src/domain/warp/checkpoint.methods.js @@ -258,7 +258,7 @@ export async function _validateMigrationBoundary() { * typically writes a new tip, so a schema:2+ tip implies the writer has * been migrated. * - * @this {import('../WarpGraph.js').default} + * @this {import('./_internal.js').WarpGraphWithMixins} * @returns {Promise} True if any writer tip is schema:1 (or omits `schema`, treated as legacy v1) * @private */ @@ -279,7 +279,7 @@ export async function _hasSchema1Patches() { if (kind === 'patch') { const patchMeta = decodePatchMessage(nodeInfo.message); - const patchBuffer = await this._persistence.readBlob(patchMeta.patchOid); + const patchBuffer = await this._readPatchBlob(patchMeta); const patch = /** @type {{schema?: number}} */ (this._codec.decode(patchBuffer)); // If any patch has schema:1, we have v1 history diff --git a/src/domain/warp/materializeAdvanced.methods.js b/src/domain/warp/materializeAdvanced.methods.js index 74049660..3de3b631 100644 --- a/src/domain/warp/materializeAdvanced.methods.js +++ b/src/domain/warp/materializeAdvanced.methods.js @@ -281,9 +281,13 @@ export async function _materializeWithCeiling(ceiling, collectReceipts, t0) { // Persistent cache check — skip when collectReceipts is requested let cacheKey; if 
(this._seekCache && !collectReceipts) { - cacheKey = buildSeekCacheKey(ceiling, frontier); try { - const cached = await this._seekCache.get(cacheKey); + cacheKey = await buildSeekCacheKey(ceiling, frontier); + } catch { + // crypto unavailable (e.g., browser) — treat as cache miss + } + try { + const cached = cacheKey ? await this._seekCache.get(cacheKey) : undefined; if (cached) { try { const state = deserializeFullStateV5(cached.buffer, { codec: this._codec }); @@ -299,7 +303,9 @@ export async function _materializeWithCeiling(ceiling, collectReceipts, t0) { return freezePublicState(state); } catch { // Corrupted payload — self-heal by removing the bad entry - try { await this._seekCache.delete(cacheKey); } catch { /* best-effort */ } + if (cacheKey) { + try { await this._seekCache.delete(cacheKey); } catch { /* best-effort */ } + } } } } catch { @@ -347,12 +353,16 @@ export async function _materializeWithCeiling(ceiling, collectReceipts, t0) { // Store to persistent cache (fire-and-forget — failure is non-fatal) if (this._seekCache && !collectReceipts && allPatches.length > 0) { - if (!cacheKey) { - cacheKey = buildSeekCacheKey(ceiling, frontier); + try { + if (!cacheKey) { + cacheKey = await buildSeekCacheKey(ceiling, frontier); + } + const buf = serializeFullStateV5(state, { codec: this._codec }); + this._persistSeekCacheEntry(cacheKey, buf, state) + .catch(() => {}); + } catch { + // crypto unavailable — skip cache write } - const buf = serializeFullStateV5(state, { codec: this._codec }); - this._persistSeekCacheEntry(cacheKey, buf, state) - .catch(() => {}); } // Skip auto-checkpoint and GC — this is an exploratory read @@ -430,7 +440,7 @@ export async function _restoreIndexFromCache(indexTreeOid) { * builds the target frontier from current writer tips, and applies * incremental patches since the checkpoint. 
* - * @this {import('../WarpGraph.js').default} + * @this {import('./_internal.js').WarpGraphWithMixins} * @param {string} checkpointSha - The checkpoint commit SHA * @returns {Promise} The materialized graph state at the checkpoint * @throws {Error} If checkpoint SHA is invalid or not found @@ -472,7 +482,7 @@ export async function materializeAt(checkpointSha) { } const patchMeta = decodePatchMessage(message); - const patchBuffer = await this._persistence.readBlob(patchMeta.patchOid); + const patchBuffer = await this._readPatchBlob(patchMeta); const patch = /** @type {import('../types/WarpTypesV2.js').PatchV2} */ (this._codec.decode(patchBuffer)); patches.push({ patch, sha: currentSha }); diff --git a/src/domain/warp/patch.methods.js b/src/domain/warp/patch.methods.js index 5ab95e5c..db66bfb4 100644 --- a/src/domain/warp/patch.methods.js +++ b/src/domain/warp/patch.methods.js @@ -17,6 +17,8 @@ import { buildWriterRef, buildWritersPrefix, parseWriterIdFromRef } from '../uti import { decodePatchMessage, detectMessageKind } from '../services/WarpMessageCodec.js'; import { Writer } from './Writer.js'; import { generateWriterId, resolveWriterId } from '../utils/WriterId.js'; +import EncryptionError from '../errors/EncryptionError.js'; +import PersistenceError from '../errors/PersistenceError.js'; /** @typedef {import('../types/WarpPersistence.js').CorePersistence} CorePersistence */ @@ -48,6 +50,8 @@ export async function createPatch() { onCommitSuccess: (/** @type {{patch?: import('../types/WarpTypesV2.js').PatchV2, sha?: string}} */ opts) => this._onPatchCommitted(this._writerId, opts), codec: this._codec, logger: this._logger || undefined, + blobStorage: this._blobStorage || undefined, + patchBlobStorage: this._patchBlobStorage || undefined, }); } @@ -171,7 +175,7 @@ export async function _nextLamport() { * Walks commits from the tip SHA back to the first patch commit, * collecting all patches along the way. 
* - * @this {import('../WarpGraph.js').default} + * @this {import('./_internal.js').WarpGraphWithMixins} * @param {string} writerId - The writer ID to load patches for * @param {string|null} [stopAtSha=null] - Stop walking when reaching this SHA (exclusive) * @returns {Promise>} Array of patches @@ -202,8 +206,8 @@ export async function _loadWriterPatches(writerId, stopAtSha = null) { // Decode the patch message to get patchOid const patchMeta = decodePatchMessage(message); - // Read the patch blob - const patchBuffer = await this._persistence.readBlob(patchMeta.patchOid); + // Read the patch blob (encrypted or plain) + const patchBuffer = await this._readPatchBlob(patchMeta); const decoded = /** @type {import('../types/WarpTypesV2.js').PatchV2} */ (this._codec.decode(patchBuffer)); patches.push({ patch: decoded, sha: currentSha }); @@ -326,6 +330,8 @@ export async function writer(writerId) { onCommitSuccess: /** @type {(result: {patch: import('../types/WarpTypesV2.js').PatchV2, sha: string}) => void} */ ((/** @type {{patch?: import('../types/WarpTypesV2.js').PatchV2, sha?: string}} */ opts) => this._onPatchCommitted(resolvedWriterId, opts)), codec: this._codec, logger: this._logger || undefined, + blobStorage: this._blobStorage || undefined, + patchBlobStorage: this._patchBlobStorage || undefined, }); } @@ -383,6 +389,8 @@ export async function createWriter(opts = {}) { onCommitSuccess: /** @type {(result: {patch: import('../types/WarpTypesV2.js').PatchV2, sha: string}) => void} */ ((/** @type {{patch?: import('../types/WarpTypesV2.js').PatchV2, sha?: string}} */ commitOpts) => this._onPatchCommitted(freshWriterId, commitOpts)), codec: this._codec, logger: this._logger || undefined, + blobStorage: this._blobStorage || undefined, + patchBlobStorage: this._patchBlobStorage || undefined, }); } @@ -414,6 +422,34 @@ export async function _ensureFreshState() { } } +/** + * Reads a patch blob, using patchBlobStorage for encrypted patches + * and falling back to 
persistence.readBlob() for plain patches. + * + * @this {import('./_internal.js').WarpGraphWithMixins} + * @param {{ patchOid: string, encrypted: boolean }} patchMeta + * @returns {Promise} + */ +export async function _readPatchBlob(patchMeta) { + if (patchMeta.encrypted) { + if (!this._patchBlobStorage) { + throw new EncryptionError( + 'This graph contains encrypted patches; provide patchBlobStorage with an encryption key', + ); + } + return await this._patchBlobStorage.retrieve(patchMeta.patchOid); + } + const blob = await this._persistence.readBlob(patchMeta.patchOid); + if (!blob) { + throw new PersistenceError( + `Patch blob not found: ${patchMeta.patchOid}`, + PersistenceError.E_MISSING_OBJECT, + { context: { oid: patchMeta.patchOid } }, + ); + } + return blob; +} + /** * Discovers all writers that have written to this graph. * diff --git a/src/domain/warp/provenance.methods.js b/src/domain/warp/provenance.methods.js index 9caab655..170f16a9 100644 --- a/src/domain/warp/provenance.methods.js +++ b/src/domain/warp/provenance.methods.js @@ -219,7 +219,7 @@ export async function loadPatchBySha(sha) { /** * Loads a single patch by its SHA. 
* - * @this {import('../WarpGraph.js').default} + * @this {import('./_internal.js').WarpGraphWithMixins} * @param {string} sha - The patch commit SHA * @returns {Promise} The decoded patch object * @throws {Error} If the commit is not a patch or loading fails @@ -233,7 +233,7 @@ export async function _loadPatchBySha(sha) { } const patchMeta = decodePatchMessage(nodeInfo.message); - const patchBuffer = await this._persistence.readBlob(patchMeta.patchOid); + const patchBuffer = await this._readPatchBlob(patchMeta); return /** @type {import('../types/WarpTypesV2.js').PatchV2} */ (this._codec.decode(patchBuffer)); } diff --git a/src/domain/warp/query.methods.js b/src/domain/warp/query.methods.js index 77dea119..20e6812c 100644 --- a/src/domain/warp/query.methods.js +++ b/src/domain/warp/query.methods.js @@ -377,6 +377,9 @@ export async function getContent(nodeId) { if (!oid) { return null; } + if (this._blobStorage) { + return await this._blobStorage.retrieve(oid); + } return await this._persistence.readBlob(oid); } @@ -420,5 +423,8 @@ export async function getEdgeContent(from, to, label) { if (!oid) { return null; } + if (this._blobStorage) { + return await this._blobStorage.retrieve(oid); + } return await this._persistence.readBlob(oid); } diff --git a/src/globals.d.ts b/src/globals.d.ts index 931ec235..0fcf0f5a 100644 --- a/src/globals.d.ts +++ b/src/globals.d.ts @@ -2,9 +2,10 @@ * Minimal ambient declarations for Deno and Bun runtime globals. 
* * These cover ONLY the APIs actually used in this codebase: - * - Deno.serve() (DenoHttpAdapter.js) - * - Deno.env.get() (bin/cli/infrastructure.js) - * - Bun.serve() (BunHttpAdapter.js) + * - Deno.serve() (DenoHttpAdapter.js, DenoWsAdapter.js) + * - Deno.upgradeWebSocket() (DenoWsAdapter.js) + * - Deno.env.get() (bin/cli/infrastructure.js) + * - Bun.serve() (BunHttpAdapter.js, BunWsAdapter.js) * * Do NOT install @types/deno or @types/bun — this file is intentionally * narrow to avoid pulling in thousands of unrelated declarations. @@ -28,7 +29,7 @@ interface DenoServer { interface DenoServeOptions { port?: number; hostname?: string; - onListen?: () => void; + onListen?: (addr?: { port: number; hostname: string }) => void; } interface DenoEnv { @@ -41,22 +42,44 @@ declare namespace Deno { options: DenoServeOptions, handler: (request: Request) => Promise | Response, ): DenoServer; + function upgradeWebSocket(request: Request): { socket: WebSocket; response: Response }; } /* ------------------------------------------------------------------ */ /* Bun */ /* ------------------------------------------------------------------ */ +interface BunServerWebSocket { + send(data: string | ArrayBuffer | Uint8Array): void; + close(code?: number, reason?: string): void; + data: T; + readyState: number; +} + +interface BunWsData { + messageHandler: ((message: string) => void) | null; + closeHandler: ((code?: number, reason?: string) => void) | null; + messageBuffer: string[]; +} + +interface BunWebSocketHandlers { + open?(ws: BunServerWebSocket): void; + message?(ws: BunServerWebSocket, message: string | ArrayBuffer): void; + close?(ws: BunServerWebSocket, code: number, reason: string): void; +} + interface BunServer { stop(closeActiveConnections?: boolean): Promise; hostname: string; port: number; + upgrade(req: Request, options?: { data?: T }): boolean; } interface BunServeOptions { port?: number; hostname?: string; - fetch: (request: Request) => Promise | Response; + fetch: 
(request: Request, server: BunServer) => Promise | Response | undefined; + websocket?: BunWebSocketHandlers; } declare namespace Bun { diff --git a/src/infrastructure/adapters/BunHttpAdapter.js b/src/infrastructure/adapters/BunHttpAdapter.js index bc91cc0a..5a75866d 100644 --- a/src/infrastructure/adapters/BunHttpAdapter.js +++ b/src/infrastructure/adapters/BunHttpAdapter.js @@ -1,46 +1,12 @@ import HttpServerPort from '../../ports/HttpServerPort.js'; -import { MAX_BODY_BYTES, readStreamBody, noopLogger } from './httpAdapterUtils.js'; - -const ERROR_BODY = 'Internal Server Error'; -const ERROR_BODY_BYTES = new TextEncoder().encode(ERROR_BODY); -const ERROR_BODY_LENGTH = String(ERROR_BODY_BYTES.byteLength); - -const PAYLOAD_TOO_LARGE = 'Payload Too Large'; -const PAYLOAD_TOO_LARGE_LENGTH = String(new TextEncoder().encode(PAYLOAD_TOO_LARGE).byteLength); - -/** - * Converts a Bun Request into the plain-object format expected by - * HttpServerPort request handlers. - * - * @param {Request} request - Bun fetch Request - * @returns {Promise<{ method: string, url: string, headers: Record, body: Uint8Array|undefined }>} - */ -async function toPortRequest(request) { - /** @type {Record} */ - const headers = {}; - request.headers.forEach((value, key) => { - headers[key] = value; - }); - - let body; - if (request.method !== 'GET' && request.method !== 'HEAD') { - const cl = headers['content-length']; - if (cl !== undefined && Number(cl) > MAX_BODY_BYTES) { - throw Object.assign(new Error('Payload Too Large'), { status: 413 }); - } - if (request.body) { - body = await readStreamBody(request.body); - } - } - - const parsedUrl = new URL(request.url); - return { - method: request.method, - url: parsedUrl.pathname + parsedUrl.search, - headers, - body, - }; -} +import { + noopLogger, + toPortRequest, + ERROR_BODY, + ERROR_BODY_LENGTH, + PAYLOAD_TOO_LARGE_BODY, + PAYLOAD_TOO_LARGE_LENGTH, +} from './httpAdapterUtils.js'; /** * Converts a plain-object port response into a Bun 
Response. @@ -71,7 +37,7 @@ function createFetchHandler(requestHandler, logger) { return toResponse(portRes); } catch (err) { if (typeof err === 'object' && err !== null && /** @type {{status?: number}} */ (err).status === 413) { - return new Response(PAYLOAD_TOO_LARGE, { + return new Response(PAYLOAD_TOO_LARGE_BODY, { status: 413, headers: { 'Content-Type': 'text/plain', 'Content-Length': PAYLOAD_TOO_LARGE_LENGTH }, }); diff --git a/src/infrastructure/adapters/BunWsAdapter.js b/src/infrastructure/adapters/BunWsAdapter.js new file mode 100644 index 00000000..b86d082f --- /dev/null +++ b/src/infrastructure/adapters/BunWsAdapter.js @@ -0,0 +1,130 @@ +import WebSocketServerPort from '../../ports/WebSocketServerPort.js'; +import { normalizeHost, assertNotListening, messageToString } from './wsAdapterUtils.js'; + +/** + * Wraps a Bun ServerWebSocket into a port-compliant WsConnection. + * + * Handler refs are stored on `ws.data` so the Bun `websocket` callbacks + * can route messages/closes to the correct connection. + * + * @param {BunServerWebSocket} ws + * @returns {import('../../ports/WebSocketServerPort.js').WsConnection} + */ +function wrapBunWs(ws) { + return { + send(message) { + if (ws.readyState === 1) { + ws.send(message); + } + }, + onMessage(handler) { + // Flush any messages that arrived before the handler was set + if (ws.data.messageBuffer.length > 0) { + for (const buffered of ws.data.messageBuffer) { + handler(buffered); + } + ws.data.messageBuffer.length = 0; + } + ws.data.messageHandler = handler; + }, + onClose(handler) { ws.data.closeHandler = handler; }, + close() { ws.close(); }, + }; +} + +/** + * Builds a Bun.serve fetch handler that attempts WS upgrade, then + * optionally serves static files. 
+ * + * @param {string|null} staticDir + * @returns {(req: Request, srv: BunServer) => Promise} + */ +function createFetchHandler(staticDir) { + return async (req, srv) => { + if (srv.upgrade(req, { data: { messageHandler: null, closeHandler: null, messageBuffer: [] } })) { + return undefined; + } + if (staticDir) { + const { handleStaticRequest } = await import('./staticFileHandler.js'); + const url = new URL(req.url); + const result = await handleStaticRequest(staticDir, url.pathname); + return new Response(/** @type {BodyInit|null} */ (result.body), { status: result.status, headers: result.headers }); + } + return new Response('Not Found', { status: 404 }); + }; +} + +/** + * Bun WebSocket adapter implementing WebSocketServerPort. + * + * Uses `globalThis.Bun.serve()` with the `websocket` handler option. + * When `staticDir` is provided, serves static files for non-WS requests. + * This file can be imported on any runtime but will fail at call-time + * if Bun is not available. + * + * @extends WebSocketServerPort + */ +export default class BunWsAdapter extends WebSocketServerPort { + /** + * @param {{ staticDir?: string|null }} [options] + */ + constructor({ staticDir } = {}) { + super(); + /** @type {string|null} */ + this._staticDir = staticDir || null; + } + + /** + * @param {(connection: import('../../ports/WebSocketServerPort.js').WsConnection) => void} onConnection + * @returns {import('../../ports/WebSocketServerPort.js').WsServerHandle} + */ + createServer(onConnection) { + /** @type {BunServer|null} */ + let server = null; + + return { + listen: (/** @type {number} */ port, /** @type {string} [host] */ host = '127.0.0.1') => { + assertNotListening(server); + const bindHost = normalizeHost(host); + server = globalThis.Bun.serve({ + port, + hostname: bindHost, + fetch: createFetchHandler(this._staticDir), + websocket: { + open(ws) { onConnection(wrapBunWs(ws)); }, + message(ws, msg) { + const text = messageToString(msg); + if (ws.data.messageHandler) { + 
ws.data.messageHandler(text); + } else { + ws.data.messageBuffer.push(text); + } + }, + close(ws, code, reason) { + if (ws.data.closeHandler) { + ws.data.closeHandler(code, reason); + } + }, + }, + }); + return Promise.resolve({ port: server.port, host: bindHost }); + }, + + close() { + if (!server) { + return Promise.resolve(); + } + const s = server; + server = null; + try { + return Promise.resolve(s.stop()).catch(() => { + // Best-effort — stop() may reject on some versions + }); + } catch { + // Best-effort — stop() may throw synchronously + return Promise.resolve(); + } + }, + }; + } +} diff --git a/src/infrastructure/adapters/CasBlobAdapter.js b/src/infrastructure/adapters/CasBlobAdapter.js new file mode 100644 index 00000000..49a9701d --- /dev/null +++ b/src/infrastructure/adapters/CasBlobAdapter.js @@ -0,0 +1,158 @@ +/** + * CasBlobAdapter — stores content blobs via git-cas. + * + * Content is chunked (CDC by default), optionally encrypted, and stored + * as a CAS tree in the Git object store. The tree OID serves as the + * storage identifier. + * + * Backward compatibility: if `retrieve()` fails to find a CAS manifest + * at the given OID, it falls back to reading a raw Git blob. This + * handles content written before the CAS migration. + * + * @module infrastructure/adapters/CasBlobAdapter + */ + +import BlobStoragePort from '../../ports/BlobStoragePort.js'; +import { createLazyCas } from './lazyCasInit.js'; +import LoggerObservabilityBridge from './LoggerObservabilityBridge.js'; +import { Readable } from 'node:stream'; + +/** @typedef {{ readManifest: Function, restore: Function, store: Function, createTree: Function }} CasStore */ + +/** + * Error codes from `@git-stunts/git-cas` that indicate the OID is not + * a CAS manifest (i.e. it's a legacy raw Git blob written before the + * CAS migration). 
+ * + * - `MANIFEST_NOT_FOUND` — tree exists but contains no manifest entry + * - `GIT_ERROR` — Git couldn't read the tree at all (wrong object type) + * + * @type {ReadonlySet} + */ +const LEGACY_BLOB_CODES = new Set(['MANIFEST_NOT_FOUND', 'GIT_ERROR']); + +/** + * Returns true when the error indicates the OID is not a CAS manifest + * (i.e. it's a legacy raw Git blob). All other errors are considered + * real failures and should be rethrown. + * + * Checks `err.code` (the machine-readable `CasError` code) first. + * Falls back to message-based matching for non-CasError exceptions + * thrown by lower-level Git operations. + * + * @param {unknown} err + * @returns {boolean} + */ +function isLegacyBlobError(err) { + if (err instanceof Error && 'code' in err) { + /** @type {{ code: unknown }} */ + const coded = /** @type {Error & { code: unknown }} */ (err); + if (typeof coded.code === 'string') { + return LEGACY_BLOB_CODES.has(coded.code); + } + } + const msg = err instanceof Error ? err.message : ''; + return msg.includes('not a tree') + || msg.includes('bad object') + || msg.includes('does not exist'); +} + +export default class CasBlobAdapter extends BlobStoragePort { + /** + * @param {{ plumbing: *, persistence: *, encryptionKey?: Uint8Array, logger?: import('../../ports/LoggerPort.js').default }} options + */ + constructor({ plumbing, persistence, encryptionKey, logger }) { + super(); + this._plumbing = plumbing; + this._persistence = persistence; + this._encryptionKey = encryptionKey; + this._logger = logger; + this._getCas = createLazyCas(() => this._initCas()); + } + + /** + * @private + * @returns {Promise} + */ + async _initCas() { + const { default: ContentAddressableStore, CborCodec } = await import( + /* webpackIgnore: true */ '@git-stunts/git-cas' + ); + /** @type {{ plumbing: *, codec: *, chunking: { strategy: 'cdc' }, observability?: * }} */ + const opts = { + plumbing: this._plumbing, + codec: new CborCodec(), + chunking: { strategy: 'cdc' }, + }; + 
if (this._logger) { + opts.observability = new LoggerObservabilityBridge(this._logger); + } + return new ContentAddressableStore(opts); + } + + /** + * Stores content via git-cas and returns the tree OID. + * + * @override + * @param {Uint8Array|string} content + * @param {{ slug?: string }} [options] + * @returns {Promise} + */ + async store(content, options) { + const cas = await this._getCas(); + const buf = typeof content === 'string' + ? new TextEncoder().encode(content) + : content; + const source = Readable.from([buf]); + + /** @type {{ source: *, slug: string, filename: string, encryptionKey?: Uint8Array }} */ + const storeOpts = { + source, + slug: options?.slug || `blob-${Date.now().toString(36)}`, + filename: 'content', + }; + if (this._encryptionKey) { + storeOpts.encryptionKey = this._encryptionKey; + } + + const manifest = await cas.store(storeOpts); + return await cas.createTree({ manifest }); + } + + /** + * Retrieves content by tree OID. Falls back to raw Git blob read + * for backward compatibility with pre-CAS content. + * + * @override + * @param {string} oid + * @returns {Promise} + */ + async retrieve(oid) { + const cas = await this._getCas(); + + try { + const manifest = await cas.readManifest({ treeOid: oid }); + /** @type {{ manifest: *, encryptionKey?: Uint8Array }} */ + const restoreOpts = { manifest }; + if (this._encryptionKey) { + restoreOpts.encryptionKey = this._encryptionKey; + } + const { buffer } = await cas.restore(restoreOpts); + return buffer; + } catch (err) { + // Fallback: OID may be a raw Git blob (pre-CAS content). + // Only fall through for "not a manifest" errors (missing tree, bad format). + // Rethrow corruption, decryption, and I/O errors. 
+ if (!isLegacyBlobError(err)) { + throw err; + } + const blob = await this._persistence.readBlob(oid); + if (blob === null || blob === undefined) { + throw new Error( + `Blob not found: OID "${oid}" is neither a CAS manifest nor a readable Git blob`, + ); + } + return blob; + } + } +} diff --git a/src/infrastructure/adapters/CasSeekCacheAdapter.js b/src/infrastructure/adapters/CasSeekCacheAdapter.js index a1e72c92..b1f9189c 100644 --- a/src/infrastructure/adapters/CasSeekCacheAdapter.js +++ b/src/infrastructure/adapters/CasSeekCacheAdapter.js @@ -18,11 +18,14 @@ /** * Minimal interface for the ContentAddressableStore from @git-stunts/git-cas. - * @typedef {{ readManifest: Function, restore: Function, store: Function, createTree: Function }} CasStore + * @typedef {{ readManifest: Function, restore: Function, restoreStream?: Function, store: Function, createTree: Function }} CasStore */ import SeekCachePort from '../../ports/SeekCachePort.js'; import { buildSeekCacheRef } from '../../domain/utils/RefLayout.js'; +import { createLazyCas } from './lazyCasInit.js'; +import LoggerObservabilityBridge from './LoggerObservabilityBridge.js'; +import { textEncode, textDecode, concatBytes } from '../../domain/utils/bytes.js'; import { Readable } from 'node:stream'; const DEFAULT_MAX_ENTRIES = 200; @@ -50,31 +53,20 @@ const MAX_CAS_RETRIES = 3; export default class CasSeekCacheAdapter extends SeekCachePort { /** - * @param {{ persistence: *, plumbing: *, graphName: string, maxEntries?: number }} options + * @param {{ persistence: *, plumbing: *, graphName: string, maxEntries?: number, encryptionKey?: Uint8Array, logger?: import('../../ports/LoggerPort.js').default }} options */ - constructor({ persistence, plumbing, graphName, maxEntries }) { + constructor({ persistence, plumbing, graphName, maxEntries, encryptionKey, logger }) { super(); this._persistence = persistence; this._plumbing = plumbing; this._graphName = graphName; this._maxEntries = maxEntries ?? 
DEFAULT_MAX_ENTRIES; this._ref = buildSeekCacheRef(graphName); - this._casPromise = null; - } - - /** - * Lazily initializes the ContentAddressableStore. - * @private - * @returns {Promise} - */ - async _getCas() { - if (!this._casPromise) { - this._casPromise = this._initCas().catch((err) => { - this._casPromise = null; - throw err; - }); - } - return await this._casPromise; + /** @type {Uint8Array|undefined} */ + this._encryptionKey = encryptionKey; + /** @type {import('../../ports/LoggerPort.js').default|undefined} */ + this._logger = logger; + this._getCas = createLazyCas(() => this._initCas()); } /** @@ -82,10 +74,19 @@ export default class CasSeekCacheAdapter extends SeekCachePort { * @returns {Promise} */ async _initCas() { - const { default: ContentAddressableStore } = await import( + const { default: ContentAddressableStore, CborCodec } = await import( /* webpackIgnore: true */ '@git-stunts/git-cas' ); - return ContentAddressableStore.createCbor({ plumbing: this._plumbing }); + /** @type {{ plumbing: *, codec: *, chunking: { strategy: 'cdc' }, observability?: * }} */ + const opts = { + plumbing: this._plumbing, + codec: new CborCodec(), + chunking: { strategy: 'cdc' }, + }; + if (this._logger) { + opts.observability = new LoggerObservabilityBridge(this._logger); + } + return new ContentAddressableStore(opts); } // --------------------------------------------------------------------------- @@ -104,7 +105,7 @@ export default class CasSeekCacheAdapter extends SeekCachePort { } try { const buf = await this._persistence.readBlob(oid); - const parsed = JSON.parse(buf.toString('utf8')); + const parsed = JSON.parse(textDecode(buf)); if (parsed.schemaVersion !== INDEX_SCHEMA_VERSION) { return { schemaVersion: INDEX_SCHEMA_VERSION, entries: {} }; } @@ -122,7 +123,7 @@ export default class CasSeekCacheAdapter extends SeekCachePort { */ async _writeIndex(index) { const json = JSON.stringify(index); - const oid = await this._persistence.writeBlob(Buffer.from(json, 
'utf8')); + const oid = await this._persistence.writeBlob(textEncode(json)); await this._persistence.updateRef(this._ref, oid); } @@ -199,6 +200,39 @@ export default class CasSeekCacheAdapter extends SeekCachePort { return { ceiling, frontierHash }; } + // --------------------------------------------------------------------------- + // Restore helpers + // --------------------------------------------------------------------------- + + /** + * Restores a CAS asset into a single buffer. + * + * Prefers `cas.restoreStream()` (git-cas v4+) for I/O pipelining — + * chunk reads overlap with buffer accumulation. Falls back to + * `cas.restore()` for older git-cas versions or when streaming is + * unavailable (e.g. encrypted assets that require full buffering). + * + * @private + * @param {CasStore} cas - ContentAddressableStore instance + * @param {{ manifest: *, encryptionKey?: Uint8Array }} restoreOpts + * @returns {Promise} + */ + async _restoreBuffer(cas, restoreOpts) { + if (typeof cas.restoreStream === 'function') { + const stream = cas.restoreStream(restoreOpts); + const chunks = []; + for await (const chunk of stream) { + chunks.push(chunk); + } + if (chunks.length === 1) { + return chunks[0]; + } + return concatBytes(...chunks); + } + const { buffer } = await cas.restore(restoreOpts); + return buffer; + } + // --------------------------------------------------------------------------- // SeekCachePort implementation // --------------------------------------------------------------------------- @@ -215,7 +249,7 @@ export default class CasSeekCacheAdapter extends SeekCachePort { * * @override * @param {string} key - * @returns {Promise<{ buffer: Buffer|Uint8Array, indexTreeOid?: string } | null>} + * @returns {Promise<{ buffer: Uint8Array, indexTreeOid?: string } | null>} */ async get(key) { const cas = await this._getCas(); @@ -227,7 +261,12 @@ export default class CasSeekCacheAdapter extends SeekCachePort { try { const manifest = await cas.readManifest({ treeOid: 
entry.treeOid }); - const { buffer } = await cas.restore({ manifest }); + /** @type {{ manifest: *, encryptionKey?: Uint8Array }} */ + const restoreOpts = { manifest }; + if (this._encryptionKey) { + restoreOpts.encryptionKey = this._encryptionKey; + } + const buffer = await this._restoreBuffer(cas, restoreOpts); // Update lastAccessedAt for LRU eviction ordering await this._mutateIndex((idx) => { if (idx.entries[key]) { @@ -235,7 +274,7 @@ export default class CasSeekCacheAdapter extends SeekCachePort { } return idx; }); - /** @type {{ buffer: Buffer|Uint8Array, indexTreeOid?: string }} */ + /** @type {{ buffer: Uint8Array, indexTreeOid?: string }} */ const result = { buffer }; if (entry.indexTreeOid) { result.indexTreeOid = entry.indexTreeOid; @@ -254,7 +293,7 @@ export default class CasSeekCacheAdapter extends SeekCachePort { /** * @override * @param {string} key - * @param {Buffer|Uint8Array} buffer + * @param {Uint8Array} buffer * @param {{ indexTreeOid?: string }} [options] * @returns {Promise} */ @@ -264,11 +303,12 @@ export default class CasSeekCacheAdapter extends SeekCachePort { // Store buffer as CAS asset const source = Readable.from([buffer]); - const manifest = await cas.store({ - source, - slug: key, - filename: 'state.cbor', - }); + /** @type {{ source: *, slug: string, filename: string, encryptionKey?: Uint8Array }} */ + const storeOpts = { source, slug: key, filename: 'state.cbor' }; + if (this._encryptionKey) { + storeOpts.encryptionKey = this._encryptionKey; + } + const manifest = await cas.store(storeOpts); const treeOid = await cas.createTree({ manifest }); // Update index with rich metadata diff --git a/src/infrastructure/adapters/DenoHttpAdapter.js b/src/infrastructure/adapters/DenoHttpAdapter.js index 45fa07b3..b575b124 100644 --- a/src/infrastructure/adapters/DenoHttpAdapter.js +++ b/src/infrastructure/adapters/DenoHttpAdapter.js @@ -1,40 +1,11 @@ import HttpServerPort from '../../ports/HttpServerPort.js'; -import { MAX_BODY_BYTES, 
readStreamBody, noopLogger } from './httpAdapterUtils.js'; - -const ERROR_BODY = 'Internal Server Error'; -const ERROR_BODY_BYTES = new TextEncoder().encode(ERROR_BODY); - -/** - * Converts a Deno Request into the plain-object format expected by - * HttpServerPort request handlers. - * - * @param {Request} request - Deno Request object - * @returns {Promise<{ method: string, url: string, headers: Record, body: Uint8Array|undefined }>} - */ -async function toPlainRequest(request) { - /** @type {Record} */ - const headers = {}; - request.headers.forEach((value, key) => { - headers[key] = value; - }); - - let body; - if (request.method !== 'GET' && request.method !== 'HEAD' && request.body) { - const cl = headers['content-length']; - if (cl !== undefined && Number(cl) > MAX_BODY_BYTES) { - throw Object.assign(new Error('Payload Too Large'), { status: 413 }); - } - body = await readStreamBody(request.body); - } - - const url = new URL(request.url); - return { - method: request.method, - url: url.pathname + url.search, - headers, - body, - }; -} +import { + noopLogger, + toPortRequest, + ERROR_BODY_BYTES, + PAYLOAD_TOO_LARGE_BYTES, + PAYLOAD_TOO_LARGE_LENGTH, +} from './httpAdapterUtils.js'; /** * Converts a plain-object response from the handler into a Deno Response. 
@@ -60,15 +31,14 @@ function toDenoResponse(plain) { function createHandler(requestHandler, logger) { return async (request) => { try { - const plain = await toPlainRequest(request); - const response = await requestHandler(plain); + const portReq = await toPortRequest(request); + const response = await requestHandler(portReq); return toDenoResponse(response); } catch (err) { if (typeof err === 'object' && err !== null && /** @type {{status?: number}} */ (err).status === 413) { - const msg = new TextEncoder().encode('Payload Too Large'); - return new Response(msg, { + return new Response(PAYLOAD_TOO_LARGE_BYTES, { status: 413, - headers: { 'Content-Type': 'text/plain', 'Content-Length': String(msg.byteLength) }, + headers: { 'Content-Type': 'text/plain', 'Content-Length': PAYLOAD_TOO_LARGE_LENGTH }, }); } logger.error('DenoHttpAdapter dispatch error', err); diff --git a/src/infrastructure/adapters/DenoWsAdapter.js b/src/infrastructure/adapters/DenoWsAdapter.js new file mode 100644 index 00000000..f5f64dc1 --- /dev/null +++ b/src/infrastructure/adapters/DenoWsAdapter.js @@ -0,0 +1,145 @@ +import WebSocketServerPort from '../../ports/WebSocketServerPort.js'; +import { normalizeHost, assertNotListening, messageToString } from './wsAdapterUtils.js'; + +/** + * Wraps a Deno WebSocket (standard browser-like API) into a + * port-compliant WsConnection. + * + * Handler refs are set by `onMessage()`/`onClose()` before any + * messages can arrive, because `onConnection` runs synchronously + * inside `socket.onopen`. 
+ * + * @param {WebSocket} socket + * @returns {import('../../ports/WebSocketServerPort.js').WsConnection} + */ +function wrapDenoWs(socket) { + /** @type {((message: string) => void)|null} */ + let messageHandler = null; + /** @type {((code?: number, reason?: string) => void)|null} */ + let closeHandler = null; + /** @type {string[]} */ + const messageBuffer = []; + + socket.onmessage = (e) => { + const text = messageToString(e.data); + if (messageHandler) { + messageHandler(text); + } else { + messageBuffer.push(text); + } + }; + + socket.onclose = (e) => { + if (closeHandler) { + closeHandler(e.code, e.reason); + } + }; + + return { + send(message) { + if (socket.readyState === WebSocket.OPEN) { + socket.send(message); + } + }, + onMessage(handler) { + // Flush any messages that arrived before the handler was set + for (const buffered of messageBuffer) { + handler(buffered); + } + messageBuffer.length = 0; + messageHandler = handler; + }, + onClose(handler) { closeHandler = handler; }, + close() { socket.close(); }, + }; +} + +/** + * Builds the Deno.serve request handler that upgrades WS connections + * and optionally serves static files. 
+ * + * @param {(connection: import('../../ports/WebSocketServerPort.js').WsConnection) => void} onConnection + * @param {string|null} staticDir + * @returns {(req: Request) => Response|Promise} + */ +function createDenoHandler(onConnection, staticDir) { + return async (req) => { + const upgrade = req.headers.get('upgrade'); + if (upgrade && upgrade.toLowerCase() === 'websocket') { + const { socket, response } = globalThis.Deno.upgradeWebSocket(req); + socket.onopen = () => { onConnection(wrapDenoWs(socket)); }; + return response; + } + if (staticDir) { + const { handleStaticRequest } = await import('./staticFileHandler.js'); + const url = new URL(req.url); + const result = await handleStaticRequest(staticDir, url.pathname); + return new Response(/** @type {BodyInit|null} */ (result.body), { status: result.status, headers: result.headers }); + } + return new Response('Not Found', { status: 404 }); + }; +} + +/** + * Deno WebSocket adapter implementing WebSocketServerPort. + * + * Uses `globalThis.Deno.serve()` with `Deno.upgradeWebSocket()` to + * handle incoming WebSocket connections. When `staticDir` is provided, + * serves static files for non-WS requests. + * This file can be imported on any runtime but will fail at call-time + * if Deno is not available. 
+ * + * @extends WebSocketServerPort + */ +export default class DenoWsAdapter extends WebSocketServerPort { + /** + * @param {{ staticDir?: string|null }} [options] + */ + constructor({ staticDir } = {}) { + super(); + /** @type {string|null} */ + this._staticDir = staticDir || null; + } + + /** + * @param {(connection: import('../../ports/WebSocketServerPort.js').WsConnection) => void} onConnection + * @returns {import('../../ports/WebSocketServerPort.js').WsServerHandle} + */ + createServer(onConnection) { + /** @type {DenoServer|null} */ + let server = null; + const handler = createDenoHandler(onConnection, this._staticDir); + + return { + listen(/** @type {number} */ port, /** @type {string} [host] */ host = '127.0.0.1') { + assertNotListening(server); + const bindHost = normalizeHost(host); + return new Promise((resolve, reject) => { + try { + server = globalThis.Deno.serve( + { + port, + hostname: bindHost, + onListen() { + resolve({ port: /** @type {DenoServer} */ (server).addr.port, host: bindHost }); + }, + }, + handler, + ); + } catch (err) { + reject(err); + } + }); + }, + + close() { + if (!server) { + return Promise.resolve(); + } + const s = server; + server = null; + return s.shutdown(); + }, + }; + } +} diff --git a/src/infrastructure/adapters/GitGraphAdapter.js b/src/infrastructure/adapters/GitGraphAdapter.js index 52ad537a..d05c771c 100644 --- a/src/infrastructure/adapters/GitGraphAdapter.js +++ b/src/infrastructure/adapters/GitGraphAdapter.js @@ -43,7 +43,6 @@ * @see {@link https://git-scm.com/book/en/v2/Git-Internals-Plumbing-and-Porcelain} for Git plumbing concepts */ -import { Buffer } from 'node:buffer'; import { retry } from '@git-stunts/alfred'; import PersistenceError from '../../domain/errors/PersistenceError.js'; import GraphPersistencePort from '../../ports/GraphPersistencePort.js'; @@ -580,11 +579,11 @@ export default class GitGraphAdapter extends GraphPersistencePort { * Reads a tree and returns a map of path to content. 
* Reads blobs in batches of 16 to balance concurrency against fd/process limits. * @param {string} treeOid - The tree OID to read - * @returns {Promise>} Map of file path to blob content + * @returns {Promise>} Map of file path to blob content */ async readTree(treeOid) { const oids = await this.readTreeOids(treeOid); - /** @type {Record} */ + /** @type {Record} */ const files = {}; const entries = Object.entries(oids); const BATCH_SIZE = 16; @@ -642,7 +641,7 @@ export default class GitGraphAdapter extends GraphPersistencePort { /** * Reads the content of a Git blob. * @param {string} oid - The blob OID to read - * @returns {Promise} The blob content + * @returns {Promise} The blob content * @throws {Error} If the OID is invalid */ async readBlob(oid) { @@ -652,8 +651,8 @@ export default class GitGraphAdapter extends GraphPersistencePort { args: ['cat-file', 'blob', oid] }); const raw = await stream.collect({ asString: false }); - // Ensure a real Node Buffer (plumbing may return Uint8Array) - return Buffer.isBuffer(raw) ? raw : Buffer.from(raw); + // Return as-is — plumbing returns Buffer (which IS-A Uint8Array) + return /** @type {Uint8Array} */ (raw); } catch (err) { throw wrapGitError(/** @type {GitError} */ (err), { oid }); } diff --git a/src/infrastructure/adapters/InMemoryGraphAdapter.js b/src/infrastructure/adapters/InMemoryGraphAdapter.js index 34617904..dfab27ba 100644 --- a/src/infrastructure/adapters/InMemoryGraphAdapter.js +++ b/src/infrastructure/adapters/InMemoryGraphAdapter.js @@ -9,14 +9,115 @@ * SHA computation follows Git's object format so debugging is straightforward, * but cross-adapter SHA matching is NOT guaranteed. * + * Browser-compatible: the only Node-specific dependency (node:crypto) is + * lazy-loaded and can be replaced via the `hash` constructor option. 
+ * * @module infrastructure/adapters/InMemoryGraphAdapter */ -import { createHash } from 'node:crypto'; -import { Readable } from 'node:stream'; import GraphPersistencePort from '../../ports/GraphPersistencePort.js'; import { validateOid, validateRef, validateLimit, validateConfigKey } from './adapterValidation.js'; +// ── Browser-safe byte helpers ──────────────────────────────────────── + +const _encoder = new TextEncoder(); + +/** + * Concatenates an array of Uint8Array instances into one. + * @param {Uint8Array[]} arrays + * @returns {Uint8Array} + */ +function concatBytes(arrays) { + const len = arrays.reduce((sum, a) => sum + a.length, 0); + const result = new Uint8Array(len); + let offset = 0; + for (const a of arrays) { + result.set(a, offset); + offset += a.length; + } + return result; +} + +/** + * Converts a hex string to a Uint8Array. + * @param {string} hex + * @returns {Uint8Array} + */ +function hexToBytes(hex) { + const bytes = new Uint8Array(hex.length / 2); + for (let i = 0; i < hex.length; i += 2) { + bytes[i / 2] = parseInt(hex.slice(i, i + 2), 16); + } + return bytes; +} + +/** + * Converts a string or Uint8Array to bytes. + * @param {string|Uint8Array} data + * @returns {Uint8Array} + */ +function toBytes(data) { + if (data instanceof Uint8Array) { + return data; + } + if (typeof data === 'string') { + return _encoder.encode(data); + } + throw new Error('Expected string or Uint8Array'); +} + +// ── Lazy node:crypto for default hash ──────────────────────────────── + +/** + * Module-level crypto probe state — intentionally shared across all + * InMemoryGraphAdapter instances. The probe runs at most once per + * process; subsequent instances reuse the cached result. This avoids + * repeated dynamic `import('node:crypto')` calls which are both slow + * and unnecessary (the availability of node:crypto doesn't change + * within a single process lifetime). 
+ */ +/** @type {Function|null} */ +let _nodeCreateHash = null; +/** @type {boolean} */ +let _cryptoProbed = false; + +/** + * Lazily probes for node:crypto on first call. Avoids top-level await + * which forces the module into async evaluation — problematic for + * bundlers and non-Node runtimes where the import always fails. + * + * @returns {Promise} createHash or null + */ +async function probeNodeCrypto() { + if (_cryptoProbed) { + return _nodeCreateHash; + } + _cryptoProbed = true; + try { + const nodeCrypto = await import('node:crypto'); + _nodeCreateHash = nodeCrypto.createHash; + } catch { + // Browser or non-Node runtime — hash must be injected via constructor + } + return _nodeCreateHash; +} + +/** + * Default hash function using node:crypto SHA-1. + * Synchronous after the first call resolves the lazy probe. + * + * @param {Uint8Array} data + * @returns {string} 40-hex SHA + */ +function defaultHash(data) { + if (!_nodeCreateHash) { + throw new Error( + 'No hash function available. Pass { hash } to InMemoryGraphAdapter constructor.', + ); + } + return _nodeCreateHash('sha1').update(data).digest('hex'); +} + /** Well-known SHA for Git's empty tree. */ const EMPTY_TREE_OID = '4b825dc642cb6eb9a060e54bf8d69288fbee4904'; @@ -24,12 +125,13 @@ const EMPTY_TREE_OID = '4b825dc642cb6eb9a060e54bf8d69288fbee4904'; /** * Computes a Git blob SHA-1: `SHA1("blob " + len + "\0" + content)`. 
- * @param {Buffer} content + * @param {(data: Uint8Array) => string} hash + * @param {Uint8Array} content * @returns {string} 40-hex SHA */ -function hashBlob(content) { - const header = Buffer.from(`blob ${content.length}\0`); - return createHash('sha1').update(header).update(content).digest('hex'); +function hashBlob(hash, content) { + const header = _encoder.encode(`blob ${content.length}\0`); + return hash(concatBytes([header, content])); } /** @@ -38,27 +140,28 @@ function hashBlob(content) { * Each entry is: ` \0<20-byte binary OID>` * Entries are sorted by path (byte order), matching Git's canonical sort. * + * @param {(data: Uint8Array) => string} hash * @param {Array<{mode: string, path: string, oid: string}>} entries * @returns {string} 40-hex SHA */ -function hashTree(entries) { +function hashTree(hash, entries) { const sorted = [...entries].sort((a, b) => (a.path < b.path ? -1 : a.path > b.path ? 1 : 0)); const parts = sorted.map(e => { - const prefix = Buffer.from(`${e.mode} ${e.path}\0`); - const oidBin = Buffer.from(e.oid, 'hex'); - return Buffer.concat([prefix, oidBin]); + const prefix = _encoder.encode(`${e.mode} ${e.path}\0`); + return concatBytes([prefix, hexToBytes(e.oid)]); }); - const body = Buffer.concat(parts); - const header = Buffer.from(`tree ${body.length}\0`); - return createHash('sha1').update(header).update(body).digest('hex'); + const body = concatBytes(parts); + const header = _encoder.encode(`tree ${body.length}\0`); + return hash(concatBytes([header, body])); } /** * Builds a Git-style commit string and hashes it. 
+ * @param {(data: Uint8Array) => string} hash * @param {{treeOid: string, parents: string[], message: string, author: string, date: string}} opts * @returns {string} 40-hex SHA */ -function hashCommit({ treeOid, parents, message, author, date }) { +function hashCommit(hash, { treeOid, parents, message, author, date }) { const lines = [`tree ${treeOid}`]; for (const p of parents) { lines.push(`parent ${p}`); @@ -67,9 +170,9 @@ function hashCommit({ treeOid, parents, message, author, date }) { lines.push(`committer ${author} ${date}`); lines.push(''); lines.push(message); - const body = lines.join('\n'); - const header = `commit ${Buffer.byteLength(body)}\0`; - return createHash('sha1').update(header).update(body).digest('hex'); + const bodyBytes = _encoder.encode(lines.join('\n')); + const header = _encoder.encode(`commit ${bodyBytes.length}\0`); + return hash(concatBytes([header, bodyBytes])); } // ── Adapter ───────────────────────────────────────────────────────────── @@ -79,7 +182,7 @@ function hashCommit({ treeOid, parents, message, author, date }) { * * Data structures: * - `_commits` — Map - * - `_blobs` — Map + * - `_blobs` — Map * - `_trees` — Map> * - `_refs` — Map * - `_config` — Map @@ -88,16 +191,24 @@ function hashCommit({ treeOid, parents, message, author, date }) { */ export default class InMemoryGraphAdapter extends GraphPersistencePort { /** - * @param {{ author?: string, clock?: { now: () => number } }} [options] + * @param {{ author?: string, clock?: { now: () => number }, hash?: (data: Uint8Array) => string }} [options] */ - constructor({ author, clock } = {}) { + constructor({ author, clock, hash } = {}) { super(); this._author = author || 'InMemory '; this._clock = clock || { now: () => Date.now() }; + this._hash = hash || defaultHash; + // Eagerly kick off the async probe so node:crypto is resolved by the + // time the first hash call arrives. The probe is a no-op on repeat calls. 
+ if (!hash) { + this._cryptoReady = probeNodeCrypto(); + } else { + this._cryptoReady = Promise.resolve(null); + } /** @type {Map} */ this._commits = new Map(); - /** @type {Map} */ + /** @type {Map} */ this._blobs = new Map(); /** @type {Map>} */ this._trees = new Map(); @@ -120,6 +231,7 @@ export default class InMemoryGraphAdapter extends GraphPersistencePort { * @returns {Promise} */ async writeTree(entries) { + await this._cryptoReady; const parsed = entries.map(line => { const tabIdx = line.indexOf('\t'); if (tabIdx === -1) { @@ -130,7 +242,7 @@ export default class InMemoryGraphAdapter extends GraphPersistencePort { const [mode, , oid] = meta.split(' '); return { mode, path, oid }; }); - const oid = hashTree(parsed); + const oid = hashTree(this._hash, parsed); this._trees.set(oid, parsed); return oid; } @@ -158,11 +270,11 @@ export default class InMemoryGraphAdapter extends GraphPersistencePort { /** * @param {string} treeOid - * @returns {Promise>} + * @returns {Promise>} */ async readTree(treeOid) { const oids = await this.readTreeOids(treeOid); - /** @type {Record} */ + /** @type {Record} */ const files = {}; for (const [path, oid] of Object.entries(oids)) { files[path] = await this.readBlob(oid); @@ -173,19 +285,20 @@ export default class InMemoryGraphAdapter extends GraphPersistencePort { // ── BlobPort ──────────────────────────────────────────────────────── /** - * @param {Buffer|string} content + * @param {Uint8Array|string} content * @returns {Promise} */ async writeBlob(content) { - const buf = Buffer.isBuffer(content) ? 
content : Buffer.from(content); - const oid = hashBlob(buf); - this._blobs.set(oid, buf); + await this._cryptoReady; + const bytes = toBytes(content); + const oid = hashBlob(this._hash, bytes); + this._blobs.set(oid, bytes); return oid; } /** * @param {string} oid - * @returns {Promise} + * @returns {Promise} */ async readBlob(oid) { validateOid(oid); @@ -206,7 +319,7 @@ export default class InMemoryGraphAdapter extends GraphPersistencePort { for (const p of parents) { validateOid(p); } - return this._createCommit(EMPTY_TREE_OID, parents, message); + return await this._createCommit(EMPTY_TREE_OID, parents, message); } /** @@ -218,7 +331,7 @@ export default class InMemoryGraphAdapter extends GraphPersistencePort { for (const p of parents) { validateOid(p); } - return this._createCommit(treeOid, parents, message); + return await this._createCommit(treeOid, parents, message); } /** @@ -321,13 +434,14 @@ export default class InMemoryGraphAdapter extends GraphPersistencePort { /** * @param {{ ref: string, limit?: number, format?: string }} options - * @returns {Promise} + * @returns {Promise} */ async logNodesStream({ ref, limit = 1000000, format: _format }) { validateRef(ref); validateLimit(limit); const records = this._walkLog(ref, limit); const formatted = records.map(c => this._formatCommitRecord(c)).join('\0') + (records.length > 0 ? 
'\0' : ''); + const { Readable } = await import('node:stream'); return Readable.from([formatted]); } @@ -444,11 +558,12 @@ export default class InMemoryGraphAdapter extends GraphPersistencePort { * @param {string} treeOid * @param {string[]} parents * @param {string} message - * @returns {string} + * @returns {Promise} */ - _createCommit(treeOid, parents, message) { + async _createCommit(treeOid, parents, message) { + await this._cryptoReady; const date = new Date(this._clock.now()).toISOString(); - const sha = hashCommit({ + const sha = hashCommit(this._hash, { treeOid, parents, message, diff --git a/src/infrastructure/adapters/LoggerObservabilityBridge.js b/src/infrastructure/adapters/LoggerObservabilityBridge.js new file mode 100644 index 00000000..e61d5549 --- /dev/null +++ b/src/infrastructure/adapters/LoggerObservabilityBridge.js @@ -0,0 +1,61 @@ +/** + * LoggerObservabilityBridge — bridges git-cas's ObservabilityPort to + * git-warp's LoggerPort. + * + * Translates ObservabilityPort calls (metric, log, span) into LoggerPort + * calls so that CAS operations surface through git-warp's existing + * structured logging infrastructure. + * + * @module infrastructure/adapters/LoggerObservabilityBridge + */ + +/** + * @typedef {import('../../ports/LoggerPort.js').default} LoggerPort + */ + +export default class LoggerObservabilityBridge { + /** + * @param {LoggerPort} logger + */ + constructor(logger) { + this._logger = logger; + } + + /** + * Forward a metric as a debug-level log with structured context. + * + * @param {string} channel + * @param {Record} data + */ + metric(channel, data) { + this._logger.debug(`cas:metric:${channel}`, data); + } + + /** + * Forward a log call to the corresponding LoggerPort level method. + * + * @param {'debug'|'info'|'warn'|'error'} level + * @param {string} msg + * @param {Record} [meta] + */ + log(level, msg, meta) { + this._logger[level](msg, meta); + } + + /** + * Start a named span. 
Returns an object with `end()` that logs + * span duration as a debug metric. + * + * @param {string} name + * @returns {{ end(meta?: Record): void }} + */ + span(name) { + const start = performance.now(); + return { + end: (meta) => { + const durationMs = performance.now() - start; + this._logger.debug(`cas:span:${name}`, { ...meta, durationMs }); + }, + }; + } +} diff --git a/src/infrastructure/adapters/NodeCryptoAdapter.js b/src/infrastructure/adapters/NodeCryptoAdapter.js index 9f3e86b1..3fbbbd3f 100644 --- a/src/infrastructure/adapters/NodeCryptoAdapter.js +++ b/src/infrastructure/adapters/NodeCryptoAdapter.js @@ -15,7 +15,7 @@ import { export default class NodeCryptoAdapter extends CryptoPort { /** * @param {string} algorithm - * @param {string|Buffer|Uint8Array} data + * @param {string|Uint8Array} data * @returns {Promise} */ // eslint-disable-next-line @typescript-eslint/require-await -- async ensures sync throws become rejected promises @@ -25,18 +25,19 @@ export default class NodeCryptoAdapter extends CryptoPort { /** * @param {string} algorithm - * @param {string|Buffer|Uint8Array} key - * @param {string|Buffer|Uint8Array} data - * @returns {Promise} + * @param {string|Uint8Array} key + * @param {string|Uint8Array} data + * @returns {Promise} */ // eslint-disable-next-line @typescript-eslint/require-await -- async ensures sync throws become rejected promises async hmac(algorithm, key, data) { - return createHmac(algorithm, key).update(data).digest(); + const result = createHmac(algorithm, key).update(data).digest(); + return new Uint8Array(result); } /** - * @param {Buffer|Uint8Array} a - * @param {Buffer|Uint8Array} b + * @param {Uint8Array} a + * @param {Uint8Array} b * @returns {boolean} */ timingSafeEqual(a, b) { diff --git a/src/infrastructure/adapters/NodeWsAdapter.js b/src/infrastructure/adapters/NodeWsAdapter.js new file mode 100644 index 00000000..2224540f --- /dev/null +++ b/src/infrastructure/adapters/NodeWsAdapter.js @@ -0,0 +1,219 @@ +import 
{ createServer as createHttpServer } from 'node:http'; +import { WebSocketServer } from 'ws'; +import WebSocketServerPort from '../../ports/WebSocketServerPort.js'; +import { normalizeHost, assertNotListening, messageToString } from './wsAdapterUtils.js'; + +/** + * Wraps a raw `ws` WebSocket into a port-compliant WsConnection. + * + * @param {import('ws').WebSocket} ws + * @returns {import('../../ports/WebSocketServerPort.js').WsConnection} + */ +function wrapConnection(ws) { + /** @type {((message: string) => void)|null} */ + let messageHandler = null; + /** @type {((code?: number, reason?: string) => void)|null} */ + let closeHandler = null; + /** @type {string[]} */ + const messageBuffer = []; + + ws.on('message', (/** @type {import('ws').RawData} */ data) => { + const text = messageToString(data); + if (messageHandler) { + messageHandler(text); + } else { + messageBuffer.push(text); + } + }); + + ws.on('close', (/** @type {number} */ code, /** @type {Buffer} */ reason) => { + if (closeHandler) { + closeHandler(code, reason?.toString()); + } + }); + + return { + send(message) { + if (ws.readyState === ws.OPEN) { + ws.send(message); + } + }, + onMessage(handler) { + // Flush any messages that arrived before the handler was set + for (const buffered of messageBuffer) { + handler(buffered); + } + messageBuffer.length = 0; + messageHandler = handler; + }, + onClose(handler) { closeHandler = handler; }, + close() { ws.close(); }, + }; +} + +/** + * Creates an HTTP request handler that serves static files. + * + * @param {string} staticDir + * @param {((err: Error) => void)|undefined} [onError] + * @returns {(req: import('node:http').IncomingMessage, res: import('node:http').ServerResponse) => void} + */ +function createStaticHandler(staticDir, onError) { + /** @type {typeof import('./staticFileHandler.js').handleStaticRequest|null} */ + let handler = null; + return (req, res) => { + const urlPath = new URL(req.url || '/', 'http://localhost').pathname; + (handler + ? 
Promise.resolve(handler) + : import('./staticFileHandler.js').then(m => { handler = m.handleStaticRequest; return handler; }) + ).then((h) => h(staticDir, urlPath)).then((result) => { + res.writeHead(result.status, result.headers); + res.end(result.body); + }).catch((/** @type {Error} */ err) => { + if (onError) { onError(err); } + res.writeHead(500, { 'content-type': 'text/plain' }); + res.end('Internal Server Error'); + }); + }; +} + +/** + * @typedef {Object} ListenOptions + * @property {(conn: import('../../ports/WebSocketServerPort.js').WsConnection) => void} onConnection + * @property {number} port + * @property {string} bindHost + * @property {{ wss: WebSocketServer|null, httpServer: import('node:http').Server|null }} state + * @property {((err: Error) => void)|undefined} [onError] + */ + +/** + * Starts listening with an HTTP server underneath for static file serving. + * + * @param {string} staticDir + * @param {ListenOptions} opts + * @returns {Promise<{ port: number, host: string }>} + */ +function listenWithHttp(staticDir, opts) { + const { onConnection, port, bindHost, state, onError } = opts; + return new Promise((resolve, reject) => { + state.httpServer = createHttpServer(createStaticHandler(staticDir, onError)); + state.wss = new WebSocketServer({ server: state.httpServer }); + state.wss.on('connection', (/** @type {import('ws').WebSocket} */ ws) => onConnection(wrapConnection(ws))); + const onStartupError = (/** @type {Error} */ err) => { + state.wss = null; + state.httpServer = null; + reject(err); + }; + state.httpServer.on('error', onStartupError); + state.httpServer.listen(port, bindHost, () => { + // Remove startup error listener — the promise is resolved. + state.httpServer?.removeListener('error', onStartupError); + state.httpServer?.on('error', (/** @type {Error} */ err) => { if (onError) { onError(err); } }); + const addr = state.httpServer?.address(); + const actualPort = typeof addr === 'object' && addr ? 
addr.port : port; + resolve({ port: actualPort, host: bindHost }); + }); + }); +} + +/** + * Starts listening with a standalone WebSocket server (no HTTP). + * + * @param {ListenOptions} opts + * @returns {Promise<{ port: number, host: string }>} + */ +function listenWsOnly(opts) { + const { onConnection, port, bindHost, state, onError } = opts; + return new Promise((resolve, reject) => { + state.wss = new WebSocketServer({ port, host: bindHost }); + const onStartupError = (/** @type {Error} */ err) => { + state.wss = null; + state.httpServer = null; + reject(err); + }; + state.wss.on('listening', () => { + // Remove startup error listener — the promise is resolved. + state.wss?.removeListener('error', onStartupError); + state.wss?.on('error', (/** @type {Error} */ err) => { if (onError) { onError(err); } }); + const addr = state.wss?.address(); + const actualPort = typeof addr === 'object' && addr ? addr.port : port; + resolve({ port: actualPort, host: bindHost }); + }); + state.wss.on('error', onStartupError); + state.wss.on('connection', (/** @type {import('ws').WebSocket} */ ws) => onConnection(wrapConnection(ws))); + }); +} + +/** + * Node.js WebSocket adapter implementing WebSocketServerPort. + * + * Uses the `ws` npm package for WebSocket server functionality. + * This is the only file that imports `ws` directly. + * + * When `staticDir` is provided, creates an HTTP server that serves + * static files and mounts the WebSocket server on top of it. 
+ * + * @extends WebSocketServerPort + */ +export default class NodeWsAdapter extends WebSocketServerPort { + /** + * @param {{ staticDir?: string|null, onError?: (err: Error) => void }} [options] + */ + constructor({ staticDir, onError } = {}) { + super(); + /** @type {string|null} */ + this._staticDir = staticDir || null; + /** @type {((err: Error) => void)|undefined} */ + this._onError = onError; + } + + /** + * @param {(connection: import('../../ports/WebSocketServerPort.js').WsConnection) => void} onConnection + * @returns {import('../../ports/WebSocketServerPort.js').WsServerHandle} + */ + createServer(onConnection) { + /** @type {{ wss: WebSocketServer|null, httpServer: import('node:http').Server|null }} */ + const state = { wss: null, httpServer: null }; + const staticDir = this._staticDir; + const onError = this._onError; + + return { + listen(/** @type {number} */ port, /** @type {string} [host] */ host = '127.0.0.1') { + assertNotListening(state.wss); + const bindHost = normalizeHost(host); + const opts = { onConnection, port, bindHost, state, onError }; + if (staticDir) { + return listenWithHttp(staticDir, opts); + } + return listenWsOnly(opts); + }, + + close() { + return new Promise((resolve, reject) => { + if (!state.wss) { + resolve(); + return; + } + for (const client of state.wss.clients) { + client.close(); + } + const httpSrv = state.httpServer; + state.wss.close((/** @type {Error|undefined} */ wssErr) => { + state.wss = null; + state.httpServer = null; + if (httpSrv) { + httpSrv.close((httpErr) => { + const err = wssErr || httpErr; + if (err) { reject(err); } else { resolve(); } + }); + } else if (wssErr) { + reject(wssErr); + } else { + resolve(); + } + }); + }); + }, + }; + } +} diff --git a/src/domain/trust/TrustCrypto.js b/src/infrastructure/adapters/TrustCryptoAdapter.js similarity index 89% rename from src/domain/trust/TrustCrypto.js rename to src/infrastructure/adapters/TrustCryptoAdapter.js index ddb5c6df..ba973530 100644 --- 
a/src/domain/trust/TrustCrypto.js +++ b/src/infrastructure/adapters/TrustCryptoAdapter.js @@ -4,12 +4,16 @@ * Uses `node:crypto` directly — Ed25519 is trust-specific and does not * belong on the general CryptoPort hash/hmac interface. * - * @module domain/trust/TrustCrypto + * This module lives in infrastructure because it depends on `node:crypto` + * and `Buffer`. Import directly from this file. The former domain re-export + * (`src/domain/trust/TrustCrypto.js`) was removed in v14. + * + * @module infrastructure/adapters/TrustCryptoAdapter * @see docs/specs/TRUST_V1_CRYPTO.md */ import { createHash, createPublicKey, verify } from 'node:crypto'; -import TrustError from '../errors/TrustError.js'; +import TrustError from '../../domain/errors/TrustError.js'; /** Algorithms supported by this module. */ export const SUPPORTED_ALGORITHMS = new Set(['ed25519']); @@ -63,7 +67,7 @@ function decodePublicKey(base64) { /** * Verifies an Ed25519 signature against a payload. * - * @param {{ algorithm: string, publicKeyBase64: string, signatureBase64: string, payload: Buffer }} params + * @param {{ algorithm: string, publicKeyBase64: string, signatureBase64: string, payload: Uint8Array }} params * @returns {boolean} true if signature is valid * @throws {TrustError} E_TRUST_UNSUPPORTED_ALGORITHM for non-ed25519 * @throws {TrustError} E_TRUST_INVALID_KEY for malformed public key diff --git a/src/infrastructure/adapters/WebCryptoAdapter.js b/src/infrastructure/adapters/WebCryptoAdapter.js index dbbf8d08..08b050be 100644 --- a/src/infrastructure/adapters/WebCryptoAdapter.js +++ b/src/infrastructure/adapters/WebCryptoAdapter.js @@ -31,7 +31,7 @@ function toWebCryptoAlgo(algorithm) { /** * Converts input data to a Uint8Array for Web Crypto API consumption. 
- * @param {string|Buffer|Uint8Array} data - Input data + * @param {string|Uint8Array} data - Input data * @returns {Uint8Array} Data as Uint8Array * @throws {Error} If data type is not supported */ @@ -79,7 +79,7 @@ export default class WebCryptoAdapter extends CryptoPort { /** * @param {string} algorithm - * @param {string|Buffer|Uint8Array} data + * @param {string|Uint8Array} data * @returns {Promise} */ async hash(algorithm, data) { @@ -92,8 +92,8 @@ export default class WebCryptoAdapter extends CryptoPort { /** * @param {string} algorithm - * @param {string|Buffer|Uint8Array} key - * @param {string|Buffer|Uint8Array} data + * @param {string|Uint8Array} key + * @param {string|Uint8Array} data * @returns {Promise} */ async hmac(algorithm, key, data) { @@ -115,9 +115,9 @@ export default class WebCryptoAdapter extends CryptoPort { * Uses XOR accumulation with no early exit to prevent timing attacks. * This is the standard approach when crypto.timingSafeEqual is unavailable. * - * @param {Buffer|Uint8Array} a - First buffer - * @param {Buffer|Uint8Array} b - Second buffer - * @returns {boolean} True if buffers are equal + * @param {Uint8Array} a - First byte array + * @param {Uint8Array} b - Second byte array + * @returns {boolean} True if byte arrays are equal */ timingSafeEqual(a, b) { if (a.length !== b.length) { return false; } diff --git a/src/infrastructure/adapters/httpAdapterUtils.js b/src/infrastructure/adapters/httpAdapterUtils.js index 971e4f7a..dd9151f7 100644 --- a/src/infrastructure/adapters/httpAdapterUtils.js +++ b/src/infrastructure/adapters/httpAdapterUtils.js @@ -51,3 +51,51 @@ export async function readStreamBody(bodyStream) { /** No-op logger matching the `{ error(...) }` interface. 
*/ export const noopLogger = { error() {} }; + +// ── Shared error response bodies ──────────────────────────────────────────── + +export const ERROR_BODY = 'Internal Server Error'; +export const ERROR_BODY_BYTES = new TextEncoder().encode(ERROR_BODY); +export const ERROR_BODY_LENGTH = String(ERROR_BODY_BYTES.byteLength); + +export const PAYLOAD_TOO_LARGE_BODY = 'Payload Too Large'; +export const PAYLOAD_TOO_LARGE_BYTES = new TextEncoder().encode(PAYLOAD_TOO_LARGE_BODY); +export const PAYLOAD_TOO_LARGE_LENGTH = String(PAYLOAD_TOO_LARGE_BYTES.byteLength); + +// ── Shared request conversion ─────────────────────────────────────────────── + +/** + * Converts a Web API Request into the plain-object format expected by + * HttpServerPort request handlers. + * + * Used by both BunHttpAdapter and DenoHttpAdapter. + * + * @param {Request} request - Web API Request + * @returns {Promise<{ method: string, url: string, headers: Record, body: Uint8Array|undefined }>} + */ +export async function toPortRequest(request) { + /** @type {Record} */ + const headers = {}; + request.headers.forEach((value, key) => { + headers[key] = value; + }); + + let body; + if (request.method !== 'GET' && request.method !== 'HEAD') { + const cl = headers['content-length']; + if (cl !== undefined && Number(cl) > MAX_BODY_BYTES) { + throw Object.assign(new Error('Payload Too Large'), { status: 413 }); + } + if (request.body) { + body = await readStreamBody(request.body); + } + } + + const parsedUrl = new URL(request.url); + return { + method: request.method, + url: parsedUrl.pathname + parsedUrl.search, + headers, + body, + }; +} diff --git a/src/infrastructure/adapters/lazyCasInit.js b/src/infrastructure/adapters/lazyCasInit.js new file mode 100644 index 00000000..166d75a9 --- /dev/null +++ b/src/infrastructure/adapters/lazyCasInit.js @@ -0,0 +1,32 @@ +/** + * Shared lazy-init helper for CAS (Content Addressable Store) adapters. 
+ * + * Both CasBlobAdapter and CasSeekCacheAdapter use the same pattern: + * cache a pending promise, reset on failure so the next call retries. + * + * @module infrastructure/adapters/lazyCasInit + * @private + */ + +/** + * Creates a lazy CAS initializer that caches the resolved promise + * and resets on rejection so subsequent calls retry. + * + * @template T + * @param {() => Promise} initFn - Factory that creates the CAS instance + * @returns {() => Promise} A `getCas()` function + */ +export function createLazyCas(initFn) { + /** @type {Promise | null} */ + let promise = null; + + return () => { + if (!promise) { + promise = initFn().catch((err) => { + promise = null; + throw err; + }); + } + return promise; + }; +} diff --git a/src/infrastructure/adapters/sha1sync.js b/src/infrastructure/adapters/sha1sync.js new file mode 100644 index 00000000..b3f7f1cc --- /dev/null +++ b/src/infrastructure/adapters/sha1sync.js @@ -0,0 +1,120 @@ +/** + * Synchronous SHA-1 for browser use with InMemoryGraphAdapter. + * + * This is a minimal, standards-compliant SHA-1 implementation used + * solely for Git content addressing (blob/tree/commit object IDs). + * It is NOT used for security purposes. + * + * @module infrastructure/adapters/sha1sync + */ + +/** + * Left-rotate a 32-bit integer by n bits. + * @param {number} x + * @param {number} n + * @returns {number} + */ +function rotl(x, n) { + return ((x << n) | (x >>> (32 - n))) >>> 0; +} + +/** + * Pads and parses a message into 512-bit blocks for SHA-1. + * @param {Uint8Array} msg + * @returns {Uint32Array[]} + */ +function preprocess(msg) { + const bitLen = msg.length * 8; + const totalBytes = msg.length + 1 + ((119 - (msg.length % 64)) % 64) + 8; + const padded = new Uint8Array(totalBytes); + padded.set(msg); + padded[msg.length] = 0x80; + const dv = new DataView(padded.buffer); + // SHA-1 spec requires 64-bit big-endian message length in the final 8 bytes. 
+ // High 32 bits are zero-initialized by the Uint8Array, so we only set the + // low 32 bits. msg.length is safe because `bitLen = msg.length * 8` stays + // within uint32 range for messages under 512 MB (0x20000000 bytes). + dv.setUint32(totalBytes - 4, bitLen, false); + + const blocks = []; + for (let i = 0; i < totalBytes; i += 64) { + const block = new Uint32Array(80); + for (let j = 0; j < 16; j++) { + block[j] = dv.getUint32(i + j * 4, false); + } + for (let j = 16; j < 80; j++) { + block[j] = rotl(block[j - 3] ^ block[j - 8] ^ block[j - 14] ^ block[j - 16], 1); + } + blocks.push(block); + } + return blocks; +} + +/** + * Processes a single 512-bit block, updating the hash state in-place. + * @param {number[]} state - Five-element hash state [h0..h4] + * @param {Uint32Array} w - 80-word expanded block + */ +function processBlock(state, w) { + let a = state[0]; + let b = state[1]; + let c = state[2]; + let d = state[3]; + let e = state[4]; + + for (let i = 0; i < 80; i++) { + let f; + let k; + if (i < 20) { + f = (b & c) | (~b & d); + k = 0x5A827999; + } else if (i < 40) { + f = b ^ c ^ d; + k = 0x6ED9EBA1; + } else if (i < 60) { + f = (b & c) | (b & d) | (c & d); + k = 0x8F1BBCDC; + } else { + f = b ^ c ^ d; + k = 0xCA62C1D6; + } + + const temp = (rotl(a, 5) + f + e + k + w[i]) >>> 0; + e = d; + d = c; + c = rotl(b, 30); + b = a; + a = temp; + } + + state[0] = (state[0] + a) >>> 0; + state[1] = (state[1] + b) >>> 0; + state[2] = (state[2] + c) >>> 0; + state[3] = (state[3] + d) >>> 0; + state[4] = (state[4] + e) >>> 0; +} + +/** + * Computes the SHA-1 hash of a Uint8Array, returning a 40-char hex string. 
+ * + * @param {Uint8Array} data + * @returns {string} 40-hex SHA-1 digest + * + * @example + * import { sha1sync } from './sha1sync.js'; + * const hex = sha1sync(new TextEncoder().encode('hello')); + * // => 'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d' + */ +export function sha1sync(data) { + if (data.length >= 0x20000000) { + throw new RangeError('sha1sync: input exceeds 512 MB limit'); + } + const blocks = preprocess(data); + const state = [0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0]; + + for (const w of blocks) { + processBlock(state, w); + } + + return state.map(v => v.toString(16).padStart(8, '0')).join(''); +} diff --git a/src/infrastructure/adapters/staticFileHandler.js b/src/infrastructure/adapters/staticFileHandler.js new file mode 100644 index 00000000..19e3ea22 --- /dev/null +++ b/src/infrastructure/adapters/staticFileHandler.js @@ -0,0 +1,134 @@ +import { readFile, realpath } from 'node:fs/promises'; +import { resolve, extname, sep, normalize } from 'node:path'; + +/** + * Minimal MIME type map covering typical SPA assets. + * @type {Record} + */ +const MIME_TYPES = { + '.html': 'text/html; charset=utf-8', + '.js': 'text/javascript; charset=utf-8', + '.mjs': 'text/javascript; charset=utf-8', + '.css': 'text/css; charset=utf-8', + '.json': 'application/json; charset=utf-8', + '.svg': 'image/svg+xml', + '.png': 'image/png', + '.jpg': 'image/jpeg', + '.jpeg': 'image/jpeg', + '.gif': 'image/gif', + '.ico': 'image/x-icon', + '.woff': 'font/woff', + '.woff2': 'font/woff2', + '.map': 'application/json', + '.txt': 'text/plain; charset=utf-8', +}; + +const FORBIDDEN = Object.freeze({ status: 403, headers: Object.freeze({ 'content-type': 'text/plain' }), body: new TextEncoder().encode('Forbidden') }); +const NOT_FOUND = Object.freeze({ status: 404, headers: Object.freeze({ 'content-type': 'text/plain' }), body: new TextEncoder().encode('Not Found') }); + +/** + * Resolves and validates a URL path against a static directory root. 
+ * Returns null if the path escapes the root (traversal attack). + * + * @param {string} root - Absolute path to the static directory + * @param {string} urlPath - URL path (e.g., "/assets/index.js") + * @returns {string|null} Resolved absolute file path, or null if blocked + */ +function safePath(root, urlPath) { + if (urlPath.includes('\0')) { + return null; + } + + /** @type {string} */ + let decoded; + try { + decoded = decodeURIComponent(urlPath); + } catch { + return null; + } + + if (decoded.includes('\0')) { + return null; + } + + const resolved = resolve(root, `.${normalize(`/${decoded}`)}`); + + if (!resolved.startsWith(`${root}${sep}`) && resolved !== root) { + return null; + } + + return resolved; +} + +/** + * Reads a file and returns a static response with the correct MIME type. + * Resolves symlinks before reading and re-checks the real path against + * the root directory to prevent symlink-based traversal attacks. + * + * @param {string} root - Absolute path to the static directory root + * @param {string} filePath - Absolute path to the file + * @param {string} [mimeOverride] - Optional MIME type override + * @returns {Promise<{ status: number, headers: Record, body: Uint8Array }|null>} + */ +async function tryReadFile(root, filePath, mimeOverride) { + try { + // Resolve symlinks to prevent traversal via symlinks pointing outside root. + // Both root and filePath must be resolved — on macOS, /var → /private/var. 
+ const realRoot = await realpath(root); + const real = await realpath(filePath); + if (!real.startsWith(`${realRoot}${sep}`) && real !== realRoot) { + return null; + } + const body = await readFile(real); + const contentType = mimeOverride || MIME_TYPES[extname(filePath).toLowerCase()] || 'application/octet-stream'; + return { + status: 200, + headers: { + 'content-type': contentType, + 'content-length': String(body.byteLength), + }, + body: new Uint8Array(body), + }; + } catch { + return null; + } +} + +/** + * Handles an HTTP request for a static file. + * + * - Serves `index.html` for `/` and directory-like paths + * - Returns correct MIME types for known extensions + * - SPA fallback: extensionless paths serve `index.html` + * - Path traversal prevention: rejects `..` escapes and null bytes + * + * @param {string} staticDir - Absolute path to the static file directory + * @param {string} urlPath - Request URL path (e.g., "/", "/assets/index.js") + * @returns {Promise<{ status: number, headers: Record, body: Uint8Array|null }>} + */ +export async function handleStaticRequest(staticDir, urlPath) { + const cleanPath = urlPath === '/' || urlPath.endsWith('/') ? 
`${urlPath}index.html` : urlPath; + const filePath = safePath(staticDir, cleanPath); + + if (!filePath) { + return FORBIDDEN; + } + + const result = await tryReadFile(staticDir, filePath); + if (result) { + return result; + } + + // SPA fallback: extensionless paths serve index.html + if (!extname(cleanPath)) { + const indexPath = safePath(staticDir, '/index.html'); + if (indexPath) { + const indexResult = await tryReadFile(staticDir, indexPath, 'text/html; charset=utf-8'); + if (indexResult) { + return indexResult; + } + } + } + + return NOT_FOUND; +} diff --git a/src/infrastructure/adapters/wsAdapterUtils.js b/src/infrastructure/adapters/wsAdapterUtils.js new file mode 100644 index 00000000..52f335c4 --- /dev/null +++ b/src/infrastructure/adapters/wsAdapterUtils.js @@ -0,0 +1,72 @@ +/** + * Shared utilities for WebSocket server adapters. + * + * Extracted from BunWsAdapter, DenoWsAdapter, and NodeWsAdapter + * to eliminate duplicated constants and message-decoding logic. + * Follows the same pattern as httpAdapterUtils.js. + * + * @module infrastructure/adapters/wsAdapterUtils + * @private + */ + +/** Default bind host (loopback only). */ +export const DEFAULT_HOST = '127.0.0.1'; + +/** + * Normalizes a host parameter, falling back to loopback. + * + * @param {string} [host] + * @returns {string} + */ +export function normalizeHost(host) { + return host || DEFAULT_HOST; +} + +/** + * Guards against calling `listen()` on a server that is already running. + * Throws if `server` is truthy. + * + * @param {unknown} server - The current server handle + * @returns {void} + */ +export function assertNotListening(server) { + if (server) { + throw new Error('Server already listening. Call close() before listening again.'); + } +} + +const _textDecoder = new TextDecoder(); + +/** + * Converts a WebSocket message payload to a UTF-8 string. + * Handles both string data and binary data (ArrayBuffer, Uint8Array). 
+ * + * @param {string|ArrayBuffer|Uint8Array|Buffer|Buffer[]} data + * @returns {string} + */ +export function messageToString(data) { + if (typeof data === 'string') { + return data; + } + if (ArrayBuffer.isView(data)) { + return _textDecoder.decode(data); + } + if (data instanceof ArrayBuffer) { + return _textDecoder.decode(data); + } + // Node `ws` can send Buffer[] for fragmented messages + if (Array.isArray(data)) { + let total = 0; + for (const chunk of data) { + total += chunk.byteLength; + } + const merged = new Uint8Array(total); + let offset = 0; + for (const chunk of data) { + merged.set(chunk, offset); + offset += chunk.byteLength; + } + return _textDecoder.decode(merged); + } + return String(data); +} diff --git a/src/infrastructure/codecs/CborCodec.js b/src/infrastructure/codecs/CborCodec.js index 3cebaf2d..635e563d 100644 --- a/src/infrastructure/codecs/CborCodec.js +++ b/src/infrastructure/codecs/CborCodec.js @@ -247,14 +247,14 @@ function sortKeys(value) { * | undefined | 7 | Simple value (undefined=23) | * | Array | 4 | Array of data items | * | Object | 5 | Map of pairs (keys sorted) | - * | Buffer | 2 | Byte string | + * | Uint8Array | 2 | Byte string | * | BigInt | 0, 1, or 6 | Integer or tagged bignum | * * @param {unknown} data - The data to encode. Can be any JSON-serializable value, * plus Buffer, BigInt, and other types supported by cbor-x. Objects have their * keys sorted before encoding. - * @returns {Buffer} CBOR-encoded bytes. The buffer is a Node.js Buffer instance - * that can be written directly to files, network sockets, or Git objects. + * @returns {Uint8Array} CBOR-encoded bytes that can be written directly to + * files, network sockets, or Git objects. 
* * @example * // Encode a simple patch operation @@ -298,7 +298,7 @@ export function encode(data) { * |-----------------|-----------------|----------------------------------| * | 0 (pos int) | number | Up to Number.MAX_SAFE_INTEGER | * | 1 (neg int) | number | Down to Number.MIN_SAFE_INTEGER | - * | 2 (byte string) | Buffer | Node.js Buffer | + * | 2 (byte string) | Uint8Array | Binary data | * | 3 (text string) | string | UTF-8 decoded | * | 4 (array) | Array | Recursive decode | * | 5 (map) | Object | Due to mapsAsObjects: true | @@ -325,8 +325,7 @@ export function encode(data) { * bytes.equals(reEncoded); // true - canonical encoding is idempotent * ``` * - * @param {Buffer|Uint8Array} buffer - CBOR-encoded bytes to decode. Accepts - * Node.js Buffer, Uint8Array, or any ArrayBufferView. + * @param {Uint8Array} buffer - CBOR-encoded bytes to decode. * @returns {unknown} The decoded JavaScript value. Type depends on the encoded * CBOR data - could be a primitive, array, or plain object. * @throws {Error} If the buffer contains invalid CBOR data or is truncated. @@ -376,14 +375,14 @@ export function decode(buffer) { export class CborCodec extends CodecPort { /** * @param {unknown} data - * @returns {Buffer|Uint8Array} + * @returns {Uint8Array} */ encode(data) { return encode(data); } /** - * @param {Buffer|Uint8Array} buffer + * @param {Uint8Array} buffer * @returns {unknown} */ decode(buffer) { diff --git a/src/ports/BlobStoragePort.js b/src/ports/BlobStoragePort.js new file mode 100644 index 00000000..011795dc --- /dev/null +++ b/src/ports/BlobStoragePort.js @@ -0,0 +1,33 @@ +/** + * Port interface for content blob storage operations. + * + * Abstracts how large binary content is stored and retrieved. + * Concrete adapters may use git-cas (chunked, CDC-deduped, optionally + * encrypted) or raw Git blobs. + * + * @abstract + */ +export default class BlobStoragePort { + /** + * Stores content and returns a storage identifier (e.g. CAS tree OID). 
+ * + * @param {Uint8Array|string} _content - The content to store + * @param {{ slug?: string }} [_options] - Optional metadata + * @returns {Promise} Storage identifier for retrieval + * @abstract + */ + async store(_content, _options) { + throw new Error('BlobStoragePort.store() not implemented'); + } + + /** + * Retrieves content by its storage identifier. + * + * @param {string} _oid - Storage identifier returned by store() + * @returns {Promise} The stored content + * @abstract + */ + async retrieve(_oid) { + throw new Error('BlobStoragePort.retrieve() not implemented'); + } +} diff --git a/src/ports/CodecPort.js b/src/ports/CodecPort.js index 1117f599..2b15e69d 100644 --- a/src/ports/CodecPort.js +++ b/src/ports/CodecPort.js @@ -8,7 +8,7 @@ export default class CodecPort { /** * Encodes data to binary format. * @param {unknown} _data - Data to encode - * @returns {Buffer|Uint8Array} Encoded bytes + * @returns {Uint8Array} Encoded bytes */ encode(_data) { throw new Error('CodecPort.encode() not implemented'); @@ -16,7 +16,7 @@ export default class CodecPort { /** * Decodes binary data back to a JavaScript value. - * @param {Buffer|Uint8Array} _bytes - Encoded bytes to decode + * @param {Uint8Array} _bytes - Encoded bytes to decode * @returns {unknown} Decoded value */ decode(_bytes) { diff --git a/src/ports/CryptoPort.js b/src/ports/CryptoPort.js index 26cb36e0..7c86e3d7 100644 --- a/src/ports/CryptoPort.js +++ b/src/ports/CryptoPort.js @@ -8,7 +8,7 @@ export default class CryptoPort { /** * Computes a hash digest of the given data. * @param {string} _algorithm - Hash algorithm (e.g. 'sha1', 'sha256') - * @param {string|Buffer|Uint8Array} _data - Data to hash + * @param {string|Uint8Array} _data - Data to hash * @returns {Promise} Hex-encoded digest */ async hash(_algorithm, _data) { @@ -18,9 +18,9 @@ export default class CryptoPort { /** * Computes an HMAC of the given data. * @param {string} _algorithm - Hash algorithm (e.g. 
'sha256') - * @param {string|Buffer|Uint8Array} _key - HMAC key - * @param {string|Buffer|Uint8Array} _data - Data to authenticate - * @returns {Promise} Raw HMAC digest + * @param {string|Uint8Array} _key - HMAC key + * @param {string|Uint8Array} _data - Data to authenticate + * @returns {Promise} Raw HMAC digest */ async hmac(_algorithm, _key, _data) { throw new Error('CryptoPort.hmac() not implemented'); @@ -28,8 +28,8 @@ export default class CryptoPort { /** * Constant-time comparison of two buffers. - * @param {Buffer|Uint8Array} _a - First buffer - * @param {Buffer|Uint8Array} _b - Second buffer + * @param {Uint8Array} _a - First buffer + * @param {Uint8Array} _b - Second buffer * @returns {boolean} True if buffers are equal */ timingSafeEqual(_a, _b) { diff --git a/src/ports/HttpServerPort.js b/src/ports/HttpServerPort.js index d56f5e21..95941628 100644 --- a/src/ports/HttpServerPort.js +++ b/src/ports/HttpServerPort.js @@ -3,14 +3,14 @@ * @property {string} method - HTTP method (GET, POST, etc.) * @property {string} url - Request URL path + query string * @property {Record} headers - Lowercased header map - * @property {Buffer | Uint8Array | undefined} body - Raw body bytes (undefined for bodiless requests) + * @property {Uint8Array | undefined} body - Raw body bytes (undefined for bodiless requests) */ /** * @typedef {Object} HttpResponse * @property {number} [status] - HTTP status code (defaults to 200) * @property {Record} [headers] - Response headers - * @property {string | Buffer | Uint8Array | null} [body] - Response body + * @property {string | Uint8Array | null} [body] - Response body */ /** diff --git a/src/ports/SeekCachePort.js b/src/ports/SeekCachePort.js index 591491b5..837de0fe 100644 --- a/src/ports/SeekCachePort.js +++ b/src/ports/SeekCachePort.js @@ -14,7 +14,7 @@ export default class SeekCachePort { /** * Retrieves a cached state buffer by key. 
* @param {string} _key - Cache key (e.g., 'v1:t42-') - * @returns {Promise<{ buffer: Buffer|Uint8Array, indexTreeOid?: string } | null>} The cached entry, or null on miss + * @returns {Promise<{ buffer: Uint8Array, indexTreeOid?: string } | null>} The cached entry, or null on miss * @throws {Error} If not implemented by a concrete adapter */ async get(_key) { @@ -24,7 +24,7 @@ export default class SeekCachePort { /** * Stores a state buffer under the given key. * @param {string} _key - Cache key - * @param {Buffer|Uint8Array} _buffer - Serialized state to cache + * @param {Uint8Array} _buffer - Serialized state to cache * @param {{ indexTreeOid?: string }} [_options] - Optional metadata * @returns {Promise} * @throws {Error} If not implemented by a concrete adapter diff --git a/src/ports/TreePort.js b/src/ports/TreePort.js index 6f61a5d3..56704411 100644 --- a/src/ports/TreePort.js +++ b/src/ports/TreePort.js @@ -21,7 +21,7 @@ export default class TreePort { /** * Reads a tree and returns a map of path to content. 
* @param {string} _treeOid - The tree OID to read - * @returns {Promise>} Map of file path to blob content + * @returns {Promise>} Map of file path to blob content * @throws {Error} If not implemented by a concrete adapter */ async readTree(_treeOid) { diff --git a/src/ports/WebSocketServerPort.js b/src/ports/WebSocketServerPort.js new file mode 100644 index 00000000..09db8e50 --- /dev/null +++ b/src/ports/WebSocketServerPort.js @@ -0,0 +1,34 @@ +/** + * @typedef {Object} WsConnection + * @property {(message: string) => void} send - Send a text message to the client + * @property {(handler: (message: string) => void) => void} onMessage - Register message handler + * @property {(handler: (code?: number, reason?: string) => void) => void} onClose - Register close handler + * @property {() => void} close - Close the connection + */ + +/** + * @typedef {Object} WsServerHandle + * @property {(port: number, host?: string) => Promise<{ port: number, host: string }>} listen + * @property {() => Promise} close + */ + +/** + * Port for WebSocket server creation. + * + * Abstracts platform-specific WebSocket server APIs (Node ws, Bun.serve, + * Deno.upgradeWebSocket) so domain code doesn't depend on any runtime + * directly. + */ +export default class WebSocketServerPort { + /** + * Creates a WebSocket server. + * + * @param {(connection: WsConnection) => void} _onConnection - Called for each new client connection. + * The callback MUST register `onMessage` and `onClose` handlers synchronously. + * Deferred registration risks dropping messages that arrive before handlers are set. 
+ * @returns {WsServerHandle} Server handle with listen() and close() + */ + createServer(_onConnection) { + throw new Error('WebSocketServerPort.createServer() not implemented'); + } +} diff --git a/src/visualization/renderers/ascii/path.js b/src/visualization/renderers/ascii/path.js index 02eab7ad..e006e8cc 100644 --- a/src/visualization/renderers/ascii/path.js +++ b/src/visualization/renderers/ascii/path.js @@ -203,7 +203,7 @@ export function renderPathView(payload, options = {}) { } const { from, to, found, path, length } = payload; - const terminalWidth = options.terminalWidth || process.stdout.columns || DEFAULT_TERMINAL_WIDTH; + const terminalWidth = options.terminalWidth || DEFAULT_TERMINAL_WIDTH; // Handle "no path found" case if (!found) { diff --git a/test/integration/api/content-attachment.test.js b/test/integration/api/content-attachment.test.js index 5cb261ab..2fc3a22b 100644 --- a/test/integration/api/content-attachment.test.js +++ b/test/integration/api/content-attachment.test.js @@ -25,7 +25,7 @@ describe('API: Content Attachment', () => { await graph.materialize(); const content = await graph.getContent('doc:1'); expect(content).not.toBeNull(); - expect(content.toString('utf8')).toBe('# Hello World\n\nThis is content.'); + expect(new TextDecoder().decode(content)).toBe('# Hello World\n\nThis is content.'); }); it('getContentOid returns hex OID', async () => { @@ -75,7 +75,7 @@ describe('API: Content Attachment', () => { await graph.materialize(); const content = await graph.getEdgeContent('a', 'b', 'rel'); expect(content).not.toBeNull(); - expect(content.toString('utf8')).toBe('edge payload'); + expect(new TextDecoder().decode(content)).toBe('edge payload'); const oid = await graph.getEdgeContentOid('a', 'b', 'rel'); expect(oid).toMatch(/^[0-9a-f]+$/); @@ -103,7 +103,7 @@ describe('API: Content Attachment', () => { expect(content).not.toBeNull(); // Bob's content should win (higher Lamport tick) - expect(content.toString('utf8')).toBe('bob 
version'); + expect(new TextDecoder().decode(content)).toBe('bob version'); }); it('time-travel: materialize with ceiling returns historical content', async () => { @@ -124,12 +124,12 @@ describe('API: Content Attachment', () => { // Latest should be v2 await graph.materialize(); const latest = await graph.getContent('doc:1'); - expect(latest.toString('utf8')).toBe('version 2'); + expect(new TextDecoder().decode(latest)).toBe('version 2'); // Ceiling=1 should be v1 await graph.materialize({ ceiling: 1 }); const historical = await graph.getContent('doc:1'); - expect(historical.toString('utf8')).toBe('version 1'); + expect(new TextDecoder().decode(historical)).toBe('version 1'); }); it('node deletion removes content reference', async () => { @@ -163,7 +163,7 @@ describe('API: Content Attachment', () => { await graph.materialize(); const content = await graph.getContent('doc:1'); - expect(content.toString('utf8')).toBe('via writer API'); + expect(new TextDecoder().decode(content)).toBe('via writer API'); }); it('GC durability: content survives git gc --prune=now', async () => { @@ -181,7 +181,7 @@ describe('API: Content Attachment', () => { await graph.materialize(); const content = await graph.getContent('doc:1'); expect(content).not.toBeNull(); - expect(content.toString('utf8')).toBe('must survive gc'); + expect(new TextDecoder().decode(content)).toBe('must survive gc'); }); it('checkpoint anchoring: content survives GC after checkpoint', async () => { @@ -205,12 +205,12 @@ describe('API: Content Attachment', () => { await graph2.materialize(); const content = await graph2.getContent('doc:1'); expect(content).not.toBeNull(); - expect(content.toString('utf8')).toBe('checkpointed content'); + expect(new TextDecoder().decode(content)).toBe('checkpointed content'); }); it('binary content round-trips correctly', async () => { const graph = await repo.openGraph('test', 'alice'); - const binary = Buffer.from([0x00, 0x01, 0x02, 0xff, 0xfe, 0xfd]); + const binary = new 
Uint8Array([0x00, 0x01, 0x02, 0xff, 0xfe, 0xfd]); const patch = await graph.createPatch(); patch.addNode('bin:1'); @@ -220,6 +220,7 @@ describe('API: Content Attachment', () => { await graph.materialize(); const content = await graph.getContent('bin:1'); expect(content).not.toBeNull(); - expect(Buffer.compare(content, binary)).toBe(0); + expect(content).toBeInstanceOf(Uint8Array); + expect(content).toEqual(binary); }); }); diff --git a/test/type-check/consumer.ts b/test/type-check/consumer.ts index 1d3aef63..39ea60c5 100644 --- a/test/type-check/consumer.ts +++ b/test/type-check/consumer.ts @@ -180,11 +180,11 @@ const edges: Array<{ from: string; to: string; label: string; props: Record> = builder.serialize(); +const serialized: Promise> = builder.serialize(); const reader = new BitmapIndexReader({ storage: gitAdapter, strict: true, logger, crypto }); reader.setup({ 'meta_ab.json': 'oid1', 'shards_fwd_ab.json': 'oid2' }); @@ -431,6 +431,19 @@ const encoded: string = encodeEdgePropKey('a', 'b', 'c', 'd'); const decoded = decodeEdgePropKey(encoded); const isEdge: boolean = isEdgePropKey(encoded); +// --------------------------------------------------------------------------- +// Browser entry point — verify missing exports (#1) +// --------------------------------------------------------------------------- +import { + WarpError as BrowserWarpError, + createVersionVector as browserCreateVV, + generateWriterId as browserGenWriterId, +} from '../../browser.js'; + +const _browserErr: BrowserWarpError = new BrowserWarpError('test', { code: 'TEST' }); +const _browserVV: Map = browserCreateVV(); +const _browserWriterId: string = browserGenWriterId(); + // --------------------------------------------------------------------------- // Negative tests -- must FAIL compilation (verified via @ts-expect-error) // --------------------------------------------------------------------------- diff --git a/test/unit/cli/commands/serve.test.js b/test/unit/cli/commands/serve.test.js new 
file mode 100644 index 00000000..45a1c0db --- /dev/null +++ b/test/unit/cli/commands/serve.test.js @@ -0,0 +1,242 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +// Mock the dependencies that serve.js imports +vi.mock('../../../../bin/cli/shared.js', () => ({ + createPersistence: vi.fn(), + listGraphNames: vi.fn(), +})); + +vi.mock('../../../../src/domain/WarpGraph.js', () => ({ + default: { open: vi.fn() }, +})); + +vi.mock('../../../../src/infrastructure/adapters/WebCryptoAdapter.js', () => ({ + default: class MockWebCryptoAdapter {}, +})); + +class MockWsAdapter { + createServer(/** @type {Function} */ _onConnection) { + return { + async listen(/** @type {number} */ port, /** @type {string} */ host) { + return { port: port || 3000, host: host || '127.0.0.1' }; + }, + async close() {}, + }; + } +} + +vi.mock('../../../../src/infrastructure/adapters/NodeWsAdapter.js', () => ({ + default: MockWsAdapter, +})); + +vi.mock('../../../../src/infrastructure/adapters/BunWsAdapter.js', () => ({ + default: MockWsAdapter, +})); + +vi.mock('../../../../src/infrastructure/adapters/DenoWsAdapter.js', () => ({ + default: MockWsAdapter, +})); + +vi.mock('../../../../src/domain/services/WarpServeService.js', () => ({ + default: class MockWarpServeService { + constructor(/** @type {any} */ opts) { + this.opts = opts; + this.listenCalled = false; + this.closeCalled = false; + } + async listen(/** @type {number} */ port, /** @type {string} */ host) { + this.listenCalled = true; + this.listenPort = port; + this.listenHost = host; + return { port, host }; + } + async close() { this.closeCalled = true; } + }, +})); + +// Must import AFTER mocks are set up +const { createPersistence, listGraphNames } = await import('../../../../bin/cli/shared.js'); +const WarpGraph = (await import('../../../../src/domain/WarpGraph.js')).default; +const handleServe = (await import('../../../../bin/cli/commands/serve.js')).default; + +describe('handleServe', () => { + beforeEach(() => 
{ + vi.clearAllMocks(); + + /** @type {any} */ + const mockPersistence = { ping: vi.fn().mockResolvedValue({ ok: true }) }; + /** @type {any} */ (createPersistence).mockResolvedValue({ persistence: mockPersistence }); + /** @type {any} */ (listGraphNames).mockResolvedValue(['default']); + + /** @type {any} */ (WarpGraph.open).mockResolvedValue({ + graphName: 'default', + materialize: vi.fn().mockResolvedValue({}), + subscribe: vi.fn().mockReturnValue({ unsubscribe: vi.fn() }), + getNodeProps: vi.fn(), + createPatch: vi.fn(), + query: vi.fn(), + }); + }); + + it('discovers all graphs when --graph is not specified', async () => { + /** @type {any} */ (listGraphNames).mockResolvedValue(['alpha', 'beta']); + /** @type {any} */ (WarpGraph.open).mockImplementation( + async (/** @type {any} */ opts) => ({ + graphName: opts.graphName, + materialize: vi.fn().mockResolvedValue({}), + subscribe: vi.fn().mockReturnValue({ unsubscribe: vi.fn() }), + getNodeProps: vi.fn(), + createPatch: vi.fn(), + query: vi.fn(), + }), + ); + + const result = await handleServe({ + options: /** @type {any} */ ({ repo: '.', graph: undefined, writer: 'cli' }), + args: [], + }); + + expect(WarpGraph.open).toHaveBeenCalledTimes(2); + expect(result.payload.graphs).toEqual(['alpha', 'beta']); + }); + + it('scopes to a single graph with --graph', async () => { + /** @type {any} */ (listGraphNames).mockResolvedValue(['alpha', 'beta']); + + const result = await handleServe({ + options: /** @type {any} */ ({ repo: '.', graph: 'alpha', writer: 'cli' }), + args: [], + }); + + expect(WarpGraph.open).toHaveBeenCalledTimes(1); + const openCall = /** @type {any} */ (WarpGraph.open).mock.calls[0][0]; + expect(openCall.graphName).toBe('alpha'); + expect(result.payload.graphs).toEqual(['alpha']); + }); + + it('throws when specified graph does not exist', async () => { + /** @type {any} */ (listGraphNames).mockResolvedValue(['alpha']); + + await expect( + handleServe({ + options: /** @type {any} */ ({ repo: '.', 
graph: 'nonexistent', writer: 'cli' }), + args: [], + }), + ).rejects.toThrow(/not found/i); + }); + + it('throws when no graphs exist in the repo', async () => { + /** @type {any} */ (listGraphNames).mockResolvedValue([]); + + await expect( + handleServe({ + options: /** @type {any} */ ({ repo: '.', writer: 'cli' }), + args: [], + }), + ).rejects.toThrow(/no.*graph/i); + }); + + it('returns server address in the payload', async () => { + const result = await handleServe({ + options: /** @type {any} */ ({ repo: '.', writer: 'cli' }), + args: [], + }); + + expect(result.payload.host).toBeDefined(); + expect(result.payload.port).toBeDefined(); + expect(result.payload.url).toMatch(/^ws:\/\//); + }); + + it('uses custom port from args', async () => { + const result = await handleServe({ + options: /** @type {any} */ ({ repo: '.', writer: 'cli' }), + args: ['--port', '4567'], + }); + + expect(result.payload.port).toBe(4567); + }); + + it('includes process.pid in ephemeral writerId for uniqueness', async () => { + await handleServe({ + options: /** @type {any} */ ({ repo: '.', writer: 'cli' }), + args: ['--port', '0'], + }); + + const openCall = /** @type {any} */ (WarpGraph.open).mock.calls[0][0]; + expect(openCall.writerId).toContain('ephemeral'); + expect(openCall.writerId).toContain(String(process.pid)); + }); + + it('derives ephemeral writerId for port 0 (OS-assigned)', async () => { + await handleServe({ + options: /** @type {any} */ ({ repo: '.', writer: 'cli' }), + args: ['--port', '0'], + }); + + const openCall = /** @type {any} */ (WarpGraph.open).mock.calls[0][0]; + // Port 0 means the OS assigns an ephemeral port — writerId should NOT embed "0" + expect(openCall.writerId).toContain('ephemeral'); + // Positive format check: serve--ephemeral-- + expect(openCall.writerId).toMatch(/^serve-[\w.-]+-ephemeral-[a-z0-9]+-\d+$/); + expect(openCall.writerId).not.toContain('-0-'); + expect(openCall.writerId).not.toMatch(/-0$/); + }); + + it('uses --writer-id when 
provided instead of derived writerId', async () => { + await handleServe({ + options: /** @type {any} */ ({ repo: '.', writer: 'cli' }), + args: ['--writer-id', 'my-custom-writer'], + }); + + const openCall = /** @type {any} */ (WarpGraph.open).mock.calls[0][0]; + expect(openCall.writerId).toBe('my-custom-writer'); + }); + + it('falls back to derived writerId when --writer-id is not provided', async () => { + await handleServe({ + options: /** @type {any} */ ({ repo: '.', writer: 'cli' }), + args: ['--port', '5000'], + }); + + const openCall = /** @type {any} */ (WarpGraph.open).mock.calls[0][0]; + expect(openCall.writerId).toMatch(/^serve-/); + expect(openCall.writerId).toContain('5000'); + }); + + it('rejects 0.0.0.0 without --expose', async () => { + await expect( + handleServe({ + options: /** @type {any} */ ({ repo: '.', writer: 'cli' }), + args: ['--host', '0.0.0.0'], + }), + ).rejects.toThrow(/--expose/); + }); + + it('rejects :: without --expose', async () => { + await expect( + handleServe({ + options: /** @type {any} */ ({ repo: '.', writer: 'cli' }), + args: ['--host', '::'], + }), + ).rejects.toThrow(/--expose/); + }); + + it('rejects 0:0:0:0:0:0:0:0 without --expose', async () => { + await expect( + handleServe({ + options: /** @type {any} */ ({ repo: '.', writer: 'cli' }), + args: ['--host', '0:0:0:0:0:0:0:0'], + }), + ).rejects.toThrow(/--expose/); + }); + + it('returns a close function for clean shutdown', async () => { + const result = await handleServe({ + options: /** @type {any} */ ({ repo: '.', writer: 'cli' }), + args: [], + }); + + expect(typeof result.close).toBe('function'); + await expect(result.close()).resolves.toBeUndefined(); + }); +}); diff --git a/test/unit/cli/schemas.test.js b/test/unit/cli/schemas.test.js index dc586a94..c21270f7 100644 --- a/test/unit/cli/schemas.test.js +++ b/test/unit/cli/schemas.test.js @@ -9,6 +9,7 @@ import { querySchema, viewSchema, seekSchema, + serveSchema, } from '../../../bin/cli/schemas.js'; 
describe('bisectSchema', () => { @@ -188,6 +189,52 @@ describe('viewSchema', () => { }); }); +describe('serveSchema', () => { + it('defaults port to 3000, host to 127.0.0.1, expose to false', () => { + const result = serveSchema.parse({}); + expect(result.port).toBe(3000); + expect(result.host).toBe('127.0.0.1'); + expect(result.expose).toBe(false); + expect(result.writerId).toBeUndefined(); + }); + + it('accepts --writer-id with valid characters', () => { + const result = serveSchema.parse({ 'writer-id': 'my-serve-instance' }); + expect(result.writerId).toBe('my-serve-instance'); + }); + + it('accepts --writer-id with dots and underscores', () => { + const result = serveSchema.parse({ 'writer-id': 'serve.test_01' }); + expect(result.writerId).toBe('serve.test_01'); + }); + + it('rejects --writer-id with invalid characters', () => { + expect(() => serveSchema.parse({ 'writer-id': 'has spaces' })).toThrow(/writer-id/); + }); + + it('rejects --writer-id with slashes', () => { + expect(() => serveSchema.parse({ 'writer-id': 'a/b' })).toThrow(/writer-id/); + }); + + it('rejects --writer-id with colons', () => { + expect(() => serveSchema.parse({ 'writer-id': 'serve:3000' })).toThrow(/writer-id/); + }); + + it('rejects empty --writer-id', () => { + expect(() => serveSchema.parse({ 'writer-id': '' })).toThrow(); + }); + + it('accepts --port and --host overrides', () => { + const result = serveSchema.parse({ port: '8080', host: '0.0.0.0' }); + expect(result.port).toBe(8080); + expect(result.host).toBe('0.0.0.0'); + }); + + it('rejects unknown keys', () => { + expect(() => serveSchema.parse({ unknown: true })).toThrow(/unknown/i); + }); +}); + describe('seekSchema', () => { it('defaults to status action', () => { const result = seekSchema.parse({}); diff --git a/test/unit/domain/WarpGraph.content.test.js b/test/unit/domain/WarpGraph.content.test.js index 2bb570d2..986a06fb 100644 --- a/test/unit/domain/WarpGraph.content.test.js +++ 
b/test/unit/domain/WarpGraph.content.test.js @@ -35,7 +35,7 @@ describe('WarpGraph content attachment (query methods)', () => { updateRef: vi.fn().mockResolvedValue(undefined), configGet: vi.fn().mockResolvedValue(null), configSet: vi.fn().mockResolvedValue(undefined), - readBlob: vi.fn().mockResolvedValue(Buffer.from('hello world')), + readBlob: vi.fn().mockResolvedValue(new TextEncoder().encode('hello world')), }; graph = await WarpGraph.open({ @@ -87,7 +87,7 @@ describe('WarpGraph content attachment (query methods)', () => { describe('getContent()', () => { it('reads and returns the blob buffer', async () => { - const buf = Buffer.from('# ADR 001\n\nSome content'); + const buf = new TextEncoder().encode('# ADR 001\n\nSome content'); mockPersistence.readBlob.mockResolvedValue(buf); setupGraphState(graph, (/** @type {any} */ state) => { @@ -119,6 +119,70 @@ describe('WarpGraph content attachment (query methods)', () => { }); }); + describe('getContent() with blobStorage', () => { + it('uses blobStorage.retrieve() when blobStorage is provided', async () => { + const casBuf = new TextEncoder().encode('cas-stored content'); + const blobStorage = { + store: vi.fn(), + retrieve: vi.fn().mockResolvedValue(casBuf), + }; + /** @type {any} */ (graph)._blobStorage = blobStorage; + + setupGraphState(graph, (/** @type {any} */ state) => { + addNode(state, 'doc:1', 1); + const propKey = encodePropKey('doc:1', '_content'); + state.prop.set(propKey, { eventId: null, value: 'cas-tree-oid' }); + }); + + const content = await graph.getContent('doc:1'); + + expect(content).toEqual(casBuf); + expect(blobStorage.retrieve).toHaveBeenCalledWith('cas-tree-oid'); + expect(mockPersistence.readBlob).not.toHaveBeenCalled(); + }); + + it('falls back to persistence.readBlob() when blobStorage is not provided', async () => { + const rawBuf = new TextEncoder().encode('raw blob'); + mockPersistence.readBlob.mockResolvedValue(rawBuf); + + setupGraphState(graph, (/** @type {any} */ state) => { + 
addNode(state, 'doc:1', 1); + const propKey = encodePropKey('doc:1', '_content'); + state.prop.set(propKey, { eventId: null, value: 'raw-oid' }); + }); + + const content = await graph.getContent('doc:1'); + + expect(content).toEqual(rawBuf); + expect(mockPersistence.readBlob).toHaveBeenCalledWith('raw-oid'); + }); + }); + + describe('getEdgeContent() with blobStorage', () => { + it('uses blobStorage.retrieve() when blobStorage is provided', async () => { + const casBuf = new TextEncoder().encode('cas-edge content'); + const blobStorage = { + store: vi.fn(), + retrieve: vi.fn().mockResolvedValue(casBuf), + }; + /** @type {any} */ (graph)._blobStorage = blobStorage; + + setupGraphState(graph, (/** @type {any} */ state) => { + addNode(state, 'a', 1); + addNode(state, 'b', 2); + addEdge(state, 'a', 'b', 'rel', 3); + const propKey = encodeEdgePropKey('a', 'b', 'rel', '_content'); + state.prop.set(propKey, { eventId: { lamport: 2, writerId: 'w1', patchSha: 'aabbccdd', opIndex: 0 }, value: 'cas-edge-oid' }); + }); + + const content = await graph.getEdgeContent('a', 'b', 'rel'); + + expect(content).toEqual(casBuf); + expect(blobStorage.retrieve).toHaveBeenCalledWith('cas-edge-oid'); + expect(mockPersistence.readBlob).not.toHaveBeenCalled(); + }); + }); + describe('getEdgeContentOid()', () => { it('returns the _content property value for an edge', async () => { setupGraphState(graph, (/** @type {any} */ state) => { @@ -154,7 +218,7 @@ describe('WarpGraph content attachment (query methods)', () => { describe('getEdgeContent()', () => { it('reads and returns the blob buffer', async () => { - const buf = Buffer.from('edge content'); + const buf = new TextEncoder().encode('edge content'); mockPersistence.readBlob.mockResolvedValue(buf); setupGraphState(graph, (/** @type {any} */ state) => { diff --git a/test/unit/domain/WarpGraph.encryption.test.js b/test/unit/domain/WarpGraph.encryption.test.js new file mode 100644 index 00000000..75f90f8e --- /dev/null +++ 
b/test/unit/domain/WarpGraph.encryption.test.js @@ -0,0 +1,268 @@ +/** + * Integration tests for graph encryption at rest (B164). + * + * Tests the patchBlobStorage flow end-to-end using a mock + * BlobStoragePort that simulates encrypted storage in memory. + */ +import { describe, it, expect, beforeEach } from 'vitest'; +import WarpGraph from '../../../src/domain/WarpGraph.js'; +import BlobStoragePort from '../../../src/ports/BlobStoragePort.js'; +import EncryptionError from '../../../src/domain/errors/EncryptionError.js'; +import { createInMemoryRepo } from '../../helpers/warpGraphTestUtils.js'; + +// --------------------------------------------------------------------------- +// Mock BlobStoragePort — stores/retrieves from an in-memory Map +// --------------------------------------------------------------------------- + +class InMemoryBlobStorage extends BlobStoragePort { + constructor() { + super(); + /** @type {Map} */ + this._blobs = new Map(); + this._counter = 0; + } + + async store(/** @type {string|Uint8Array} */ content) { + this._counter++; + // Generate a fake OID (40-char hex) + const oid = this._counter.toString(16).padStart(40, '0'); + const buf = typeof content === 'string' + ? 
new TextEncoder().encode(content) + : new Uint8Array(content); + this._blobs.set(oid, buf); + return oid; + } + + async retrieve(/** @type {string} */ oid) { + const buf = this._blobs.get(oid); + if (!buf) { + throw new Error(`InMemoryBlobStorage: OID not found: ${oid}`); + } + return buf; + } +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('WarpGraph encryption at rest (B164)', () => { + /** @type {ReturnType} */ + let repo; + /** @type {InMemoryBlobStorage} */ + let patchStorage; + + beforeEach(() => { + repo = createInMemoryRepo(); + patchStorage = new InMemoryBlobStorage(); + }); + + it('writes encrypted patches via patchBlobStorage and reads them back', async () => { + const graph = await WarpGraph.open({ + persistence: repo.persistence, + graphName: 'enc-test', + writerId: 'writer-1', + patchBlobStorage: patchStorage, + }); + + // Write a patch + const sha = await graph.patch(p => { + p.addNode('user:alice'); + p.setProperty('user:alice', 'name', 'Alice'); + }); + + expect(sha).toBeTruthy(); + // Patch CBOR should be in our mock storage, not raw persistence + expect(patchStorage._blobs.size).toBe(1); + + // Materialize should work (reads back via patchBlobStorage) + const state = await graph.materialize(); + expect(state).toBeTruthy(); + + // Query to verify data integrity + expect(await graph.hasNode('user:alice')).toBe(true); + const props = /** @type {any} */ (await graph.getNodeProps('user:alice')); + expect(props.name).toBe('Alice'); + }); + + it('reads encrypted patches after re-opening with patchBlobStorage', async () => { + // Write with encryption + const graph1 = await WarpGraph.open({ + persistence: repo.persistence, + graphName: 'enc-test', + writerId: 'writer-1', + patchBlobStorage: patchStorage, + }); + await graph1.patch(p => { + p.addNode('user:bob'); + p.setProperty('user:bob', 'role', 'admin'); + }); + + 
// Re-open with same storage (simulating re-open with key) + const graph2 = await WarpGraph.open({ + persistence: repo.persistence, + graphName: 'enc-test', + writerId: 'writer-2', + patchBlobStorage: patchStorage, + }); + await graph2.materialize(); + + expect(await graph2.hasNode('user:bob')).toBe(true); + const props = /** @type {any} */ (await graph2.getNodeProps('user:bob')); + expect(props.role).toBe('admin'); + }); + + it('throws EncryptionError when reading encrypted patches without patchBlobStorage', async () => { + // Write with encryption + const graph1 = await WarpGraph.open({ + persistence: repo.persistence, + graphName: 'enc-test', + writerId: 'writer-1', + patchBlobStorage: patchStorage, + }); + await graph1.patch(p => { + p.addNode('user:charlie'); + }); + + // Re-open WITHOUT patchBlobStorage — should fail during open() or materialize() + // (the migration boundary check in open() reads the tip patch) + const openPromise = WarpGraph.open({ + persistence: repo.persistence, + graphName: 'enc-test', + writerId: 'writer-2', + }); + + await expect(openPromise).rejects.toThrow(/encrypted patches/); + }); + + it('handles mixed encrypted and unencrypted patches', async () => { + // Write unencrypted patches first + const graph1 = await WarpGraph.open({ + persistence: repo.persistence, + graphName: 'mixed-test', + writerId: 'writer-1', + }); + await graph1.patch(p => { + p.addNode('user:plain'); + p.setProperty('user:plain', 'mode', 'clear'); + }); + + // Then write encrypted patches with a different writer + const graph2 = await WarpGraph.open({ + persistence: repo.persistence, + graphName: 'mixed-test', + writerId: 'writer-2', + patchBlobStorage: patchStorage, + }); + await graph2.patch(p => { + p.addNode('user:secret'); + p.setProperty('user:secret', 'mode', 'encrypted'); + }); + + // Re-open with patchBlobStorage — should read both + const graph3 = await WarpGraph.open({ + persistence: repo.persistence, + graphName: 'mixed-test', + writerId: 'reader', + 
patchBlobStorage: patchStorage, + }); + await graph3.materialize(); + + expect(await graph3.hasNode('user:plain')).toBe(true); + expect(await graph3.hasNode('user:secret')).toBe(true); + const plainProps = /** @type {any} */ (await graph3.getNodeProps('user:plain')); + expect(plainProps.mode).toBe('clear'); + const secretProps = /** @type {any} */ (await graph3.getNodeProps('user:secret')); + expect(secretProps.mode).toBe('encrypted'); + }); + + it('no behavior change when patchBlobStorage is not provided', async () => { + const graph = await WarpGraph.open({ + persistence: repo.persistence, + graphName: 'plain-test', + writerId: 'writer-1', + }); + + await graph.patch(p => { + p.addNode('user:normal'); + p.setProperty('user:normal', 'status', 'active'); + }); + + // patchBlobStorage should be empty — patches went to persistence directly + expect(patchStorage._blobs.size).toBe(0); + + const state = await graph.materialize(); + expect(state).toBeTruthy(); + + expect(await graph.hasNode('user:normal')).toBe(true); + const props = /** @type {any} */ (await graph.getNodeProps('user:normal')); + expect(props.status).toBe('active'); + }); + + it('multiple encrypted patches accumulate correctly', async () => { + const graph = await WarpGraph.open({ + persistence: repo.persistence, + graphName: 'multi-test', + writerId: 'writer-1', + patchBlobStorage: patchStorage, + }); + + await graph.patch(p => { + p.addNode('a'); + p.setProperty('a', 'v', 1); + }); + await graph.patch(p => { + p.addNode('b'); + p.addEdge('a', 'b', 'link'); + }); + await graph.patch(p => { + p.setProperty('a', 'v', 2); + }); + + // 3 patches stored + expect(patchStorage._blobs.size).toBe(3); + + const state = await graph.materialize(); + expect(state).toBeTruthy(); + + const nodes = await graph.getNodes(); + expect(nodes.sort()).toEqual(['a', 'b']); + const aProps = /** @type {any} */ (await graph.getNodeProps('a')); + expect(aProps.v).toBe(2); // LWW: latest wins + }); + + it('provenance methods work 
with encrypted patches', async () => { + const graph = await WarpGraph.open({ + persistence: repo.persistence, + graphName: 'prov-test', + writerId: 'writer-1', + patchBlobStorage: patchStorage, + }); + + await graph.patch(p => { + p.addNode('x'); + p.setProperty('x', 'k', 'v1'); + }); + await graph.patch(p => { + p.setProperty('x', 'k', 'v2'); + }); + + await graph.materialize(); + + // patchesFor should work + const patches = await graph.patchesFor('x'); + expect(patches.length).toBeGreaterThanOrEqual(2); + + // loadPatchBySha should work + const loaded = await graph.loadPatchBySha(patches[0]); + expect(loaded).toBeTruthy(); + expect(loaded.ops).toBeDefined(); + }); + + it('EncryptionError has correct code', () => { + const err = new EncryptionError('test'); + expect(err.code).toBe('E_ENCRYPTED_PATCH'); + expect(err.name).toBe('EncryptionError'); + expect(err).toBeInstanceOf(Error); + }); +}); diff --git a/test/unit/domain/__snapshots__/WarpGraph.apiSurface.test.js.snap b/test/unit/domain/__snapshots__/WarpGraph.apiSurface.test.js.snap index 5e7e26a3..56560f9e 100644 --- a/test/unit/domain/__snapshots__/WarpGraph.apiSurface.test.js.snap +++ b/test/unit/domain/__snapshots__/WarpGraph.apiSurface.test.js.snap @@ -107,6 +107,11 @@ exports[`WarpGraph API surface > all prototype methods have correct property des "enumerable": false, "type": "method", }, + "_readPatchBlob": { + "configurable": true, + "enumerable": false, + "type": "method", + }, "_relationToCheckpointHead": { "configurable": true, "enumerable": false, @@ -435,7 +440,7 @@ exports[`WarpGraph API surface > all prototype methods have correct property des } `; -exports[`WarpGraph API surface > prototype method count matches snapshot 1`] = `86`; +exports[`WarpGraph API surface > prototype method count matches snapshot 1`] = `87`; exports[`WarpGraph API surface > prototype methods match snapshot 1`] = ` [ @@ -460,6 +465,7 @@ exports[`WarpGraph API surface > prototype methods match snapshot 1`] = ` 
"_notifySubscribers", "_onPatchCommitted", "_persistSeekCacheEntry", + "_readPatchBlob", "_relationToCheckpointHead", "_resolveCeiling", "_restoreIndexFromCache", diff --git a/test/unit/domain/seekCache.test.js b/test/unit/domain/seekCache.test.js index 8e498604..10e2b0df 100644 --- a/test/unit/domain/seekCache.test.js +++ b/test/unit/domain/seekCache.test.js @@ -131,41 +131,41 @@ function createMockSeekCache() { // =========================================================================== describe('buildSeekCacheKey', () => { - it('produces deterministic keys for identical inputs', () => { + it('produces deterministic keys for identical inputs', async () => { const frontier = new Map([['alice', 'aaa'], ['bob', 'bbb']]); - const k1 = buildSeekCacheKey(5, frontier); - const k2 = buildSeekCacheKey(5, frontier); + const k1 = await buildSeekCacheKey(5, frontier); + const k2 = await buildSeekCacheKey(5, frontier); expect(k1).toBe(k2); }); - it('starts with version prefix', () => { - const key = buildSeekCacheKey(10, new Map([['w1', 'sha1']])); + it('starts with version prefix', async () => { + const key = await buildSeekCacheKey(10, new Map([['w1', 'sha1']])); expect(key).toMatch(/^v1:t10-/); }); - it('uses full 64-char SHA-256 hex digest', () => { - const key = buildSeekCacheKey(1, new Map([['w', 's']])); + it('uses full 64-char SHA-256 hex digest', async () => { + const key = await buildSeekCacheKey(1, new Map([['w', 's']])); // v1:t1-<64 hex chars> const hash = key.split('-').slice(1).join('-'); expect(hash).toHaveLength(64); expect(hash).toMatch(/^[0-9a-f]{64}$/); }); - it('differs when ceiling changes', () => { + it('differs when ceiling changes', async () => { const f = new Map([['w', 'sha']]); - expect(buildSeekCacheKey(1, f)).not.toBe(buildSeekCacheKey(2, f)); + expect(await buildSeekCacheKey(1, f)).not.toBe(await buildSeekCacheKey(2, f)); }); - it('differs when frontier changes', () => { + it('differs when frontier changes', async () => { const f1 = new 
Map([['w', 'sha1']]); const f2 = new Map([['w', 'sha2']]); - expect(buildSeekCacheKey(1, f1)).not.toBe(buildSeekCacheKey(1, f2)); + expect(await buildSeekCacheKey(1, f1)).not.toBe(await buildSeekCacheKey(1, f2)); }); - it('is order-independent for frontier entries', () => { + it('is order-independent for frontier entries', async () => { const f1 = new Map([['alice', 'a'], ['bob', 'b']]); const f2 = new Map([['bob', 'b'], ['alice', 'a']]); - expect(buildSeekCacheKey(1, f1)).toBe(buildSeekCacheKey(1, f2)); + expect(await buildSeekCacheKey(1, f1)).toBe(await buildSeekCacheKey(1, f2)); }); }); diff --git a/test/unit/domain/services/BitmapIndexBuilder.frontier.test.js b/test/unit/domain/services/BitmapIndexBuilder.frontier.test.js index 124fdb26..d5d52adb 100644 --- a/test/unit/domain/services/BitmapIndexBuilder.frontier.test.js +++ b/test/unit/domain/services/BitmapIndexBuilder.frontier.test.js @@ -41,8 +41,8 @@ describe('BitmapIndexBuilder frontier metadata (GK/IDX/1)', () => { const tree = await builder.serialize({ frontier }); - expect(tree['frontier.cbor']).toBeInstanceOf(Buffer); - expect(tree['frontier.json']).toBeInstanceOf(Buffer); + expect(tree['frontier.cbor']).toBeInstanceOf(Uint8Array); + expect(tree['frontier.json']).toBeInstanceOf(Uint8Array); }); it('CBOR roundtrip: decode → verify envelope structure', async () => { @@ -72,7 +72,7 @@ describe('BitmapIndexBuilder frontier metadata (GK/IDX/1)', () => { const tree = await builder.serialize({ frontier }); const cborEnvelope = cborDecode(tree['frontier.cbor']); - const jsonEnvelope = JSON.parse(tree['frontier.json'].toString('utf-8')); + const jsonEnvelope = JSON.parse(new TextDecoder().decode(tree['frontier.json'])); expect(jsonEnvelope).toEqual(cborEnvelope); }); @@ -86,7 +86,7 @@ describe('BitmapIndexBuilder frontier metadata (GK/IDX/1)', () => { ]); const tree = await builder.serialize({ frontier }); - const envelope = JSON.parse(tree['frontier.json'].toString('utf-8')); + const envelope = JSON.parse(new 
TextDecoder().decode(tree['frontier.json'])); const keys = Object.keys(envelope.frontier); expect(keys).toEqual(['alpha', 'mike', 'zulu']); @@ -117,7 +117,7 @@ describe('BitmapIndexBuilder frontier metadata (GK/IDX/1)', () => { // All non-frontier entries should be identical for (const key of Object.keys(treeWithout)) { expect(treeWith[key]).toBeDefined(); - expect(treeWith[key].equals(treeWithout[key])).toBe(true); + expect(treeWith[key]).toEqual(treeWithout[key]); } // Frontier files are extra diff --git a/test/unit/domain/services/BitmapIndexBuilder.test.js b/test/unit/domain/services/BitmapIndexBuilder.test.js index 70b41242..448428c3 100644 --- a/test/unit/domain/services/BitmapIndexBuilder.test.js +++ b/test/unit/domain/services/BitmapIndexBuilder.test.js @@ -75,7 +75,7 @@ describe('BitmapIndexBuilder', () => { builder.addEdge('aabbcc', 'aaddee'); const tree = await builder.serialize(); - const envelope = JSON.parse(tree['shards_fwd_aa.json'].toString()); + const envelope = JSON.parse(new TextDecoder().decode(tree['shards_fwd_aa.json'])); // Shard is wrapped in version/checksum envelope expect(envelope.version).toBeDefined(); @@ -90,15 +90,15 @@ describe('BitmapIndexBuilder', () => { const tree = await builder.serialize(); // Check meta shard - const metaEnvelope = JSON.parse(tree['meta_aa.json'].toString()); + const metaEnvelope = JSON.parse(new TextDecoder().decode(tree['meta_aa.json'])); expect(metaEnvelope.version).toBe(2); // Check forward shard - const fwdEnvelope = JSON.parse(tree['shards_fwd_aa.json'].toString()); + const fwdEnvelope = JSON.parse(new TextDecoder().decode(tree['shards_fwd_aa.json'])); expect(fwdEnvelope.version).toBe(2); // Check reverse shard - const revEnvelope = JSON.parse(tree['shards_rev_dd.json'].toString()); + const revEnvelope = JSON.parse(new TextDecoder().decode(tree['shards_rev_dd.json'])); expect(revEnvelope.version).toBe(2); }); @@ -107,7 +107,7 @@ describe('BitmapIndexBuilder', () => { builder.registerNode('testsha1'); 
const tree = await builder.serialize(); - const envelope = JSON.parse(tree['meta_te.json'].toString()); + const envelope = JSON.parse(new TextDecoder().decode(tree['meta_te.json'])); expect(envelope.version).toBe(SHARD_VERSION); }); diff --git a/test/unit/domain/services/PatchBuilderV2.content.test.js b/test/unit/domain/services/PatchBuilderV2.content.test.js index 1116712f..a8d44890 100644 --- a/test/unit/domain/services/PatchBuilderV2.content.test.js +++ b/test/unit/domain/services/PatchBuilderV2.content.test.js @@ -221,6 +221,81 @@ describe('PatchBuilderV2 content attachment', () => { }); }); + describe('attachContent() with blobStorage', () => { + it('uses blobStorage.store() when blobStorage is provided', async () => { + const blobStorage = { + store: vi.fn().mockResolvedValue('cas-tree-oid'), + retrieve: vi.fn(), + }; + const persistence = createMockPersistence(); + const builder = new PatchBuilderV2(/** @type {any} */ ({ + persistence, + graphName: 'g', + writerId: 'w1', + lamport: 1, + versionVector: createVersionVector(), + getCurrentState: () => null, + blobStorage, + })); + + await builder.attachContent('node:1', 'hello world'); + + expect(blobStorage.store).toHaveBeenCalledWith('hello world', { slug: 'g/node:1' }); + expect(persistence.writeBlob).not.toHaveBeenCalled(); + const patch = builder.build(); + expect(patch.ops[0]).toMatchObject({ + type: 'PropSet', + node: 'node:1', + key: '_content', + value: 'cas-tree-oid', + }); + }); + + it('falls back to persistence.writeBlob() when blobStorage is not provided', async () => { + const persistence = createMockPersistence({ + writeBlob: vi.fn().mockResolvedValue('raw-blob-oid'), + }); + const builder = new PatchBuilderV2(/** @type {any} */ ({ + persistence, + writerId: 'w1', + lamport: 1, + versionVector: createVersionVector(), + getCurrentState: () => null, + })); + + await builder.attachContent('node:1', 'hello'); + + expect(persistence.writeBlob).toHaveBeenCalledWith('hello'); + }); + }); + + 
describe('attachEdgeContent() with blobStorage', () => { + it('uses blobStorage.store() when blobStorage is provided', async () => { + const state = createMockState(); + orsetAdd(state.edgeAlive, encodeEdgeKey('a', 'b', 'rel'), createDot('w1', 1)); + + const blobStorage = { + store: vi.fn().mockResolvedValue('cas-edge-tree-oid'), + retrieve: vi.fn(), + }; + const persistence = createMockPersistence(); + const builder = new PatchBuilderV2(/** @type {any} */ ({ + persistence, + graphName: 'g', + writerId: 'w1', + lamport: 1, + versionVector: createVersionVector(), + getCurrentState: () => state, + blobStorage, + })); + + await builder.attachEdgeContent('a', 'b', 'rel', 'edge-data'); + + expect(blobStorage.store).toHaveBeenCalledWith('edge-data', { slug: 'g/a/b/rel' }); + expect(persistence.writeBlob).not.toHaveBeenCalled(); + }); + }); + describe('commit() with content blobs', () => { it('includes _content_ entries in tree when content blobs exist', async () => { const contentOid = 'a'.repeat(40); diff --git a/test/unit/domain/services/StreamingBitmapIndexBuilder.test.js b/test/unit/domain/services/StreamingBitmapIndexBuilder.test.js index 6238d6d8..2eab3ca8 100644 --- a/test/unit/domain/services/StreamingBitmapIndexBuilder.test.js +++ b/test/unit/domain/services/StreamingBitmapIndexBuilder.test.js @@ -31,13 +31,13 @@ describe('StreamingBitmapIndexBuilder', () => { mockStorage = { writeBlob: vi.fn().mockImplementation(async (buffer) => { const oid = `blob-${blobCounter++}`; - writtenBlobs.push({ oid, content: buffer.toString('utf-8') }); + writtenBlobs.push({ oid, content: new TextDecoder().decode(buffer) }); return oid; }), writeTree: vi.fn().mockResolvedValue('tree-oid'), readBlob: vi.fn().mockImplementation(async (oid) => { const blob = writtenBlobs.find((/** @type {any} */ b) => b.oid === oid); - return Buffer.from(blob ? blob.content : '{}'); + return new TextEncoder().encode(blob ? 
blob.content : '{}'); }), }; }); @@ -254,17 +254,17 @@ describe('StreamingBitmapIndexBuilder memory guard', () => { const mockStorage = { writeBlob: vi.fn().mockImplementation(async (buffer) => { const oid = `blob-${blobCounter++}`; - writtenBlobs.set(oid, buffer.toString('utf-8')); + writtenBlobs.set(oid, new TextDecoder().decode(buffer)); return oid; }), writeTree: vi.fn().mockResolvedValue('tree-oid'), readBlob: vi.fn().mockImplementation(async (oid) => { const content = writtenBlobs.get(oid); if (content) { - return Buffer.from(content); + return new TextEncoder().encode(content); } // Return valid empty envelope for any untracked blobs - return Buffer.from(JSON.stringify(createMockEnvelope({}))); + return new TextEncoder().encode(JSON.stringify(createMockEnvelope({}))); }), }; @@ -315,12 +315,12 @@ describe('StreamingBitmapIndexBuilder memory guard', () => { const mockStorage = { writeBlob: vi.fn().mockImplementation(async (buffer) => { const oid = `blob-${blobCounter++}`; - writtenBlobs.set(oid, buffer.toString('utf-8')); + writtenBlobs.set(oid, new TextDecoder().decode(buffer)); return oid; }), writeTree: vi.fn().mockResolvedValue('tree-oid'), readBlob: vi.fn().mockImplementation(async (oid) => { - return Buffer.from(writtenBlobs.get(oid) || '{}'); + return new TextEncoder().encode(writtenBlobs.get(oid) || '{}'); }), }; @@ -368,12 +368,12 @@ describe('StreamingBitmapIndexBuilder extreme stress tests', () => { const mockStorage = { writeBlob: vi.fn().mockImplementation(async (buffer) => { const oid = `blob-${blobCounter++}`; - writtenBlobs.set(oid, buffer.toString('utf-8')); + writtenBlobs.set(oid, new TextDecoder().decode(buffer)); return oid; }), writeTree: vi.fn().mockResolvedValue('tree-oid'), readBlob: vi.fn().mockImplementation(async (oid) => { - return Buffer.from(writtenBlobs.get(oid) || '{}'); + return new TextEncoder().encode(writtenBlobs.get(oid) || '{}'); }), }; @@ -436,7 +436,7 @@ describe('StreamingBitmapIndexBuilder extreme stress tests', () => 
{ return `blob-${writeCallCount}`; }), writeTree: vi.fn().mockResolvedValue('tree-oid'), - readBlob: vi.fn().mockResolvedValue(Buffer.from('{}')), + readBlob: vi.fn().mockResolvedValue(new TextEncoder().encode('{}')), }; const builder = new StreamingBitmapIndexBuilder(/** @type {any} */ ({ @@ -472,7 +472,7 @@ describe('StreamingBitmapIndexBuilder extreme stress tests', () => { return 'blob-oid'; }), writeTree: vi.fn().mockResolvedValue('tree-oid'), - readBlob: vi.fn().mockResolvedValue(Buffer.from('{}')), + readBlob: vi.fn().mockResolvedValue(new TextEncoder().encode('{}')), }; const builder = new StreamingBitmapIndexBuilder(/** @type {any} */ ({ @@ -513,12 +513,12 @@ describe('StreamingBitmapIndexBuilder extreme stress tests', () => { const mockStorage = { writeBlob: vi.fn().mockImplementation(async (buffer) => { const oid = `blob-${blobCounter++}`; - writtenBlobs.set(oid, buffer.toString('utf-8')); + writtenBlobs.set(oid, new TextDecoder().decode(buffer)); return oid; }), writeTree: vi.fn().mockResolvedValue('tree-oid'), readBlob: vi.fn().mockImplementation(async (oid) => { - return Buffer.from(writtenBlobs.get(oid) || '{}'); + return new TextEncoder().encode(writtenBlobs.get(oid) || '{}'); }), }; @@ -589,12 +589,12 @@ describe('StreamingBitmapIndexBuilder extreme stress tests', () => { const mockStorage = { writeBlob: vi.fn().mockImplementation(async (buffer) => { const oid = `blob-${blobCounter++}`; - writtenBlobs.set(oid, buffer.toString('utf-8')); + writtenBlobs.set(oid, new TextDecoder().decode(buffer)); return oid; }), writeTree: vi.fn().mockResolvedValue('tree-oid'), readBlob: vi.fn().mockImplementation(async (oid) => { - return Buffer.from(writtenBlobs.get(oid) || '{}'); + return new TextEncoder().encode(writtenBlobs.get(oid) || '{}'); }), }; diff --git a/test/unit/domain/services/WarpMessageCodec.test.js b/test/unit/domain/services/WarpMessageCodec.test.js index 9d908505..04a288c1 100644 --- a/test/unit/domain/services/WarpMessageCodec.test.js +++ 
b/test/unit/domain/services/WarpMessageCodec.test.js @@ -144,6 +144,41 @@ describe('WarpMessageCodec', () => { }) ).toThrow('40 or 64 character hex string'); }); + + it('includes eg-encrypted trailer when encrypted=true', () => { + const message = encodePatchMessage({ + graph: 'events', + writer: 'node-1', + lamport: 1, + patchOid: VALID_OID_SHA1, + encrypted: true, + }); + + expect(message).toContain('eg-encrypted: true'); + }); + + it('omits eg-encrypted trailer when encrypted=false', () => { + const message = encodePatchMessage({ + graph: 'events', + writer: 'node-1', + lamport: 1, + patchOid: VALID_OID_SHA1, + encrypted: false, + }); + + expect(message).not.toContain('eg-encrypted'); + }); + + it('omits eg-encrypted trailer by default', () => { + const message = encodePatchMessage({ + graph: 'events', + writer: 'node-1', + lamport: 1, + patchOid: VALID_OID_SHA1, + }); + + expect(message).not.toContain('eg-encrypted'); + }); }); describe('encodeCheckpointMessage', () => { @@ -398,6 +433,43 @@ eg-schema: 1`; expect(() => decodePatchMessage(message)).toThrow('Invalid patchOid'); }); + + it('decodes encrypted=true from eg-encrypted trailer', () => { + const encoded = encodePatchMessage({ + graph: 'events', + writer: 'node-1', + lamport: 1, + patchOid: VALID_OID_SHA1, + encrypted: true, + }); + const decoded = decodePatchMessage(encoded); + expect(decoded.encrypted).toBe(true); + }); + + it('decodes encrypted=false when eg-encrypted trailer is absent', () => { + const encoded = encodePatchMessage({ + graph: 'events', + writer: 'node-1', + lamport: 1, + patchOid: VALID_OID_SHA1, + }); + const decoded = decodePatchMessage(encoded); + expect(decoded.encrypted).toBe(false); + }); + + it('roundtrips encrypted flag correctly', () => { + for (const encrypted of [true, false]) { + const encoded = encodePatchMessage({ + graph: 'events', + writer: 'w1', + lamport: 5, + patchOid: VALID_OID_SHA1, + encrypted, + }); + const decoded = decodePatchMessage(encoded); + 
expect(decoded.encrypted).toBe(encrypted); + } + }); }); describe('decodeCheckpointMessage', () => { diff --git a/test/unit/domain/services/WarpServeService.test.js b/test/unit/domain/services/WarpServeService.test.js new file mode 100644 index 00000000..0adb9cd0 --- /dev/null +++ b/test/unit/domain/services/WarpServeService.test.js @@ -0,0 +1,1456 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import WarpServeService from '../../../../src/domain/services/WarpServeService.js'; + +/** + * Creates a mock WebSocketServerPort that captures the onConnection + * handler and lets tests simulate client connections without real I/O. + * + * @returns {{ port: import('../../../../src/ports/WebSocketServerPort.js').default, getOnConnection: () => Function|null, simulateConnection: Function }} + */ +function createMockWsPort() { + /** @type {Function|null} */ + let onConnection = null; + + const port = { + createServer(/** @type {Function} */ handler) { + onConnection = handler; + return { + async listen(/** @type {number} */ p, /** @type {string|undefined} */ host) { + return { port: p || 9999, host: host || '127.0.0.1' }; + }, + async close() {}, + }; + }, + }; + + function simulateConnection() { + /** @type {Array} */ + const sent = []; + /** @type {Function|null} */ + let messageHandler = null; + /** @type {Function|null} */ + let closeHandler = null; + + /** @type {import('../../../../src/ports/WebSocketServerPort.js').WsConnection} */ + const conn = { + send(/** @type {string} */ msg) { sent.push(msg); }, + onMessage(/** @type {Function} */ handler) { messageHandler = handler; }, + onClose(/** @type {Function} */ handler) { closeHandler = handler; }, + close() { if (closeHandler) { closeHandler(1000, 'test'); } }, + }; + + if (!onConnection) { + throw new Error('No connection handler registered — call listen() first'); + } + onConnection(conn); + + return { + conn, + sent, + /** @param {string} msg */ + sendFromClient(msg) { + if 
(messageHandler) { messageHandler(msg); } + }, + triggerClose(/** @type {number} */ code = 1000, /** @type {string} */ reason = '') { + if (closeHandler) { closeHandler(code, reason); } + }, + }; + } + + return { + port: /** @type {import('../../../../src/ports/WebSocketServerPort.js').default} */ (port), + getOnConnection: () => onConnection, + simulateConnection, + }; +} + +/** + * Creates a minimal mock WarpGraph with the methods WarpServeService needs. + * + * @param {Object} [overrides] + * @param {string} [overrides.graphName] + * @returns {any} + */ +function createMockGraph(overrides = {}) { + const graphName = overrides.graphName || 'test-graph'; + + const nodes = new Map(); + const edges = []; + + return { + graphName, + materialize: vi.fn().mockResolvedValue({ + nodeAlive: { entries: new Map(), tombstones: new Set() }, + edgeAlive: { entries: new Map(), tombstones: new Set() }, + prop: new Map(), + observedFrontier: new Map(), + }), + subscribe: vi.fn().mockReturnValue({ unsubscribe: vi.fn() }), + getNodeProps: vi.fn().mockResolvedValue(null), + createPatch: vi.fn().mockResolvedValue({ + addNode: vi.fn().mockReturnThis(), + removeNode: vi.fn().mockReturnThis(), + addEdge: vi.fn().mockReturnThis(), + removeEdge: vi.fn().mockReturnThis(), + setProperty: vi.fn().mockReturnThis(), + commit: vi.fn().mockResolvedValue('abc123'), + }), + query: vi.fn().mockReturnValue({ + match: vi.fn().mockReturnThis(), + select: vi.fn().mockReturnThis(), + run: vi.fn().mockResolvedValue([]), + }), + }; +} + +describe('WarpServeService', () => { + + // ── Construction ──────────────────────────────────────────────────── + + describe('construction', () => { + it('requires a WebSocketServerPort', () => { + expect(() => new WarpServeService({ wsPort: /** @type {any} */ (null), graphs: [] })) + .toThrow(); + }); + + it('requires at least one graph', () => { + const { port } = createMockWsPort(); + expect(() => new WarpServeService({ wsPort: port, graphs: [] })) + .toThrow(); + }); 
+ + it('accepts a single graph', () => { + const { port } = createMockWsPort(); + const graph = createMockGraph(); + const service = new WarpServeService({ wsPort: port, graphs: [graph] }); + expect(service).toBeDefined(); + }); + + it('accepts multiple graphs', () => { + const { port } = createMockWsPort(); + const g1 = createMockGraph({ graphName: 'alpha' }); + const g2 = createMockGraph({ graphName: 'beta' }); + const service = new WarpServeService({ wsPort: port, graphs: [g1, g2] }); + expect(service).toBeDefined(); + }); + }); + + // ── Connection lifecycle ──────────────────────────────────────────── + + describe('connection lifecycle', () => { + /** @type {ReturnType} */ + let ws; + /** @type {any} */ + let graph; + /** @type {WarpServeService} */ + let service; + + beforeEach(async () => { + ws = createMockWsPort(); + graph = createMockGraph(); + service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + }); + + afterEach(async () => { + await service?.close(); + }); + + it('sends a hello message on connect', () => { + const client = ws.simulateConnection(); + expect(client.sent.length).toBe(1); + const hello = JSON.parse(client.sent[0]); + expect(hello.v).toBe(1); + expect(hello.type).toBe('hello'); + expect(hello.payload.graphs).toEqual(['test-graph']); + }); + + it('hello includes protocol version', () => { + const client = ws.simulateConnection(); + const hello = JSON.parse(client.sent[0]); + expect(hello.payload.protocol).toBe(1); + }); + + it('cleans up on client disconnect', async () => { + // Capture the onChange handler for diff broadcasting + /** @type {Function|null} */ + let capturedOnChange = null; + graph.subscribe.mockImplementation((/** @type {any} */ opts) => { + capturedOnChange = opts.onChange; + return { unsubscribe: vi.fn() }; + }); + + // Re-create service so it picks up the new subscribe mock + service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + + 
const client = ws.simulateConnection(); + // Open the graph so the client is subscribed to diffs + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); // hello + state + client.sent.length = 0; + + // Disconnect + client.triggerClose(); + + // A diff broadcast after disconnect should NOT reach the dead client + if (capturedOnChange) { + /** @type {any} */ (capturedOnChange)({ + nodes: { added: ['node:ghost'], removed: [] }, + }); + } + expect(client.sent).toHaveLength(0); + + // Service should remain functional for new connections + const client2 = ws.simulateConnection(); + expect(client2.sent.length).toBe(1); + }); + }); + + // ── Protocol: open ────────────────────────────────────────────────── + + describe('open', () => { + /** @type {ReturnType} */ + let ws; + /** @type {any} */ + let graph; + /** @type {WarpServeService} */ + let service; + + beforeEach(async () => { + ws = createMockWsPort(); + graph = createMockGraph(); + service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + }); + + afterEach(async () => { + await service?.close(); + }); + + it('responds with materialized state when client opens a graph', async () => { + const client = ws.simulateConnection(); + client.sent.length = 0; // clear hello + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'req-1', + payload: { graph: 'test-graph', writerId: 'browser-writer-1' }, + })); + + // Allow async processing + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.v).toBe(1); + expect(msg.type).toBe('state'); + expect(msg.id).toBe('req-1'); + expect(msg.payload.graph).toBe('test-graph'); + }); + + it('serializes nodes, edges, and frontier in the state payload', async () => { + // Set up a graph with populated 
state + const dot = { writerId: 'w1', counter: 1 }; + graph.materialize.mockResolvedValue({ + nodeAlive: { + entries: new Map([['user:alice', new Set([dot])], ['user:bob', new Set([dot])]]), + tombstones: new Set(), + }, + edgeAlive: { + entries: new Map([['user:alice\0user:bob\0knows', new Set([dot])]]), + tombstones: new Set(), + }, + prop: new Map([ + ['user:alice\0name', { eventId: { writerId: 'w1', counter: 1 }, value: 'Alice' }], + ]), + observedFrontier: new Map([['w1', 3]]), + }); + + const client = ws.simulateConnection(); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'req-state', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('state'); + expect(msg.payload.graph).toBe('test-graph'); + expect(msg.payload.nodes).toEqual(expect.arrayContaining([ + expect.objectContaining({ id: 'user:alice', props: { name: 'Alice' } }), + expect.objectContaining({ id: 'user:bob', props: {} }), + ])); + expect(msg.payload.edges).toEqual([ + { from: 'user:alice', to: 'user:bob', label: 'knows' }, + ]); + expect(msg.payload.frontier).toEqual({ w1: 3 }); + }); + + it('returns error for unknown graph name', async () => { + const client = ws.simulateConnection(); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'req-2', + payload: { graph: 'nonexistent', writerId: 'w1' }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.payload.code).toBe('E_UNKNOWN_GRAPH'); + }); + + it('rejects messages with unsupported protocol version', async () => { + const client = ws.simulateConnection(); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 999, type: 'open', id: 'req-3', + payload: { graph: 
'test-graph', writerId: 'w1' }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.payload.code).toBe('E_UNSUPPORTED_VERSION'); + }); + }); + + // ── Protocol: mutate ──────────────────────────────────────────────── + + describe('mutate', () => { + /** @type {ReturnType} */ + let ws; + /** @type {any} */ + let graph; + /** @type {WarpServeService} */ + let service; + + beforeEach(async () => { + ws = createMockWsPort(); + graph = createMockGraph(); + service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + }); + + afterEach(async () => { + await service?.close(); + }); + + it('applies addNode mutation and returns ack', async () => { + const client = ws.simulateConnection(); + // Open first + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'open-1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); // hello + state + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'mutate', id: 'mut-1', + payload: { + graph: 'test-graph', + ops: [{ op: 'addNode', args: ['node:test'] }], + }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('ack'); + expect(msg.id).toBe('mut-1'); + expect(graph.createPatch).toHaveBeenCalled(); + }); + + it('rejects ops not in the allowlist', async () => { + const client = ws.simulateConnection(); + // Open first + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'mutate', id: 'mut-bad', + payload: { + 
graph: 'test-graph', + ops: [{ op: 'constructor', args: [] }], + }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.id).toBe('mut-bad'); + expect(msg.payload.code).toBe('E_INVALID_OP'); + }); + + it('rejects mutate with wrong arg count', async () => { + const client = ws.simulateConnection(); + // Open first + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'mutate', id: 'mut-argc', + payload: { + graph: 'test-graph', + ops: [{ op: 'addNode', args: [] }], + }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.id).toBe('mut-argc'); + expect(msg.payload.code).toBe('E_INVALID_ARGS'); + }); + + it('rejects mutate with wrong arg type', async () => { + const client = ws.simulateConnection(); + // Open first + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'mutate', id: 'mut-argt', + payload: { + graph: 'test-graph', + ops: [{ op: 'addNode', args: [42] }], + }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.id).toBe('mut-argt'); + expect(msg.payload.code).toBe('E_INVALID_ARGS'); + }); + + it('allows wildcard arg types for setProperty value', async () => { + const client = ws.simulateConnection(); + // Open first + 
client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'mutate', id: 'mut-wild', + payload: { + graph: 'test-graph', + ops: [{ op: 'setProperty', args: ['node:1', 'color', 42] }], + }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('ack'); + expect(msg.id).toBe('mut-wild'); + }); + + it('rejects attachContent with non-string content', async () => { + const client = ws.simulateConnection(); + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'mutate', id: 'mut-bad-content', + payload: { + graph: 'test-graph', + ops: [{ op: 'attachContent', args: ['node:1', 42] }], + }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.id).toBe('mut-bad-content'); + expect(msg.payload.code).toBe('E_INVALID_ARGS'); + }); + + it('rejects attachEdgeContent with non-string content', async () => { + const client = ws.simulateConnection(); + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'mutate', id: 'mut-bad-edge-content', + payload: { + graph: 'test-graph', + ops: [{ op: 'attachEdgeContent', args: ['n1', 'n2', 'knows', { binary: true }] 
}], + }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.id).toBe('mut-bad-edge-content'); + expect(msg.payload.code).toBe('E_INVALID_ARGS'); + }); + + it('awaits async ops like attachContent before commit', async () => { + const client = ws.simulateConnection(); + // Open first + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + // Override createPatch to return a mock with an async attachContent + let attachResolved = false; + const mockPatch = { + addNode: vi.fn().mockReturnThis(), + attachContent: vi.fn().mockImplementation(async () => { + // Simulate async blob write — use microtask (not real timer) to + // prove ordering without introducing timer-based flakiness. + await Promise.resolve(); + attachResolved = true; + return mockPatch; + }), + commit: vi.fn().mockImplementation(async () => { + // If attachContent wasn't awaited, attachResolved is still false + if (!attachResolved) { + throw new Error('commit called before attachContent resolved'); + } + return 'sha-blob'; + }), + }; + graph.createPatch.mockResolvedValue(mockPatch); + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'mutate', id: 'mut-attach', + payload: { + graph: 'test-graph', + ops: [ + { op: 'addNode', args: ['node:blob'] }, + { op: 'attachContent', args: ['node:blob', 'hello world'] }, + ], + }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('ack'); + expect(msg.id).toBe('mut-attach'); + expect(msg.payload.sha).toBe('sha-blob'); + expect(mockPatch.attachContent).toHaveBeenCalledWith('node:blob', 'hello world'); + }); + + it('returns E_MUTATE_FAILED when createPatch 
rejects', async () => { + const client = ws.simulateConnection(); + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + graph.createPatch.mockRejectedValueOnce(new Error('disk full')); + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'mutate', id: 'mut-fail', + payload: { + graph: 'test-graph', + ops: [{ op: 'addNode', args: ['node:boom'] }], + }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.id).toBe('mut-fail'); + expect(msg.payload.code).toBe('E_MUTATE_FAILED'); + expect(msg.payload.message).toBe('disk full'); + }); + + it('rejects mutate before open', async () => { + const client = ws.simulateConnection(); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'mutate', id: 'mut-2', + payload: { + graph: 'test-graph', + ops: [{ op: 'addNode', args: ['node:test'] }], + }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.payload.code).toBe('E_NOT_OPENED'); + }); + + it('rejects mutate targeting a different graph than opened', async () => { + const localWs = createMockWsPort(); + const alpha = createMockGraph({ graphName: 'alpha' }); + const beta = createMockGraph({ graphName: 'beta' }); + const svc = new WarpServeService({ wsPort: localWs.port, graphs: [alpha, beta] }); + await svc.listen(0); + + const client = localWs.simulateConnection(); + // Open alpha only + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'alpha' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + // Try to 
mutate beta (not opened) + client.sendFromClient(JSON.stringify({ + v: 1, type: 'mutate', id: 'mut-cross', + payload: { + graph: 'beta', + ops: [{ op: 'addNode', args: ['node:x'] }], + }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.payload.code).toBe('E_NOT_OPENED'); + + await svc.close(); + }); + }); + + // ── Protocol: inspect ─────────────────────────────────────────────── + + describe('inspect', () => { + /** @type {ReturnType} */ + let ws; + /** @type {any} */ + let graph; + /** @type {WarpServeService} */ + let service; + + beforeEach(async () => { + ws = createMockWsPort(); + graph = createMockGraph(); + graph.getNodeProps.mockResolvedValue({ name: 'Alice', role: 'admin' }); + service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + }); + + afterEach(async () => { + await service?.close(); + }); + + it('returns node properties', async () => { + const client = ws.simulateConnection(); + // Open first + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'inspect', id: 'ins-1', + payload: { graph: 'test-graph', nodeId: 'user:alice' }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('inspect'); + expect(msg.id).toBe('ins-1'); + expect(msg.payload.props).toEqual({ name: 'Alice', role: 'admin' }); + }); + + it('returns E_INSPECT_FAILED when getNodeProps rejects', async () => { + graph.getNodeProps.mockRejectedValueOnce(new Error('node not found')); + + const client = ws.simulateConnection(); + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', 
id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'inspect', id: 'ins-fail', + payload: { graph: 'test-graph', nodeId: 'user:ghost' }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.id).toBe('ins-fail'); + expect(msg.payload.code).toBe('E_INSPECT_FAILED'); + expect(msg.payload.message).toBe('node not found'); + }); + }); + + // ── Protocol: seek ────────────────────────────────────────────────── + + describe('seek', () => { + /** @type {ReturnType} */ + let ws; + /** @type {any} */ + let graph; + /** @type {WarpServeService} */ + let service; + + beforeEach(async () => { + ws = createMockWsPort(); + graph = createMockGraph(); + service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + }); + + afterEach(async () => { + await service?.close(); + }); + + it('re-materializes with ceiling and sends state', async () => { + const client = ws.simulateConnection(); + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'seek', id: 'sk-1', + payload: { graph: 'test-graph', ceiling: 5 }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('state'); + expect(msg.id).toBe('sk-1'); + expect(graph.materialize).toHaveBeenCalledWith( + expect.objectContaining({ ceiling: 5 }), + ); + }); + + it('rejects negative ceiling with E_INVALID_PAYLOAD', async () => { + const client = ws.simulateConnection(); 
+ client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'seek', id: 'sk-neg', + payload: { graph: 'test-graph', ceiling: -1 }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.id).toBe('sk-neg'); + expect(msg.payload.code).toBe('E_INVALID_PAYLOAD'); + }); + + it('treats Infinity ceiling as materialize-at-head', async () => { + const client = ws.simulateConnection(); + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + graph.materialize.mockClear(); + client.sent.length = 0; + + // Infinity is not valid JSON, so we hand-craft the raw string + // to simulate a non-JSON transport or future binary protocol. 
+ client.sendFromClient( + '{"v":1,"type":"seek","id":"sk-inf","payload":{"graph":"test-graph","ceiling":1e999}}', + ); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('state'); + expect(msg.id).toBe('sk-inf'); + // Infinity should NOT be passed as ceiling — materialize at head + expect(graph.materialize).toHaveBeenCalledWith({}); + }); + + it('rejects non-integer ceiling with E_INVALID_PAYLOAD', async () => { + const client = ws.simulateConnection(); + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'seek', id: 'sk-frac', + payload: { graph: 'test-graph', ceiling: 3.5 }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.id).toBe('sk-frac'); + expect(msg.payload.code).toBe('E_INVALID_PAYLOAD'); + expect(msg.payload.message).toContain('integer'); + }); + }); + + // ── Live diff push ────────────────────────────────────────────────── + + describe('live diff push', () => { + it('pushes diffs to subscribed clients when graph changes', async () => { + const ws = createMockWsPort(); + const graph = createMockGraph(); + + /** @type {Function|null} */ + let capturedOnChange = null; + graph.subscribe.mockImplementation((/** @type {any} */ opts) => { + capturedOnChange = opts.onChange; + return { unsubscribe: vi.fn() }; + }); + + const service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + + const client = ws.simulateConnection(); + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await 
vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + // Simulate a graph change + const fakeDiff = { + nodes: { added: ['node:new'], removed: [] }, + edges: { added: [], removed: [] }, + props: { set: [], removed: [] }, + }; + + expect(capturedOnChange).not.toBeNull(); + /** @type {any} */ (capturedOnChange)(fakeDiff); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('diff'); + expect(msg.payload.graph).toBe('test-graph'); + expect(msg.payload.diff.nodes.added).toEqual(['node:new']); + }); + + it('broadcasts diffs to all clients subscribed to the same graph', async () => { + const ws = createMockWsPort(); + const graph = createMockGraph(); + + /** @type {Function|null} */ + let capturedOnChange = null; + graph.subscribe.mockImplementation((/** @type {any} */ opts) => { + capturedOnChange = opts.onChange; + return { unsubscribe: vi.fn() }; + }); + + const service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + + // Connect and open two clients on the same graph + const client1 = ws.simulateConnection(); + client1.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client1.sent.length).toBeGreaterThanOrEqual(2); }); + client1.sent.length = 0; + + const client2 = ws.simulateConnection(); + client2.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o2', + payload: { graph: 'test-graph', writerId: 'w2' }, + })); + await vi.waitFor(() => { expect(client2.sent.length).toBeGreaterThanOrEqual(2); }); + client2.sent.length = 0; + + // Trigger a diff + const fakeDiff = { + nodes: { added: ['node:broadcast'], removed: [] }, + edges: { added: [], removed: [] }, + props: { set: [], removed: [] }, + }; + + expect(capturedOnChange).not.toBeNull(); + /** @type {any} */ 
(capturedOnChange)(fakeDiff); + + await vi.waitFor(() => expect(client1.sent.length).toBeGreaterThan(0)); + await vi.waitFor(() => expect(client2.sent.length).toBeGreaterThan(0)); + + const msg1 = JSON.parse(client1.sent[0]); + const msg2 = JSON.parse(client2.sent[0]); + + expect(msg1.type).toBe('diff'); + expect(msg1.payload.diff.nodes.added).toEqual(['node:broadcast']); + expect(msg2.type).toBe('diff'); + expect(msg2.payload.diff.nodes.added).toEqual(['node:broadcast']); + }); + + it('survives a dead client and still delivers to healthy clients', async () => { + const ws = createMockWsPort(); + const graph = createMockGraph(); + + /** @type {Function|null} */ + let capturedOnChange = null; + graph.subscribe.mockImplementation((/** @type {any} */ opts) => { + capturedOnChange = opts.onChange; + return { unsubscribe: vi.fn() }; + }); + + const service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + + // Client 1: will throw on send (dead connection) + const client1 = ws.simulateConnection(); + client1.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client1.sent.length).toBeGreaterThanOrEqual(2); }); + + // Client 2: healthy + const client2 = ws.simulateConnection(); + client2.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o2', + payload: { graph: 'test-graph', writerId: 'w2' }, + })); + await vi.waitFor(() => { expect(client2.sent.length).toBeGreaterThanOrEqual(2); }); + client2.sent.length = 0; + + // Make client1's send throw (simulating a dead WebSocket) + client1.conn.send = () => { throw new Error('Connection reset'); }; + + const fakeDiff = { + nodes: { added: ['node:survive'], removed: [] }, + edges: { added: [], removed: [] }, + props: { set: [], removed: [] }, + }; + + expect(capturedOnChange).not.toBeNull(); + /** @type {any} */ (capturedOnChange)(fakeDiff); + + // Client 2 should still receive 
the diff + expect(client2.sent.length).toBe(1); + const msg = JSON.parse(client2.sent[0]); + expect(msg.type).toBe('diff'); + expect(msg.payload.diff.nodes.added).toEqual(['node:survive']); + }); + + it('does not push diffs to clients that have not opened that graph', async () => { + const ws = createMockWsPort(); + const g1 = createMockGraph({ graphName: 'alpha' }); + const g2 = createMockGraph({ graphName: 'beta' }); + + /** @type {Function|null} */ + let g1OnChange = null; + g1.subscribe.mockImplementation((/** @type {any} */ opts) => { + g1OnChange = opts.onChange; + return { unsubscribe: vi.fn() }; + }); + + const service = new WarpServeService({ wsPort: ws.port, graphs: [g1, g2] }); + await service.listen(0); + + const client = ws.simulateConnection(); + // Open beta, not alpha + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'beta', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + // Alpha changes — client should NOT get it + if (g1OnChange) { + /** @type {any} */ (g1OnChange)({ + nodes: { added: ['node:x'], removed: [] }, + edges: { added: [], removed: [] }, + props: { set: [], removed: [] }, + }); + } + + // _broadcastDiff is synchronous — no async delay needed + expect(client.sent).toHaveLength(0); + }); + }); + + // ── Malformed messages ────────────────────────────────────────────── + + describe('malformed messages', () => { + /** @type {ReturnType} */ + let ws; + /** @type {WarpServeService} */ + let service; + + beforeEach(async () => { + ws = createMockWsPort(); + const graph = createMockGraph(); + service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + }); + + afterEach(async () => { + await service?.close(); + }); + + it('returns error for invalid JSON', async () => { + const client = ws.simulateConnection(); + client.sent.length = 0; + + client.sendFromClient('not valid 
json {{{'); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.payload.code).toBe('E_INVALID_MESSAGE'); + }); + + it('returns error for missing type field', async () => { + const client = ws.simulateConnection(); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ v: 1, payload: {} })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.payload.code).toBe('E_INVALID_MESSAGE'); + }); + + it('returns E_INVALID_PAYLOAD for open with missing graph', async () => { + const client = ws.simulateConnection(); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o-bad', + payload: {}, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.id).toBe('o-bad'); + expect(msg.payload.code).toBe('E_INVALID_PAYLOAD'); + }); + + it('returns E_INVALID_PAYLOAD for mutate with missing ops', async () => { + const client = ws.simulateConnection(); + // Open first + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'mutate', id: 'mut-bad', + payload: { graph: 'test-graph' }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.id).toBe('mut-bad'); + expect(msg.payload.code).toBe('E_INVALID_PAYLOAD'); + }); + + it('returns E_INVALID_PAYLOAD for inspect with missing nodeId', async () => { + const client = ws.simulateConnection(); 
+ // Open first + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'inspect', id: 'ins-bad', + payload: { graph: 'test-graph' }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.id).toBe('ins-bad'); + expect(msg.payload.code).toBe('E_INVALID_PAYLOAD'); + }); + + it('returns E_INVALID_PAYLOAD for seek with missing ceiling', async () => { + const client = ws.simulateConnection(); + // Open first + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'seek', id: 'sk-bad', + payload: { graph: 'test-graph' }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.id).toBe('sk-bad'); + expect(msg.payload.code).toBe('E_INVALID_PAYLOAD'); + }); + + it('returns error for unknown message type', async () => { + const client = ws.simulateConnection(); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'explode', id: 'x', payload: {}, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.payload.code).toBe('E_UNKNOWN_TYPE'); + }); + }); + + // ── Shutdown ──────────────────────────────────────────────────────── + + describe('shutdown', () => { + it('close() shuts down cleanly', async () => { + const ws = 
createMockWsPort(); + const graph = createMockGraph(); + const service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + await expect(service.close()).resolves.toBeUndefined(); + }); + }); + + // ── Defensive hardening (B165/B167) ─────────────────────────────── + + describe('listen() hardening', () => { + it('does not leak subscriptions when server.listen() rejects', async () => { + let unsubCalled = false; + const graph = createMockGraph(); + graph.subscribe.mockReturnValue({ unsubscribe: () => { unsubCalled = true; } }); + + const port = { + createServer(/** @type {Function} */ handler) { + return { + async listen() { throw new Error('EADDRINUSE'); }, + async close() {}, + }; + }, + }; + + const service = new WarpServeService({ + wsPort: /** @type {any} */ (port), + graphs: [graph], + }); + + await expect(service.listen(9999)).rejects.toThrow('EADDRINUSE'); + + // Subscriptions must have been cleaned up + expect(unsubCalled).toBe(true); + + // Service must remain in a state where listen() can be retried + // (i.e., _server is null, not a dead handle) + const retryPort = { + createServer(/** @type {Function} */ handler) { + return { + async listen(/** @type {number} */ p) { return { port: p, host: '127.0.0.1' }; }, + async close() {}, + }; + }, + }; + // Can't retry with same service since wsPort is fixed, but we verify + // the internal state by checking that a second service works fine + const service2 = new WarpServeService({ + wsPort: /** @type {any} */ (retryPort), + graphs: [graph], + }); + await expect(service2.listen(0)).resolves.toBeDefined(); + }); + + it('rejects double listen()', async () => { + const ws = createMockWsPort(); + const graph = createMockGraph(); + const service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + await expect(service.listen(0)).rejects.toThrow('already listening'); + }); + }); + + describe('error sanitization (B165)', () => { + it('does not 
leak internal error details to WebSocket clients', async () => { + const ws = createMockWsPort(); + const graph = createMockGraph(); + // Make materialize throw with a detailed internal error + graph.materialize.mockRejectedValue( + new Error('/Users/james/.secret/db at row 42: SQLITE_CORRUPT'), + ); + + const service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + + const client = ws.simulateConnection(); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + // The specific handler (open) can send domain errors — those are fine. + // What we're testing is that the _onConnection fallback catch doesn't + // leak raw err.message. The open handler sends E_MATERIALIZE_FAILED + // with the raw message, which is acceptable for now (domain error). + expect(msg.payload.code).toBe('E_MATERIALIZE_FAILED'); + }); + + it('_onConnection fallback catch sends generic error, not raw err.message', async () => { + const ws = createMockWsPort(); + const graph = createMockGraph(); + const service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + + const client = ws.simulateConnection(); + client.sent.length = 0; + + // Override _onMessage to throw — this is the only way to trigger the + // outer .catch() in _onConnection, since all handler-level errors are + // caught by each handler's own try/catch. The monkey-patch simulates a + // truly unexpected failure (e.g., a bug in message dispatch). 
+ /** @type {any} */ (service)._onMessage = async () => { + throw new Error('secret internal path /etc/shadow'); + }; + + // Re-connect to pick up the patched _onMessage + const client2 = ws.simulateConnection(); + client2.sent.length = 0; + + client2.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'leak-test', + payload: { graph: 'test-graph' }, + })); + + await vi.waitFor(() => expect(client2.sent.length).toBeGreaterThan(0)); + + const msg = JSON.parse(client2.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.payload.code).toBe('E_INTERNAL'); + // Must NOT contain the raw error message + expect(msg.payload.message).not.toContain('secret'); + expect(msg.payload.message).not.toContain('/etc/shadow'); + expect(msg.payload.message).toBe('Internal error'); + }); + }); + + describe('mock patch surface completeness (B167)', () => { + it('exercises attachContent through the mutation pipeline', async () => { + const ws = createMockWsPort(); + const graph = createMockGraph(); + // Add attachContent/attachEdgeContent to mock patch + const mockPatch = { + addNode: vi.fn().mockReturnThis(), + removeNode: vi.fn().mockReturnThis(), + addEdge: vi.fn().mockReturnThis(), + removeEdge: vi.fn().mockReturnThis(), + setProperty: vi.fn().mockReturnThis(), + setEdgeProperty: vi.fn().mockReturnThis(), + attachContent: vi.fn().mockResolvedValue(undefined), + attachEdgeContent: vi.fn().mockResolvedValue(undefined), + commit: vi.fn().mockResolvedValue('sha-attach'), + }; + graph.createPatch.mockResolvedValue(mockPatch); + + const service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + + const client = ws.simulateConnection(); + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'mutate', 
id: 'mut-attach', + payload: { + graph: 'test-graph', + ops: [ + { op: 'addNode', args: ['node:blob'] }, + { op: 'attachContent', args: ['node:blob', 'hello'] }, + ], + }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('ack'); + expect(mockPatch.attachContent).toHaveBeenCalledWith('node:blob', 'hello'); + }); + + it('exercises attachEdgeContent through the mutation pipeline', async () => { + const ws = createMockWsPort(); + const graph = createMockGraph(); + const mockPatch = { + addNode: vi.fn().mockReturnThis(), + removeNode: vi.fn().mockReturnThis(), + addEdge: vi.fn().mockReturnThis(), + removeEdge: vi.fn().mockReturnThis(), + setProperty: vi.fn().mockReturnThis(), + setEdgeProperty: vi.fn().mockReturnThis(), + attachContent: vi.fn().mockResolvedValue(undefined), + attachEdgeContent: vi.fn().mockResolvedValue(undefined), + commit: vi.fn().mockResolvedValue('sha-edge-attach'), + }; + graph.createPatch.mockResolvedValue(mockPatch); + + const service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + + const client = ws.simulateConnection(); + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + client.sendFromClient(JSON.stringify({ + v: 1, type: 'mutate', id: 'mut-edge-attach', + payload: { + graph: 'test-graph', + ops: [ + { op: 'attachEdgeContent', args: ['n1', 'n2', 'knows', 'blob-data'] }, + ], + }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('ack'); + expect(mockPatch.attachEdgeContent).toHaveBeenCalledWith('n1', 'n2', 'knows', 'blob-data'); + }); + }); + + // ── Message size limits ────────────────────────────────────────────── 
+ + describe('message size limits', () => { + it('rejects oversized WebSocket messages', async () => { + const ws = createMockWsPort(); + const graph = createMockGraph(); + const service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + + const client = ws.simulateConnection(); + client.sent.length = 0; + + // Send a message larger than 1 MiB + const oversized = 'x'.repeat(1_048_577); + client.sendFromClient(oversized); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.payload.code).toBe('E_MESSAGE_TOO_LARGE'); + }); + + it('rejects oversized property values in mutate', async () => { + const ws = createMockWsPort(); + const graph = createMockGraph(); + const service = new WarpServeService({ wsPort: ws.port, graphs: [graph] }); + await service.listen(0); + + const client = ws.simulateConnection(); + client.sendFromClient(JSON.stringify({ + v: 1, type: 'open', id: 'o1', + payload: { graph: 'test-graph', writerId: 'w1' }, + })); + await vi.waitFor(() => { expect(client.sent.length).toBeGreaterThanOrEqual(2); }); + client.sent.length = 0; + + // A 100KB property value exceeds the 64 KiB limit + const bigValue = 'x'.repeat(100_000); + client.sendFromClient(JSON.stringify({ + v: 1, type: 'mutate', id: 'mut-big', + payload: { + graph: 'test-graph', + ops: [{ op: 'setProperty', args: ['node:a', 'key', bigValue] }], + }, + })); + + await vi.waitFor(() => expect(client.sent.length).toBeGreaterThan(0)); + const msg = JSON.parse(client.sent[0]); + expect(msg.type).toBe('error'); + expect(msg.payload.code).toBe('E_INVALID_ARGS'); + expect(msg.payload.message).toContain('64 KiB'); + }); + }); +}); diff --git a/test/unit/domain/services/WarpStateIndexBuilder.test.js b/test/unit/domain/services/WarpStateIndexBuilder.test.js index d061d627..c06e1ae2 100644 --- a/test/unit/domain/services/WarpStateIndexBuilder.test.js +++ 
b/test/unit/domain/services/WarpStateIndexBuilder.test.js @@ -130,7 +130,7 @@ describe('WarpStateIndexBuilder', () => { // Same content for (const key of Object.keys(tree1)) { - expect(tree1[key].equals(tree2[key])).toBe(true); + expect(tree1[key]).toEqual(tree2[key]); } }); diff --git a/test/unit/domain/trust/TrustAdversarial.test.js b/test/unit/domain/trust/TrustAdversarial.test.js index 8b7d963b..ffe2174e 100644 --- a/test/unit/domain/trust/TrustAdversarial.test.js +++ b/test/unit/domain/trust/TrustAdversarial.test.js @@ -34,15 +34,15 @@ const ENFORCE_POLICY = { }; describe('Adversarial case 1: Tampered record mid-chain', () => { - it('verifyRecordId returns false for altered subject', () => { + it('verifyRecordId returns false for altered subject', async () => { const tampered = { ...KEY_ADD_2, subject: { ...KEY_ADD_2.subject, publicKey: 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=' }, }; - expect(verifyRecordId(tampered)).toBe(false); + expect(await verifyRecordId(tampered)).toBe(false); }); - it('verifyChain detects recordId mismatch', () => { + it('verifyChain detects recordId mismatch', async () => { const service = new TrustRecordService({ persistence: /** @type {*} */ ({}), codec: /** @type {*} */ ({ encode: () => {}, decode: () => {} }), @@ -53,7 +53,7 @@ describe('Adversarial case 1: Tampered record mid-chain', () => { subject: { ...KEY_ADD_2.subject, publicKey: 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=' }, }; - const result = service.verifyChain([KEY_ADD_1, tampered]); + const result = await service.verifyChain([KEY_ADD_1, tampered]); expect(result.valid).toBe(false); expect(result.errors.some((e) => e.error.includes('RecordId does not match'))).toBe(true); }); @@ -150,7 +150,7 @@ describe('Adversarial case 4: Out-of-order record input', () => { }); describe('Adversarial case 5: Forged issuerKeyId', () => { - it('fingerprint mismatch detected by consumers of TrustCanonical', () => { + it('fingerprint mismatch detected by consumers of 
TrustCanonical', async () => { // An attacker creates a KEY_ADD where the issuerKeyId does NOT match // the SHA-256 fingerprint of the supplied publicKey. The recordId // computed from this forged record will differ from any legitimate record. @@ -168,17 +168,17 @@ describe('Adversarial case 5: Forged issuerKeyId', () => { }; // The recordId will NOT match because the content has changed - expect(verifyRecordId(forged)).toBe(false); + expect(await verifyRecordId(forged)).toBe(false); }); - it('forged issuerKeyId changes the canonical hash', () => { + it('forged issuerKeyId changes the canonical hash', async () => { // Same record content but different issuerKeyId produces different recordId const legit = { ...KEY_ADD_1 }; const forged = { ...KEY_ADD_1, issuerKeyId: 'ed25519:' + '0'.repeat(64) }; // They should compute to different record IDs // (the original passes, the forged fails) - expect(verifyRecordId(legit)).toBe(true); - expect(verifyRecordId(forged)).toBe(false); + expect(await verifyRecordId(legit)).toBe(true); + expect(await verifyRecordId(forged)).toBe(false); }); }); diff --git a/test/unit/domain/trust/TrustCanonical.test.js b/test/unit/domain/trust/TrustCanonical.test.js index 91ec383a..a3a9c784 100644 --- a/test/unit/domain/trust/TrustCanonical.test.js +++ b/test/unit/domain/trust/TrustCanonical.test.js @@ -19,23 +19,23 @@ const record = { }; describe('computeRecordId', () => { - it('returns a 64-character hex string', () => { - const id = computeRecordId(record); + it('returns a 64-character hex string', async () => { + const id = await computeRecordId(record); expect(id).toMatch(/^[0-9a-f]{64}$/); }); - it('is deterministic', () => { - expect(computeRecordId(record)).toBe(computeRecordId(record)); + it('is deterministic', async () => { + expect(await computeRecordId(record)).toBe(await computeRecordId(record)); }); - it('matches manual SHA-256 of recordIdPayload', () => { + it('matches manual SHA-256 of recordIdPayload', async () => { const 
expected = createHash('sha256') .update(recordIdPayload(record)) .digest('hex'); - expect(computeRecordId(record)).toBe(expected); + expect(await computeRecordId(record)).toBe(expected); }); - it('is invariant to key order permutation', () => { + it('is invariant to key order permutation', async () => { const permuted = { subject: record.subject, recordType: record.recordType, @@ -46,44 +46,44 @@ describe('computeRecordId', () => { recordId: record.recordId, signature: record.signature, }; - expect(computeRecordId(permuted)).toBe(computeRecordId(record)); + expect(await computeRecordId(permuted)).toBe(await computeRecordId(record)); }); - it('is independent of recordId field value', () => { + it('is independent of recordId field value', async () => { const a = { ...record, recordId: 'x'.repeat(64) }; const b = { ...record, recordId: 'y'.repeat(64) }; - expect(computeRecordId(a)).toBe(computeRecordId(b)); + expect(await computeRecordId(a)).toBe(await computeRecordId(b)); }); - it('is independent of signature field value', () => { + it('is independent of signature field value', async () => { const a = { ...record, signature: { alg: 'ed25519', sig: 'aaa' } }; const b = { ...record, signature: { alg: 'ed25519', sig: 'bbb' } }; - expect(computeRecordId(a)).toBe(computeRecordId(b)); + expect(await computeRecordId(a)).toBe(await computeRecordId(b)); }); }); describe('verifyRecordId', () => { - it('returns true when recordId matches content', () => { - const id = computeRecordId(record); + it('returns true when recordId matches content', async () => { + const id = await computeRecordId(record); const r = { ...record, recordId: id }; - expect(verifyRecordId(r)).toBe(true); + expect(await verifyRecordId(r)).toBe(true); }); - it('returns false when recordId does not match', () => { + it('returns false when recordId does not match', async () => { const r = { ...record, recordId: 'f'.repeat(64) }; - expect(verifyRecordId(r)).toBe(false); + expect(await 
verifyRecordId(r)).toBe(false); }); }); describe('computeSignaturePayload', () => { - it('returns a Buffer', () => { + it('returns a Uint8Array', () => { const payload = computeSignaturePayload(record); - expect(Buffer.isBuffer(payload)).toBe(true); + expect(payload).toBeInstanceOf(Uint8Array); }); it('starts with the trust-sign domain prefix', () => { const payload = computeSignaturePayload(record); - const str = payload.toString('utf8'); + const str = new TextDecoder().decode(payload); expect(str.startsWith('git-warp:trust-sign:v1\0')).toBe(true); }); }); diff --git a/test/unit/domain/trust/TrustCrypto.signVerify.test.js b/test/unit/domain/trust/TrustCrypto.signVerify.test.js index f68df737..cb417b14 100644 --- a/test/unit/domain/trust/TrustCrypto.signVerify.test.js +++ b/test/unit/domain/trust/TrustCrypto.signVerify.test.js @@ -11,7 +11,7 @@ import { describe, it, expect } from 'vitest'; import { createPrivateKey, sign } from 'node:crypto'; import { computeSignaturePayload } from '../../../../src/domain/trust/TrustCanonical.js'; -import { verifySignature, computeKeyFingerprint } from '../../../../src/domain/trust/TrustCrypto.js'; +import { verifySignature, computeKeyFingerprint } from '../../../../src/infrastructure/adapters/TrustCryptoAdapter.js'; import { KEY_ADD_1, KEY_ADD_2, diff --git a/test/unit/domain/trust/TrustCrypto.test.js b/test/unit/domain/trust/TrustCrypto.test.js index 1e4d3029..19b11790 100644 --- a/test/unit/domain/trust/TrustCrypto.test.js +++ b/test/unit/domain/trust/TrustCrypto.test.js @@ -4,7 +4,7 @@ import { verifySignature, computeKeyFingerprint, SUPPORTED_ALGORITHMS, -} from '../../../../src/domain/trust/TrustCrypto.js'; +} from '../../../../src/infrastructure/adapters/TrustCryptoAdapter.js'; import TrustError from '../../../../src/domain/errors/TrustError.js'; /** @type {string} */ diff --git a/test/unit/domain/trust/TrustRecordService.chain.test.js b/test/unit/domain/trust/TrustRecordService.chain.test.js index 2bcbd778..5f5eb2d1 100644 
--- a/test/unit/domain/trust/TrustRecordService.chain.test.js +++ b/test/unit/domain/trust/TrustRecordService.chain.test.js @@ -136,7 +136,7 @@ describe('Chain integration (B15)', () => { } const records = readResult.records; for (const record of records) { - expect(verifyRecordId(record)).toBe(true); + expect(await verifyRecordId(record)).toBe(true); } }); @@ -151,7 +151,7 @@ describe('Chain integration (B15)', () => { throw readResult.error; } const records = readResult.records; - const result = service.verifyChain(records); + const result = await service.verifyChain(records); expect(result.valid).toBe(true); expect(result.errors).toHaveLength(0); }); diff --git a/test/unit/domain/trust/TrustRecordService.convergence.test.js b/test/unit/domain/trust/TrustRecordService.convergence.test.js index a0409b72..e94ec1cf 100644 --- a/test/unit/domain/trust/TrustRecordService.convergence.test.js +++ b/test/unit/domain/trust/TrustRecordService.convergence.test.js @@ -321,7 +321,7 @@ describe('Invariant 3 — CAS convergence (appendRecordWithRetry)', () => { const rebuilt = { ...record }; delete rebuilt.recordId; delete rebuilt.signature; - const newRecordId = computeRecordId(rebuilt); + const newRecordId = await computeRecordId(rebuilt); return { ...rebuilt, recordId: newRecordId, diff --git a/test/unit/domain/trust/TrustRecordService.test.js b/test/unit/domain/trust/TrustRecordService.test.js index bd890b2b..122f5229 100644 --- a/test/unit/domain/trust/TrustRecordService.test.js +++ b/test/unit/domain/trust/TrustRecordService.test.js @@ -274,43 +274,43 @@ describe('TrustRecordService.verifyChain', () => { }); }); - it('validates a correct chain', () => { + it('validates a correct chain', async () => { const records = [KEY_ADD_1, KEY_ADD_2, WRITER_BIND_ADD_ALICE]; - const result = service.verifyChain(records); + const result = await service.verifyChain(records); expect(result.valid).toBe(true); expect(result.errors).toHaveLength(0); }); - it('detects genesis with non-null 
prev (caught by recordId integrity)', () => { + it('detects genesis with non-null prev (caught by recordId integrity)', async () => { // Changing prev changes content → recordId mismatch fires first const bad = { ...KEY_ADD_1, prev: 'a'.repeat(64) }; - const result = service.verifyChain([bad]); + const result = await service.verifyChain([bad]); expect(result.valid).toBe(false); expect(result.errors[0].error).toContain('RecordId does not match'); }); - it('detects broken prev-link (caught by recordId integrity)', () => { + it('detects broken prev-link (caught by recordId integrity)', async () => { // Changing prev changes content → recordId mismatch fires first const broken = { ...KEY_ADD_2, prev: '0'.repeat(64) }; - const result = service.verifyChain([KEY_ADD_1, broken]); + const result = await service.verifyChain([KEY_ADD_1, broken]); expect(result.valid).toBe(false); expect(result.errors[0].error).toContain('RecordId does not match'); }); - it('detects duplicate recordIds', () => { + it('detects duplicate recordIds', async () => { const dup = { ...KEY_ADD_2, recordId: KEY_ADD_1.recordId }; - const result = service.verifyChain([KEY_ADD_1, dup]); + const result = await service.verifyChain([KEY_ADD_1, dup]); expect(result.valid).toBe(false); expect(result.errors.some(/** @param {*} e */ (e) => e.error.includes('Duplicate recordId'))).toBe(true); }); - it('validates full golden chain (first 3)', () => { - const result = service.verifyChain(GOLDEN_CHAIN.slice(0, 3)); + it('validates full golden chain (first 3)', async () => { + const result = await service.verifyChain(GOLDEN_CHAIN.slice(0, 3)); expect(result.valid).toBe(true); }); - it('validates full golden chain', () => { - const result = service.verifyChain(GOLDEN_CHAIN); + it('validates full golden chain', async () => { + const result = await service.verifyChain(GOLDEN_CHAIN); expect(result.valid).toBe(true); }); }); diff --git a/test/unit/domain/trust/canonical.freeze.test.js 
b/test/unit/domain/trust/canonical.freeze.test.js index 762ba303..48514d1b 100644 --- a/test/unit/domain/trust/canonical.freeze.test.js +++ b/test/unit/domain/trust/canonical.freeze.test.js @@ -18,39 +18,39 @@ import { } from './fixtures/goldenRecords.js'; describe('Canonical hash freeze', () => { - it('KEY_ADD_1 recordId is pinned', () => { - expect(computeRecordId(KEY_ADD_1)).toBe( + it('KEY_ADD_1 recordId is pinned', async () => { + expect(await computeRecordId(KEY_ADD_1)).toBe( '3d4f7c3bb432678a6e28b3d07de8ad2a86a8c6cbaf037ac90cdd4aaf388abbb4', ); }); - it('KEY_ADD_2 recordId is pinned', () => { - expect(computeRecordId(KEY_ADD_2)).toBe( + it('KEY_ADD_2 recordId is pinned', async () => { + expect(await computeRecordId(KEY_ADD_2)).toBe( '8b9a16431641093790226915c471b10ce5928c065c4abc5a25e0d90cb2ba936a', ); }); - it('WRITER_BIND_ADD_ALICE recordId is pinned', () => { - expect(computeRecordId(WRITER_BIND_ADD_ALICE)).toBe( + it('WRITER_BIND_ADD_ALICE recordId is pinned', async () => { + expect(await computeRecordId(WRITER_BIND_ADD_ALICE)).toBe( '70cc5fe9b9f0d12c4dc33ab7e9270702444f3b86b8be8785b966e449ffc889a8', ); }); - it('KEY_REVOKE_2 recordId is pinned', () => { - expect(computeRecordId(KEY_REVOKE_2)).toBe( + it('KEY_REVOKE_2 recordId is pinned', async () => { + expect(await computeRecordId(KEY_REVOKE_2)).toBe( '4281dd3741f61c7d3afb21a458284406685484343696719429d8dc90165177f1', ); }); - it('WRITER_BIND_REVOKE_BOB recordId is pinned', () => { - expect(computeRecordId(WRITER_BIND_REVOKE_BOB)).toBe( + it('WRITER_BIND_REVOKE_BOB recordId is pinned', async () => { + expect(await computeRecordId(WRITER_BIND_REVOKE_BOB)).toBe( 'f6646d48ee3bd4f2d85387fdad7711054249bc7e174b0c03b78dfa4ad20bdd5c', ); }); - it('all golden records pass verifyRecordId', () => { + it('all golden records pass verifyRecordId', async () => { for (const record of GOLDEN_CHAIN) { - expect(verifyRecordId(record)).toBe(true); + expect(await verifyRecordId(record)).toBe(true); } }); diff --git 
a/test/unit/domain/utils/bytes.test.js b/test/unit/domain/utils/bytes.test.js new file mode 100644 index 00000000..1586b3ad --- /dev/null +++ b/test/unit/domain/utils/bytes.test.js @@ -0,0 +1,158 @@ +import { describe, it, expect } from 'vitest'; +import { + hexEncode, + hexDecode, + base64Encode, + base64Decode, + concatBytes, + textEncode, + textDecode, +} from '../../../../src/domain/utils/bytes.js'; + +describe('bytes utilities', () => { + describe('hexEncode', () => { + it('encodes empty array', () => { + expect(hexEncode(new Uint8Array(0))).toBe(''); + }); + + it('encodes single byte', () => { + expect(hexEncode(new Uint8Array([0xff]))).toBe('ff'); + }); + + it('encodes multiple bytes with leading zeros', () => { + expect(hexEncode(new Uint8Array([0x00, 0x0a, 0xff]))).toBe('000aff'); + }); + + it('produces lowercase hex', () => { + expect(hexEncode(new Uint8Array([0xAB, 0xCD]))).toBe('abcd'); + }); + + it('round-trips through hexDecode', () => { + const original = new Uint8Array([1, 2, 3, 127, 128, 255]); + expect(hexDecode(hexEncode(original))).toEqual(original); + }); + }); + + describe('hexDecode', () => { + it('decodes empty string', () => { + expect(hexDecode('')).toEqual(new Uint8Array(0)); + }); + + it('decodes hex pairs', () => { + expect(hexDecode('ff00ab')).toEqual(new Uint8Array([0xff, 0x00, 0xab])); + }); + + it('handles uppercase input', () => { + expect(hexDecode('FF')).toEqual(new Uint8Array([0xff])); + }); + + it('throws RangeError for odd-length input', () => { + expect(() => hexDecode('abc')).toThrow(RangeError); + }); + + it('throws RangeError for non-hex characters', () => { + expect(() => hexDecode('zzzz')).toThrow(RangeError); + }); + + it('throws RangeError for mixed valid/invalid characters', () => { + expect(() => hexDecode('abgh')).toThrow(RangeError); + }); + }); + + describe('base64Encode', () => { + it('encodes empty array', () => { + expect(base64Encode(new Uint8Array(0))).toBe(''); + }); + + it('encodes "Hello"', () => { + const 
bytes = new TextEncoder().encode('Hello'); + expect(base64Encode(bytes)).toBe('SGVsbG8='); + }); + + it('round-trips through base64Decode', () => { + const original = new Uint8Array([0, 1, 2, 253, 254, 255]); + expect(base64Decode(base64Encode(original))).toEqual(original); + }); + }); + + describe('base64Decode', () => { + it('decodes empty string', () => { + expect(base64Decode('')).toEqual(new Uint8Array(0)); + }); + + it('decodes "SGVsbG8="', () => { + const result = base64Decode('SGVsbG8='); + expect(new TextDecoder().decode(result)).toBe('Hello'); + }); + + it('rejects base64 with length % 4 === 1', () => { + expect(() => base64Decode('AAAAA')).toThrow(RangeError); + expect(() => base64Decode('A')).toThrow(RangeError); + }); + + it('decodes valid unpadded base64', () => { + // "AA" is 2 chars (length % 4 === 2) — valid, decodes to 1 byte + expect(base64Decode('AA')).toEqual(new Uint8Array([0])); + // "AAA" is 3 chars (length % 4 === 3) — valid, decodes to 2 bytes + expect(base64Decode('AAA')).toEqual(new Uint8Array([0, 0])); + }); + }); + + describe('concatBytes', () => { + it('returns empty for no arguments', () => { + expect(concatBytes()).toEqual(new Uint8Array(0)); + }); + + it('returns copy for single argument', () => { + const a = new Uint8Array([1, 2]); + const result = concatBytes(a); + expect(result).toEqual(a); + // Should be a copy, not the same reference + expect(result).not.toBe(a); + }); + + it('concatenates two arrays', () => { + const a = new Uint8Array([1, 2]); + const b = new Uint8Array([3, 4]); + expect(concatBytes(a, b)).toEqual(new Uint8Array([1, 2, 3, 4])); + }); + + it('concatenates three arrays including empty', () => { + const a = new Uint8Array([1]); + const b = new Uint8Array(0); + const c = new Uint8Array([2, 3]); + expect(concatBytes(a, b, c)).toEqual(new Uint8Array([1, 2, 3])); + }); + }); + + describe('textEncode', () => { + it('encodes empty string', () => { + expect(textEncode('')).toEqual(new Uint8Array(0)); + }); + + 
it('encodes ASCII', () => { + expect(textEncode('abc')).toEqual(new Uint8Array([0x61, 0x62, 0x63])); + }); + + it('encodes multi-byte UTF-8', () => { + const bytes = textEncode('€'); + expect(bytes.length).toBe(3); + expect(bytes).toEqual(new Uint8Array([0xe2, 0x82, 0xac])); + }); + }); + + describe('textDecode', () => { + it('decodes empty array', () => { + expect(textDecode(new Uint8Array(0))).toBe(''); + }); + + it('decodes ASCII', () => { + expect(textDecode(new Uint8Array([0x61, 0x62, 0x63]))).toBe('abc'); + }); + + it('round-trips with textEncode', () => { + const original = 'Hello, 世界! 🌍'; + expect(textDecode(textEncode(original))).toBe(original); + }); + }); +}); diff --git a/test/unit/domain/utils/defaultCrypto.test.js b/test/unit/domain/utils/defaultCrypto.test.js index eb24cd37..766cce9a 100644 --- a/test/unit/domain/utils/defaultCrypto.test.js +++ b/test/unit/domain/utils/defaultCrypto.test.js @@ -29,21 +29,21 @@ describe('defaultCrypto', () => { }); describe('hmac', () => { - it('returns a Buffer', async () => { + it('returns a Uint8Array', async () => { const result = await defaultCrypto.hmac('sha256', 'secret-key', 'data'); - expect(Buffer.isBuffer(result)).toBe(true); + expect(result).toBeInstanceOf(Uint8Array); }); it('produces different results for different keys', async () => { - const a = /** @type {any} */ (await defaultCrypto.hmac('sha256', 'key-1', 'same-data')); - const b = /** @type {any} */ (await defaultCrypto.hmac('sha256', 'key-2', 'same-data')); - expect(a.equals(b)).toBe(false); + const a = await defaultCrypto.hmac('sha256', 'key-1', 'same-data'); + const b = await defaultCrypto.hmac('sha256', 'key-2', 'same-data'); + expect(a).not.toEqual(b); }); it('produces consistent results', async () => { - const first = /** @type {any} */ (await defaultCrypto.hmac('sha256', 'key', 'data')); - const second = /** @type {any} */ (await defaultCrypto.hmac('sha256', 'key', 'data')); - expect(first.equals(second)).toBe(true); + const first = await 
defaultCrypto.hmac('sha256', 'key', 'data'); + const second = await defaultCrypto.hmac('sha256', 'key', 'data'); + expect(first).toEqual(second); }); }); diff --git a/test/unit/domain/warp/readPatchBlob.test.js b/test/unit/domain/warp/readPatchBlob.test.js new file mode 100644 index 00000000..431a5174 --- /dev/null +++ b/test/unit/domain/warp/readPatchBlob.test.js @@ -0,0 +1,73 @@ +/** + * Tests for _readPatchBlob null-guard on readBlob return value. + * + * @see src/domain/warp/patch.methods.js + */ + +import { describe, it, expect, vi } from 'vitest'; +import { _readPatchBlob } from '../../../../src/domain/warp/patch.methods.js'; +import PersistenceError from '../../../../src/domain/errors/PersistenceError.js'; + +/** + * Builds a minimal mock context for _readPatchBlob.call(). + * Cast to `any` so tsc doesn't require the full WarpGraphWithMixins surface. + * + * @param {{ readBlob: import('vitest').Mock }} persistence + * @param {{ retrieve: import('vitest').Mock }|null} [patchBlobStorage] + * @returns {*} + */ +function mockCtx(persistence, patchBlobStorage = null) { + return /** @type {*} */ ({ _persistence: persistence, _patchBlobStorage: patchBlobStorage }); +} + +describe('_readPatchBlob', () => { + it('returns blob when readBlob succeeds', async () => { + const expected = new Uint8Array([1, 2, 3]); + const ctx = mockCtx({ readBlob: vi.fn().mockResolvedValue(expected) }); + const result = await _readPatchBlob.call(ctx, { + patchOid: 'a'.repeat(40), + encrypted: false, + }); + expect(result).toBe(expected); + }); + + it('throws PersistenceError with E_MISSING_OBJECT when readBlob returns null', async () => { + const oid = 'dead'.padEnd(40, '0'); + const ctx = mockCtx({ readBlob: vi.fn().mockResolvedValue(null) }); + await expect( + _readPatchBlob.call(ctx, { patchOid: oid, encrypted: false }), + ).rejects.toThrow(PersistenceError); + + try { + await _readPatchBlob.call(ctx, { patchOid: oid, encrypted: false }); + } catch (/** @type {*} */ err) { + 
expect(err.code).toBe(PersistenceError.E_MISSING_OBJECT); + expect(err.message).toContain(oid); + expect(err.context.oid).toBe(oid); + } + }); + + it('throws PersistenceError when readBlob returns undefined', async () => { + const oid = 'b'.repeat(40); + const ctx = mockCtx({ readBlob: vi.fn().mockResolvedValue(undefined) }); + await expect( + _readPatchBlob.call(ctx, { patchOid: oid, encrypted: false }), + ).rejects.toThrow(PersistenceError); + }); + + it('delegates to patchBlobStorage for encrypted patches', async () => { + const expected = new Uint8Array([4, 5, 6]); + const oid = 'c'.repeat(40); + const ctx = mockCtx( + { readBlob: vi.fn() }, + { retrieve: vi.fn().mockResolvedValue(expected) }, + ); + const result = await _readPatchBlob.call(ctx, { + patchOid: oid, + encrypted: true, + }); + expect(result).toBe(expected); + expect(ctx._persistence.readBlob).not.toHaveBeenCalled(); + expect(ctx._patchBlobStorage.retrieve).toHaveBeenCalledWith(oid); + }); +}); diff --git a/test/unit/infrastructure/adapters/BunHttpAdapter.test.js b/test/unit/infrastructure/adapters/BunHttpAdapter.test.js index e44964ba..e596ac35 100644 --- a/test/unit/infrastructure/adapters/BunHttpAdapter.test.js +++ b/test/unit/infrastructure/adapters/BunHttpAdapter.test.js @@ -1,4 +1,3 @@ -/* global ReadableStream */ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; import BunHttpAdapter from '../../../../src/infrastructure/adapters/BunHttpAdapter.js'; import HttpServerPort from '../../../../src/ports/HttpServerPort.js'; diff --git a/test/unit/infrastructure/adapters/BunWsAdapter.test.js b/test/unit/infrastructure/adapters/BunWsAdapter.test.js new file mode 100644 index 00000000..4af5cc2d --- /dev/null +++ b/test/unit/infrastructure/adapters/BunWsAdapter.test.js @@ -0,0 +1,302 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import WebSocketServerPort from '../../../../src/ports/WebSocketServerPort.js'; +import BunWsAdapter from 
'../../../../src/infrastructure/adapters/BunWsAdapter.js'; + +/** + * Creates a mock Bun.serve() environment. + * + * Returns the mock and helper functions to simulate WebSocket events. + */ +function createBunMock() { + /** @type {{ fetch: Function, websocket: { open: Function, message: Function, close: Function } }|null} */ + let handlers = null; + /** @type {{ stop: ReturnType, port: number, hostname: string, upgrade: ReturnType }} */ + const mockServer = { + stop: vi.fn().mockResolvedValue(undefined), + port: 0, + hostname: '127.0.0.1', + upgrade: vi.fn().mockReturnValue(true), + }; + + globalThis.Bun = { + serve: vi.fn().mockImplementation((opts) => { + handlers = { fetch: opts.fetch, websocket: opts.websocket }; + mockServer.port = opts.port || 3000; + mockServer.hostname = opts.hostname || '127.0.0.1'; + return mockServer; + }), + }; + + return { + mockServer, + /** Simulate an incoming WebSocket connection */ + simulateConnection() { + const data = { messageHandler: null, closeHandler: null, messageBuffer: [] }; + const ws = { + readyState: 1, + data, + send: vi.fn(), + close: vi.fn(), + }; + if (handlers?.websocket.open) { + handlers.websocket.open(ws); + } + return ws; + }, + /** Simulate a message arriving on the given ws */ + simulateMessage(/** @type {any} */ ws, /** @type {string} */ msg) { + if (handlers?.websocket.message) { + handlers.websocket.message(ws, msg); + } + }, + /** Simulate the connection closing */ + simulateClose(/** @type {any} */ ws, /** @type {number} */ code, /** @type {string} */ reason) { + if (handlers?.websocket.close) { + handlers.websocket.close(ws, code, reason); + } + }, + /** Simulate a non-WS HTTP request */ + async simulateFetch(/** @type {Request} */ req) { + if (!handlers) { + throw new Error('Server not started'); + } + return handlers.fetch(req, mockServer); + }, + }; +} + +describe('BunWsAdapter', () => { + /** @type {ReturnType} */ + let mock; + /** @type 
{import('../../../../src/ports/WebSocketServerPort.js').WsServerHandle|null} */ + let server = null; + /** @type {any} */ + let originalBun; + + beforeEach(() => { + originalBun = globalThis.Bun; + mock = createBunMock(); + }); + + afterEach(async () => { + if (server) { + await server.close(); + server = null; + } + if (originalBun === undefined) { + Reflect.deleteProperty(globalThis, 'Bun'); + } else { + globalThis.Bun = originalBun; + } + }); + + it('is an instance of WebSocketServerPort', () => { + expect(new BunWsAdapter()).toBeInstanceOf(WebSocketServerPort); + }); + + it('starts a server via Bun.serve()', async () => { + const adapter = new BunWsAdapter(); + server = adapter.createServer(() => {}); + const addr = await server.listen(4000, '0.0.0.0'); + + expect(globalThis.Bun.serve).toHaveBeenCalledTimes(1); + expect(addr.port).toBe(4000); + expect(addr.host).toBe('0.0.0.0'); + }); + + it('uses 127.0.0.1 as default host', async () => { + const adapter = new BunWsAdapter(); + server = adapter.createServer(() => {}); + const addr = await server.listen(3000); + + expect(addr.host).toBe('127.0.0.1'); + }); + + it('fires onConnection when a WebSocket opens', async () => { + const adapter = new BunWsAdapter(); + /** @type {import('../../../../src/ports/WebSocketServerPort.js').WsConnection[]} */ + const connections = []; + + server = adapter.createServer((conn) => { connections.push(conn); }); + await server.listen(0); + + mock.simulateConnection(); + expect(connections).toHaveLength(1); + }); + + it('routes messages to conn.onMessage handler', async () => { + const adapter = new BunWsAdapter(); + /** @type {string[]} */ + const received = []; + + server = adapter.createServer((conn) => { + conn.onMessage((msg) => { received.push(msg); }); + }); + await server.listen(0); + + const ws = mock.simulateConnection(); + mock.simulateMessage(ws, 'hello'); + + expect(received).toEqual(['hello']); + }); + + it('routes close events to conn.onClose handler', async () => { + 
const adapter = new BunWsAdapter(); + /** @type {number[]} */ + const codes = []; + + server = adapter.createServer((conn) => { + conn.onClose((code) => { codes.push(code ?? -1); }); + }); + await server.listen(0); + + const ws = mock.simulateConnection(); + mock.simulateClose(ws, 1000, 'done'); + + expect(codes).toEqual([1000]); + }); + + it('conn.send() calls ws.send()', async () => { + const adapter = new BunWsAdapter(); + /** @type {any} */ + let captured = null; + + server = adapter.createServer((conn) => { captured = conn; }); + await server.listen(0); + + const ws = /** @type {any} */ (mock.simulateConnection()); + captured?.send('outbound'); + + expect(ws.send).toHaveBeenCalledWith('outbound'); + }); + + it('conn.send() is a no-op when readyState is not OPEN', async () => { + const adapter = new BunWsAdapter(); + /** @type {any} */ + let captured = null; + + server = adapter.createServer((conn) => { captured = conn; }); + await server.listen(0); + + const ws = /** @type {any} */ (mock.simulateConnection()); + ws.readyState = 3; // CLOSED + captured?.send('should not send'); + + expect(ws.send).not.toHaveBeenCalled(); + }); + + it('conn.close() calls ws.close()', async () => { + const adapter = new BunWsAdapter(); + /** @type {any} */ + let captured = null; + + server = adapter.createServer((conn) => { captured = conn; }); + await server.listen(0); + + const ws = /** @type {any} */ (mock.simulateConnection()); + captured?.close(); + + expect(ws.close).toHaveBeenCalled(); + }); + + it('close() calls server.stop()', async () => { + const adapter = new BunWsAdapter(); + server = adapter.createServer(() => {}); + await server.listen(0); + + await server.close(); + expect(mock.mockServer.stop).toHaveBeenCalled(); + server = null; // already closed + }); + + it('close() awaits server.stop()', async () => { + vi.useFakeTimers(); + try { + const adapter = new BunWsAdapter(); + server = adapter.createServer(() => {}); + await server.listen(0); + + // Make stop() 
return a delayed promise to verify it's awaited + let stopResolved = false; + mock.mockServer.stop.mockImplementation(() => + new Promise((resolve) => { + setTimeout(() => { stopResolved = true; resolve(undefined); }, 10); + }), + ); + + const closePromise = server.close(); + await vi.advanceTimersByTimeAsync(10); + await closePromise; + expect(stopResolved).toBe(true); + server = null; // already closed + } finally { + vi.useRealTimers(); + } + }); + + it('close() is safe when server was never started', async () => { + const adapter = new BunWsAdapter(); + server = adapter.createServer(() => {}); + await expect(server.close()).resolves.toBeUndefined(); + server = null; + }); + + it('returns 404 for non-WebSocket HTTP requests', async () => { + const adapter = new BunWsAdapter(); + server = adapter.createServer(() => {}); + await server.listen(0); + + mock.mockServer.upgrade.mockReturnValue(false); + const resp = await mock.simulateFetch(new Request('http://localhost/')); + + expect(resp.status).toBe(404); + }); + + it('buffers messages arriving before onMessage handler is set', async () => { + const adapter = new BunWsAdapter(); + /** @type {import('../../../../src/ports/WebSocketServerPort.js').WsConnection|null} */ + let savedConn = null; + /** @type {string[]} */ + const received = []; + + server = adapter.createServer((conn) => { + // Save conn but DON'T call onMessage yet — simulates delayed setup + savedConn = conn; + }); + await server.listen(0); + + const ws = mock.simulateConnection(); + // Messages arrive before onMessage handler is registered + mock.simulateMessage(ws, 'early-1'); + mock.simulateMessage(ws, 'early-2'); + + // Now set the handler — should flush buffered messages + expect(savedConn).not.toBeNull(); + const conn = /** @type {import('../../../../src/ports/WebSocketServerPort.js').WsConnection} */ (/** @type {unknown} */ (savedConn)); + conn.onMessage((/** @type {string} */ msg) => { received.push(msg); }); + 
expect(received).toEqual(['early-1', 'early-2']); + + // Subsequent messages go directly to handler + mock.simulateMessage(ws, 'late-1'); + expect(received).toEqual(['early-1', 'early-2', 'late-1']); + }); + + it('handles multiple connections independently', async () => { + const adapter = new BunWsAdapter(); + /** @type {string[]} */ + const allMessages = []; + + server = adapter.createServer((conn) => { + conn.onMessage((msg) => { allMessages.push(msg); }); + }); + await server.listen(0); + + const ws1 = mock.simulateConnection(); + const ws2 = mock.simulateConnection(); + + mock.simulateMessage(ws1, 'from-1'); + mock.simulateMessage(ws2, 'from-2'); + + expect(allMessages).toEqual(['from-1', 'from-2']); + }); +}); diff --git a/test/unit/infrastructure/adapters/CasBlobAdapter.test.js b/test/unit/infrastructure/adapters/CasBlobAdapter.test.js new file mode 100644 index 00000000..842ef577 --- /dev/null +++ b/test/unit/infrastructure/adapters/CasBlobAdapter.test.js @@ -0,0 +1,384 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +// Mock @git-stunts/git-cas (dynamic import used by _initCas) +const mockReadManifest = vi.fn(); +const mockRestore = vi.fn(); +const mockStore = vi.fn(); +const mockCreateTree = vi.fn(); + +/** Captures constructor args for assertion. 
@type {any} */ +let lastConstructorArgs = {}; + +class MockContentAddressableStore { + constructor(/** @type {any} */ opts) { + lastConstructorArgs = opts; + this.readManifest = mockReadManifest; + this.restore = mockRestore; + this.store = mockStore; + this.createTree = mockCreateTree; + } +} + +class MockCborCodec {} + +vi.mock('@git-stunts/git-cas', () => ({ + default: MockContentAddressableStore, + CborCodec: MockCborCodec, +})); + +// Import after mock setup +const { default: CasBlobAdapter } = await import( + '../../../../src/infrastructure/adapters/CasBlobAdapter.js' +); +const { default: BlobStoragePort } = await import( + '../../../../src/ports/BlobStoragePort.js' +); + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function makePersistence() { + return { + readBlob: vi.fn().mockResolvedValue(new TextEncoder().encode('raw-blob-data')), + writeBlob: vi.fn().mockResolvedValue('blob-oid-1'), + }; +} + +function makePlumbing() { + return {}; +} + +function makeLogger() { + return { + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + child: vi.fn(), + }; +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('CasBlobAdapter', () => { + beforeEach(() => { + vi.clearAllMocks(); + lastConstructorArgs = {}; + }); + + it('extends BlobStoragePort', () => { + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence: makePersistence(), + }); + expect(adapter).toBeInstanceOf(BlobStoragePort); + }); + + describe('store()', () => { + it('stores string content via CAS and returns tree OID', async () => { + const manifest = { chunks: ['chunk1'] }; + mockStore.mockResolvedValue(manifest); + mockCreateTree.mockResolvedValue('tree-oid-abc'); + + const adapter = new 
CasBlobAdapter({ + plumbing: makePlumbing(), + persistence: makePersistence(), + }); + + const oid = await adapter.store('hello world', { slug: 'test/node1' }); + + expect(oid).toBe('tree-oid-abc'); + expect(mockStore).toHaveBeenCalledOnce(); + expect(mockCreateTree).toHaveBeenCalledWith({ manifest }); + }); + + it('stores Uint8Array content via CAS', async () => { + const manifest = { chunks: ['chunk1'] }; + mockStore.mockResolvedValue(manifest); + mockCreateTree.mockResolvedValue('tree-oid-123'); + + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence: makePersistence(), + }); + + const buf = new Uint8Array([1, 2, 3]); + const oid = await adapter.store(buf); + + expect(oid).toBe('tree-oid-123'); + expect(mockStore).toHaveBeenCalledOnce(); + }); + + it('generates a default slug when none provided', async () => { + mockStore.mockResolvedValue({}); + mockCreateTree.mockResolvedValue('tree-oid'); + + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence: makePersistence(), + }); + + await adapter.store('data'); + + const storeCall = mockStore.mock.calls[0][0]; + expect(storeCall.slug).toMatch(/^blob-/); + }); + + it('passes encryptionKey to CAS store when configured', async () => { + mockStore.mockResolvedValue({}); + mockCreateTree.mockResolvedValue('tree-oid'); + + const encKey = new Uint8Array(32); + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence: makePersistence(), + encryptionKey: encKey, + }); + + await adapter.store('secret data'); + + const storeCall = mockStore.mock.calls[0][0]; + expect(storeCall.encryptionKey).toBe(encKey); + }); + + it('does not include encryptionKey when not configured', async () => { + mockStore.mockResolvedValue({}); + mockCreateTree.mockResolvedValue('tree-oid'); + + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence: makePersistence(), + }); + + await adapter.store('plain data'); + + const storeCall = 
mockStore.mock.calls[0][0]; + expect(storeCall.encryptionKey).toBeUndefined(); + }); + }); + + describe('retrieve()', () => { + it('retrieves content via CAS when manifest exists', async () => { + const manifest = { chunks: ['chunk1'] }; + const contentBuf = new TextEncoder().encode('restored content'); + mockReadManifest.mockResolvedValue(manifest); + mockRestore.mockResolvedValue({ buffer: contentBuf }); + + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence: makePersistence(), + }); + + const result = await adapter.retrieve('tree-oid-abc'); + + expect(result).toBe(contentBuf); + expect(mockReadManifest).toHaveBeenCalledWith({ treeOid: 'tree-oid-abc' }); + expect(mockRestore).toHaveBeenCalledWith({ manifest }); + }); + + it('passes encryptionKey to CAS restore when configured', async () => { + const manifest = { chunks: ['chunk1'] }; + mockReadManifest.mockResolvedValue(manifest); + mockRestore.mockResolvedValue({ buffer: new TextEncoder().encode('decrypted') }); + + const encKey = new Uint8Array(32); + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence: makePersistence(), + encryptionKey: encKey, + }); + + await adapter.retrieve('tree-oid'); + + expect(mockRestore).toHaveBeenCalledWith({ manifest, encryptionKey: encKey }); + }); + + it('falls back to raw Git blob when CAS readManifest throws MANIFEST_NOT_FOUND', async () => { + const rawBuf = new TextEncoder().encode('legacy raw blob'); + const persistence = makePersistence(); + persistence.readBlob.mockResolvedValue(rawBuf); + const casErr = Object.assign(new Error('No manifest entry'), { code: 'MANIFEST_NOT_FOUND' }); + mockReadManifest.mockRejectedValue(casErr); + + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence, + }); + + const result = await adapter.retrieve('raw-blob-oid'); + + expect(result).toBe(rawBuf); + expect(persistence.readBlob).toHaveBeenCalledWith('raw-blob-oid'); + }); + + it('falls back to raw Git blob when 
CAS readManifest throws GIT_ERROR', async () => { + const rawBuf = new TextEncoder().encode('legacy raw blob'); + const persistence = makePersistence(); + persistence.readBlob.mockResolvedValue(rawBuf); + const casErr = Object.assign(new Error('Failed to read tree'), { code: 'GIT_ERROR' }); + mockReadManifest.mockRejectedValue(casErr); + + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence, + }); + + const result = await adapter.retrieve('raw-blob-oid'); + + expect(result).toBe(rawBuf); + expect(persistence.readBlob).toHaveBeenCalledWith('raw-blob-oid'); + }); + + it('falls back to raw Git blob on message-based legacy errors (no .code)', async () => { + const rawBuf = new TextEncoder().encode('legacy raw blob'); + const persistence = makePersistence(); + persistence.readBlob.mockResolvedValue(rawBuf); + mockReadManifest.mockResolvedValue({ chunks: [] }); + mockRestore.mockRejectedValue(new Error('not a tree object')); + + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence, + }); + + const result = await adapter.retrieve('bad-tree-oid'); + + expect(result).toBe(rawBuf); + expect(persistence.readBlob).toHaveBeenCalledWith('bad-tree-oid'); + }); + + it('falls back to raw Git blob on "bad object" message (no .code)', async () => { + const rawBuf = new TextEncoder().encode('legacy raw blob'); + const persistence = makePersistence(); + persistence.readBlob.mockResolvedValue(rawBuf); + mockReadManifest.mockRejectedValue(new Error('bad object abc123')); + + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence, + }); + + const result = await adapter.retrieve('bad-obj-oid'); + + expect(result).toBe(rawBuf); + expect(persistence.readBlob).toHaveBeenCalledWith('bad-obj-oid'); + }); + + it('falls back to raw Git blob on "does not exist" message (no .code)', async () => { + const rawBuf = new TextEncoder().encode('legacy raw blob'); + const persistence = makePersistence(); + 
persistence.readBlob.mockResolvedValue(rawBuf); + mockReadManifest.mockRejectedValue(new Error('path does not exist')); + + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence, + }); + + const result = await adapter.retrieve('missing-oid'); + + expect(result).toBe(rawBuf); + expect(persistence.readBlob).toHaveBeenCalledWith('missing-oid'); + }); + + it('throws descriptive error when legacy fallback readBlob returns null', async () => { + const persistence = makePersistence(); + persistence.readBlob.mockResolvedValue(null); + const casErr = Object.assign(new Error('No manifest entry'), { code: 'MANIFEST_NOT_FOUND' }); + mockReadManifest.mockRejectedValue(casErr); + + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence, + }); + + await expect(adapter.retrieve('ghost-oid')).rejects.toThrow( + 'Blob not found: OID "ghost-oid" is neither a CAS manifest nor a readable Git blob', + ); + expect(persistence.readBlob).toHaveBeenCalledWith('ghost-oid'); + }); + + it('rethrows non-legacy CAS errors', async () => { + const persistence = makePersistence(); + const casErr = Object.assign(new Error('decryption failed'), { code: 'INTEGRITY_ERROR' }); + mockReadManifest.mockRejectedValue(casErr); + + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence, + }); + + await expect(adapter.retrieve('enc-oid')).rejects.toThrow('decryption failed'); + expect(persistence.readBlob).not.toHaveBeenCalled(); + }); + }); + + describe('CAS initialization', () => { + it('lazily initializes CAS on first store() call', async () => { + mockStore.mockResolvedValue({}); + mockCreateTree.mockResolvedValue('tree-oid'); + + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence: makePersistence(), + }); + + // CAS not yet initialized + expect(lastConstructorArgs).toEqual({}); + + await adapter.store('data'); + + // CAS initialized with correct options + expect(lastConstructorArgs.chunking).toEqual({ 
strategy: 'cdc' }); + }); + + it('reuses CAS instance across multiple calls', async () => { + mockStore.mockResolvedValue({}); + mockCreateTree.mockResolvedValue('tree-oid'); + + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence: makePersistence(), + }); + + await adapter.store('data1'); + const firstArgs = { ...lastConstructorArgs }; + await adapter.store('data2'); + + // Same instance (constructor called only once) + expect(lastConstructorArgs).toEqual(firstArgs); + }); + + it('configures observability bridge when logger is provided', async () => { + mockStore.mockResolvedValue({}); + mockCreateTree.mockResolvedValue('tree-oid'); + + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence: makePersistence(), + logger: makeLogger(), + }); + + await adapter.store('data'); + + expect(lastConstructorArgs.observability).toBeDefined(); + }); + + it('does not configure observability when no logger', async () => { + mockStore.mockResolvedValue({}); + mockCreateTree.mockResolvedValue('tree-oid'); + + const adapter = new CasBlobAdapter({ + plumbing: makePlumbing(), + persistence: makePersistence(), + }); + + await adapter.store('data'); + + expect(lastConstructorArgs.observability).toBeUndefined(); + }); + }); +}); diff --git a/test/unit/infrastructure/adapters/CasSeekCacheAdapter.test.js b/test/unit/infrastructure/adapters/CasSeekCacheAdapter.test.js index 49f44421..36f1eaa4 100644 --- a/test/unit/infrastructure/adapters/CasSeekCacheAdapter.test.js +++ b/test/unit/infrastructure/adapters/CasSeekCacheAdapter.test.js @@ -3,19 +3,34 @@ import { describe, it, expect, vi, beforeEach } from 'vitest'; // Mock @git-stunts/git-cas (dynamic import used by _initCas) const mockReadManifest = vi.fn(); const mockRestore = vi.fn(); +const mockRestoreStream = vi.fn(); const mockStore = vi.fn(); const mockCreateTree = vi.fn(); -const mockCreateCbor = vi.fn(() => ({ - readManifest: mockReadManifest, - restore: mockRestore, - store: 
mockStore, - createTree: mockCreateTree, -})); + +/** When true, the mock CAS exposes restoreStream. */ +let exposeRestoreStream = false; + +/** Captures constructor args for assertion. @type {any} */ +let lastConstructorArgs = {}; + +class MockContentAddressableStore { + constructor(/** @type {any} */ opts) { + lastConstructorArgs = opts; + this.readManifest = mockReadManifest; + this.restore = mockRestore; + this.store = mockStore; + this.createTree = mockCreateTree; + if (exposeRestoreStream) { + this.restoreStream = mockRestoreStream; + } + } +} + +class MockCborCodec {} vi.mock('@git-stunts/git-cas', () => ({ - default: { - createCbor: mockCreateCbor, - }, + default: MockContentAddressableStore, + CborCodec: MockCborCodec, })); // Import after mock setup @@ -34,7 +49,7 @@ const { default: SeekCachePort } = await import( function makePersistence() { return { readRef: vi.fn().mockResolvedValue(null), - readBlob: vi.fn().mockResolvedValue(Buffer.from('{}', 'utf8')), + readBlob: vi.fn().mockResolvedValue(new TextEncoder().encode('{}')), writeBlob: vi.fn().mockResolvedValue('blob-oid-1'), updateRef: vi.fn().mockResolvedValue(undefined), deleteRef: vi.fn().mockResolvedValue(undefined), @@ -47,13 +62,13 @@ function makePlumbing() { /** Builds a JSON index buffer for readBlob to return. 
*/ function indexBuffer(entries = {}) { - return Buffer.from(JSON.stringify({ schemaVersion: 1, entries }), 'utf8'); + return new TextEncoder().encode(JSON.stringify({ schemaVersion: 1, entries })); } const GRAPH_NAME = 'test-graph'; const EXPECTED_REF = `refs/warp/${GRAPH_NAME}/seek-cache`; const SAMPLE_KEY = 'v1:t42-abcdef0123456789'; -const SAMPLE_BUFFER = Buffer.from('serialized-state-data'); +const SAMPLE_BUFFER = new TextEncoder().encode('serialized-state-data'); // --------------------------------------------------------------------------- // Tests @@ -69,6 +84,7 @@ describe('CasSeekCacheAdapter', () => { beforeEach(() => { vi.clearAllMocks(); + exposeRestoreStream = false; persistence = makePersistence(); plumbing = makePlumbing(); adapter = new CasSeekCacheAdapter({ @@ -105,8 +121,38 @@ describe('CasSeekCacheAdapter', () => { expect(adapter._ref).toBe(EXPECTED_REF); }); - it('initialises _casPromise to null', () => { - expect(adapter._casPromise).toBeNull(); + it('exposes _getCas as a function', () => { + expect(typeof adapter._getCas).toBe('function'); + }); + + it('stores encryptionKey when provided', () => { + const key = new Uint8Array(32).fill(0xab); + const encrypted = new CasSeekCacheAdapter({ + persistence, + plumbing, + graphName: GRAPH_NAME, + encryptionKey: key, + }); + expect(encrypted._encryptionKey).toBe(key); + }); + + it('defaults encryptionKey to undefined', () => { + expect(adapter._encryptionKey).toBeUndefined(); + }); + + it('stores logger when provided', () => { + const logger = { debug: vi.fn(), info: vi.fn(), warn: vi.fn(), error: vi.fn(), child: vi.fn() }; + const withLogger = new CasSeekCacheAdapter({ + persistence, + plumbing, + graphName: GRAPH_NAME, + logger, + }); + expect(withLogger._logger).toBe(logger); + }); + + it('defaults logger to undefined', () => { + expect(adapter._logger).toBeUndefined(); }); }); @@ -115,33 +161,46 @@ describe('CasSeekCacheAdapter', () => { // 
------------------------------------------------------------------------- describe('_getCas()', () => { - it('creates CAS instance on first call', async () => { + it('creates CAS instance with CDC chunking on first call', async () => { await adapter._getCas(); - expect(mockCreateCbor).toHaveBeenCalledWith({ plumbing }); + expect(lastConstructorArgs.plumbing).toBe(plumbing); + expect(lastConstructorArgs.codec).toBeInstanceOf(MockCborCodec); + expect(lastConstructorArgs.chunking).toEqual({ strategy: 'cdc' }); }); it('caches the CAS promise across multiple calls', async () => { - await adapter._getCas(); - await adapter._getCas(); - expect(mockCreateCbor).toHaveBeenCalledTimes(1); + const first = await adapter._getCas(); + const second = await adapter._getCas(); + expect(first).toBe(second); }); it('resets cached promise on init error so next call retries', async () => { - mockCreateCbor.mockImplementationOnce(() => { - throw new Error('init failure'); + // Temporarily break the mock module to force an init failure + const origDefault = (await import('@git-stunts/git-cas')).default; + const { default: CASModule } = await import('@git-stunts/git-cas'); + + // Create a fresh adapter whose _initCas will throw + /** @type {any} */ + const badAdapter = new CasSeekCacheAdapter({ + persistence, + plumbing, + graphName: GRAPH_NAME, }); + // Override _initCas to throw once + let throwOnce = true; + const origInit = badAdapter._initCas.bind(badAdapter); + badAdapter._initCas = async () => { + if (throwOnce) { + throwOnce = false; + throw new Error('init failure'); + } + return origInit(); + }; - await expect(adapter._getCas()).rejects.toThrow('init failure'); - expect(adapter._casPromise).toBeNull(); + await expect(badAdapter._getCas()).rejects.toThrow('init failure'); - // Second call should retry and succeed - mockCreateCbor.mockReturnValueOnce({ - readManifest: mockReadManifest, - restore: mockRestore, - store: mockStore, - createTree: mockCreateTree, - }); - await 
expect(adapter._getCas()).resolves.toBeDefined(); + // Second call should retry and succeed (promise was reset on failure) + await expect(badAdapter._getCas()).resolves.toBeDefined(); }); }); @@ -185,7 +244,7 @@ describe('CasSeekCacheAdapter', () => { it('returns buffer on cache hit', async () => { const treeOid = 'tree-oid-abc'; const manifest = { chunks: ['c1'] }; - const stateBuffer = Buffer.from('restored-state'); + const stateBuffer = new TextEncoder().encode('restored-state'); persistence.readRef.mockResolvedValue('index-oid'); persistence.readBlob.mockResolvedValue( @@ -204,7 +263,7 @@ describe('CasSeekCacheAdapter', () => { it('updates lastAccessedAt on successful cache hit', async () => { const treeOid = 'tree-oid-abc'; const manifest = { chunks: ['c1'] }; - const stateBuffer = Buffer.from('restored-state'); + const stateBuffer = new TextEncoder().encode('restored-state'); const originalEntry = { treeOid, createdAt: '2025-01-01T00:00:00Z', @@ -222,7 +281,7 @@ describe('CasSeekCacheAdapter', () => { // Verify index was written back with lastAccessedAt expect(persistence.writeBlob).toHaveBeenCalled(); const writtenJson = JSON.parse( - persistence.writeBlob.mock.calls[0][0].toString('utf8') + new TextDecoder().decode(persistence.writeBlob.mock.calls[0][0]) ); expect(writtenJson.entries[SAMPLE_KEY].lastAccessedAt).toBeDefined(); expect(writtenJson.entries[SAMPLE_KEY].createdAt).toBe('2025-01-01T00:00:00Z'); @@ -260,6 +319,110 @@ describe('CasSeekCacheAdapter', () => { const result = await adapter.get(SAMPLE_KEY); expect(result).toBeNull(); }); + + it('passes encryptionKey to cas.restore when configured', async () => { + const encKey = new Uint8Array(32).fill(0xab); + const encAdapter = new CasSeekCacheAdapter({ + persistence, + plumbing, + graphName: GRAPH_NAME, + encryptionKey: encKey, + }); + const treeOid = 'tree-oid-enc'; + const manifest = { chunks: ['c1'] }; + const stateBuffer = new TextEncoder().encode('encrypted-state'); + + 
persistence.readRef.mockResolvedValue('index-oid'); + persistence.readBlob.mockResolvedValue( + indexBuffer({ [SAMPLE_KEY]: { treeOid, createdAt: new Date().toISOString() } }) + ); + mockReadManifest.mockResolvedValue(manifest); + mockRestore.mockResolvedValue({ buffer: stateBuffer }); + + await encAdapter.get(SAMPLE_KEY); + + expect(mockRestore).toHaveBeenCalledWith({ + manifest, + encryptionKey: encKey, + }); + }); + + it('does not pass encryptionKey to cas.restore when not configured', async () => { + const treeOid = 'tree-oid-plain'; + const manifest = { chunks: ['c1'] }; + + persistence.readRef.mockResolvedValue('index-oid'); + persistence.readBlob.mockResolvedValue( + indexBuffer({ [SAMPLE_KEY]: { treeOid, createdAt: new Date().toISOString() } }) + ); + mockReadManifest.mockResolvedValue(manifest); + mockRestore.mockResolvedValue({ buffer: new TextEncoder().encode('plain') }); + + await adapter.get(SAMPLE_KEY); + + expect(mockRestore).toHaveBeenCalledWith({ manifest }); + }); + + it('uses restoreStream() when available, concatenating chunks', async () => { + // Create adapter from CAS that exposes restoreStream + exposeRestoreStream = true; + const streamAdapter = new CasSeekCacheAdapter({ + persistence, + plumbing, + graphName: GRAPH_NAME, + }); + + const treeOid = 'tree-stream'; + const manifest = { chunks: ['c1', 'c2'] }; + const chunk1 = new TextEncoder().encode('hello-'); + const chunk2 = new TextEncoder().encode('world'); + + persistence.readRef.mockResolvedValue('index-oid'); + persistence.readBlob.mockResolvedValue( + indexBuffer({ [SAMPLE_KEY]: { treeOid, createdAt: new Date().toISOString() } }) + ); + mockReadManifest.mockResolvedValue(manifest); + // restoreStream returns an async iterable of chunks + mockRestoreStream.mockReturnValue((async function* () { + yield chunk1; + yield chunk2; + })()); + + const result = await streamAdapter.get(SAMPLE_KEY); + + expect(result).not.toBeNull(); + expect(new TextDecoder().decode(/** @type {any} */ 
(result).buffer)).toBe('hello-world'); + expect(mockRestoreStream).toHaveBeenCalledWith({ manifest }); + // Should NOT fall back to cas.restore() + expect(mockRestore).not.toHaveBeenCalled(); + }); + + it('falls back to cas.restore() when restoreStream is not available', async () => { + // Create adapter from a CAS without restoreStream + exposeRestoreStream = false; + const fallbackAdapter = new CasSeekCacheAdapter({ + persistence, + plumbing, + graphName: GRAPH_NAME, + }); + + const treeOid = 'tree-fallback'; + const manifest = { chunks: ['c1'] }; + const stateBuffer = new TextEncoder().encode('fallback-state'); + + persistence.readRef.mockResolvedValue('index-oid'); + persistence.readBlob.mockResolvedValue( + indexBuffer({ [SAMPLE_KEY]: { treeOid, createdAt: new Date().toISOString() } }) + ); + mockReadManifest.mockResolvedValue(manifest); + mockRestore.mockResolvedValue({ buffer: stateBuffer }); + + const result = await fallbackAdapter.get(SAMPLE_KEY); + + expect(result).toEqual({ buffer: stateBuffer }); + expect(mockRestore).toHaveBeenCalledWith({ manifest }); + expect(mockRestoreStream).not.toHaveBeenCalled(); + }); }); // ------------------------------------------------------------------------- @@ -286,7 +449,7 @@ describe('CasSeekCacheAdapter', () => { // Index updated expect(persistence.writeBlob).toHaveBeenCalled(); const writtenJson = JSON.parse( - persistence.writeBlob.mock.calls[0][0].toString('utf8') + new TextDecoder().decode(persistence.writeBlob.mock.calls[0][0]) ); const entry = writtenJson.entries[SAMPLE_KEY]; expect(entry.treeOid).toBe(treeOid); @@ -320,11 +483,42 @@ describe('CasSeekCacheAdapter', () => { await adapter.set(SAMPLE_KEY, SAMPLE_BUFFER); const writtenJson = JSON.parse( - persistence.writeBlob.mock.calls[0][0].toString('utf8') + new TextDecoder().decode(persistence.writeBlob.mock.calls[0][0]) ); expect(writtenJson.entries[existingKey]).toEqual(existingEntry); expect(writtenJson.entries[SAMPLE_KEY]).toBeDefined(); }); + + it('passes 
encryptionKey to cas.store when configured', async () => { + const encKey = new Uint8Array(32).fill(0xab); + const encAdapter = new CasSeekCacheAdapter({ + persistence, + plumbing, + graphName: GRAPH_NAME, + encryptionKey: encKey, + }); + + mockStore.mockResolvedValue({ chunks: [] }); + mockCreateTree.mockResolvedValue('enc-tree'); + persistence.readRef.mockResolvedValue(null); + + await encAdapter.set(SAMPLE_KEY, SAMPLE_BUFFER); + + expect(mockStore).toHaveBeenCalledWith( + expect.objectContaining({ encryptionKey: encKey }) + ); + }); + + it('does not pass encryptionKey to cas.store when not configured', async () => { + mockStore.mockResolvedValue({ chunks: [] }); + mockCreateTree.mockResolvedValue('plain-tree'); + persistence.readRef.mockResolvedValue(null); + + await adapter.set(SAMPLE_KEY, SAMPLE_BUFFER); + + const storeArg = mockStore.mock.calls[0][0]; + expect(storeArg.encryptionKey).toBeUndefined(); + }); }); // ------------------------------------------------------------------------- @@ -395,7 +589,7 @@ describe('CasSeekCacheAdapter', () => { // Verify the written index no longer contains the key const writtenJson = JSON.parse( - persistence.writeBlob.mock.calls[0][0].toString('utf8') + new TextDecoder().decode(persistence.writeBlob.mock.calls[0][0]) ); expect(writtenJson.entries[SAMPLE_KEY]).toBeUndefined(); }); @@ -420,7 +614,7 @@ describe('CasSeekCacheAdapter', () => { await adapter.delete(SAMPLE_KEY); const writtenJson = JSON.parse( - persistence.writeBlob.mock.calls[0][0].toString('utf8') + new TextDecoder().decode(persistence.writeBlob.mock.calls[0][0]) ); expect(writtenJson.entries[otherKey]).toBeDefined(); expect(writtenJson.entries[SAMPLE_KEY]).toBeUndefined(); @@ -599,10 +793,10 @@ describe('CasSeekCacheAdapter', () => { mockStore.mockResolvedValue({ chunks: [] }); mockCreateTree.mockResolvedValue('new-tree'); - await tinyAdapter.set('v1:t99-newhash', Buffer.from('new')); + await tinyAdapter.set('v1:t99-newhash', new TextEncoder().encode('new')); 
const writtenJson = JSON.parse( - persistence.writeBlob.mock.calls[0][0].toString('utf8') + new TextDecoder().decode(persistence.writeBlob.mock.calls[0][0]) ); expect(Object.keys(writtenJson.entries)).toHaveLength(1); expect(writtenJson.entries['v1:t99-newhash']).toBeDefined(); @@ -680,7 +874,7 @@ describe('CasSeekCacheAdapter', () => { it('returns empty index when blob is invalid JSON', async () => { persistence.readRef.mockResolvedValue('oid'); - persistence.readBlob.mockResolvedValue(Buffer.from('not-json!!!')); + persistence.readBlob.mockResolvedValue(new TextEncoder().encode('not-json!!!')); const result = await adapter._readIndex(); expect(result).toEqual({ schemaVersion: 1, entries: {} }); }); @@ -688,7 +882,7 @@ describe('CasSeekCacheAdapter', () => { it('returns empty index when schemaVersion mismatches', async () => { persistence.readRef.mockResolvedValue('oid'); persistence.readBlob.mockResolvedValue( - Buffer.from(JSON.stringify({ schemaVersion: 999, entries: { x: {} } })) + new TextEncoder().encode(JSON.stringify({ schemaVersion: 999, entries: { x: {} } })) ); const result = await adapter._readIndex(); expect(result).toEqual({ schemaVersion: 1, entries: {} }); @@ -725,7 +919,7 @@ describe('CasSeekCacheAdapter', () => { expect(persistence.writeBlob).toHaveBeenCalledTimes(1); const buf = persistence.writeBlob.mock.calls[0][0]; - expect(JSON.parse(buf.toString('utf8'))).toEqual(index); + expect(JSON.parse(new TextDecoder().decode(buf))).toEqual(index); expect(persistence.updateRef).toHaveBeenCalledWith(EXPECTED_REF, 'written-oid'); }); }); diff --git a/test/unit/infrastructure/adapters/DenoHttpAdapter.test.js b/test/unit/infrastructure/adapters/DenoHttpAdapter.test.js index b7cd9883..a409f387 100644 --- a/test/unit/infrastructure/adapters/DenoHttpAdapter.test.js +++ b/test/unit/infrastructure/adapters/DenoHttpAdapter.test.js @@ -1,4 +1,3 @@ -/* global Request, Response, ReadableStream, Headers */ import { describe, it, expect, vi, beforeEach, afterEach } 
from 'vitest'; import HttpServerPort from '../../../../src/ports/HttpServerPort.js'; diff --git a/test/unit/infrastructure/adapters/DenoWsAdapter.test.js b/test/unit/infrastructure/adapters/DenoWsAdapter.test.js new file mode 100644 index 00000000..83f5e014 --- /dev/null +++ b/test/unit/infrastructure/adapters/DenoWsAdapter.test.js @@ -0,0 +1,313 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import WebSocketServerPort from '../../../../src/ports/WebSocketServerPort.js'; +import DenoWsAdapter from '../../../../src/infrastructure/adapters/DenoWsAdapter.js'; + +/** + * Creates a mock Deno runtime environment. + * + * Returns helper functions to simulate WebSocket upgrade and events. + */ +function createDenoMock() { + /** @type {Function|null} */ + let handler = null; + /** @type {{ shutdown: ReturnType, addr: { transport: string, hostname: string, port: number } }|null} */ + let mockServer = null; + + /** @type {any} */ + const denoShim = { + env: { get: vi.fn() }, + serve: vi.fn().mockImplementation((opts, requestHandler) => { + handler = requestHandler; + const assignedPort = opts.port || 49152; + mockServer = { + shutdown: vi.fn().mockResolvedValue(undefined), + addr: { + transport: 'tcp', + hostname: opts.hostname || '127.0.0.1', + port: assignedPort, + }, + }; + // Fire onListen asynchronously, matching Deno's behavior + if (opts.onListen) { + queueMicrotask(opts.onListen); + } + return mockServer; + }), + upgradeWebSocket: vi.fn(), + }; + globalThis.Deno = denoShim; + + return { + get mockServer() { return mockServer; }, + + /** + * Simulate a WebSocket upgrade request. + * Returns the mock socket for driving events. 
+ */ + simulateUpgrade() { + /** @type {any} */ + const socket = { + onopen: null, + onmessage: null, + onclose: null, + readyState: 1, + send: vi.fn(), + close: vi.fn(), + }; + // Make it look like a WebSocket for readyState check + Object.defineProperty(socket, 'readyState', { value: 1, writable: true }); + + // Deno returns a 101 Switching Protocols response, but Node's Response + // rejects status < 200. Use a plain object mock instead. + const response = { status: 101 }; + /** @type {any} */ (globalThis.Deno.upgradeWebSocket).mockReturnValue({ socket, response }); + + // Simulate the fetch arriving + const req = new Request('http://localhost/', { + headers: { upgrade: 'websocket' }, + }); + if (handler) { + handler(req); + } + + // Fire onopen + if (socket.onopen) { + socket.onopen(); + } + + return socket; + }, + + /** Simulate a non-WS HTTP request */ + async simulateFetch(/** @type {Request} */ req) { + if (!handler) { + throw new Error('Server not started'); + } + return handler(req); + }, + }; +} + +describe('DenoWsAdapter', () => { + /** @type {ReturnType} */ + let mock; + /** @type {import('../../../../src/ports/WebSocketServerPort.js').WsServerHandle|null} */ + let server = null; + + beforeEach(() => { + mock = createDenoMock(); + }); + + afterEach(async () => { + if (server) { + await server.close(); + server = null; + } + Reflect.deleteProperty(globalThis, 'Deno'); + }); + + it('is an instance of WebSocketServerPort', () => { + expect(new DenoWsAdapter()).toBeInstanceOf(WebSocketServerPort); + }); + + it('starts a server via Deno.serve()', async () => { + const adapter = new DenoWsAdapter(); + server = adapter.createServer(() => {}); + const addr = await server.listen(4000, '0.0.0.0'); + + expect(globalThis.Deno.serve).toHaveBeenCalledTimes(1); + expect(addr.port).toBe(4000); + expect(addr.host).toBe('0.0.0.0'); + }); + + it('resolves actual port when port 0 is requested', async () => { + const adapter = new DenoWsAdapter(); + server = 
adapter.createServer(() => {}); + const addr = await server.listen(0); + + // Port 0 means OS-assigned — the mock assigns 49152 + expect(addr.port).not.toBe(0); + expect(addr.port).toBe(49152); + }); + + it('uses 127.0.0.1 as default host', async () => { + const adapter = new DenoWsAdapter(); + server = adapter.createServer(() => {}); + const addr = await server.listen(3000); + + expect(addr.host).toBe('127.0.0.1'); + }); + + it('fires onConnection when a WebSocket upgrades and opens', async () => { + const adapter = new DenoWsAdapter(); + /** @type {import('../../../../src/ports/WebSocketServerPort.js').WsConnection[]} */ + const connections = []; + + server = adapter.createServer((conn) => { connections.push(conn); }); + await server.listen(0); + + mock.simulateUpgrade(); + expect(connections).toHaveLength(1); + }); + + it('routes messages to conn.onMessage handler', async () => { + const adapter = new DenoWsAdapter(); + /** @type {string[]} */ + const received = []; + + server = adapter.createServer((conn) => { + conn.onMessage((msg) => { received.push(msg); }); + }); + await server.listen(0); + + const socket = mock.simulateUpgrade(); + // Simulate incoming message + if (socket.onmessage) { + socket.onmessage({ data: 'hello' }); + } + + expect(received).toEqual(['hello']); + }); + + it('routes close events to conn.onClose handler', async () => { + const adapter = new DenoWsAdapter(); + /** @type {number[]} */ + const codes = []; + + server = adapter.createServer((conn) => { + conn.onClose((code) => { codes.push(code ?? 
-1); }); + }); + await server.listen(0); + + const socket = mock.simulateUpgrade(); + if (socket.onclose) { + socket.onclose({ code: 1000, reason: 'done' }); + } + + expect(codes).toEqual([1000]); + }); + + it('conn.send() calls socket.send()', async () => { + const adapter = new DenoWsAdapter(); + /** @type {any} */ + let captured = null; + + server = adapter.createServer((conn) => { captured = conn; }); + await server.listen(0); + + const socket = /** @type {any} */ (mock.simulateUpgrade()); + captured?.send('outbound'); + + expect(socket.send).toHaveBeenCalledWith('outbound'); + }); + + it('conn.send() is a no-op when readyState is not OPEN', async () => { + const adapter = new DenoWsAdapter(); + /** @type {any} */ + let captured = null; + + server = adapter.createServer((conn) => { captured = conn; }); + await server.listen(0); + + const socket = /** @type {any} */ (mock.simulateUpgrade()); + Object.defineProperty(socket, 'readyState', { value: 3 }); // CLOSED + captured?.send('should not send'); + + expect(socket.send).not.toHaveBeenCalled(); + }); + + it('conn.close() calls socket.close()', async () => { + const adapter = new DenoWsAdapter(); + /** @type {any} */ + let captured = null; + + server = adapter.createServer((conn) => { captured = conn; }); + await server.listen(0); + + const socket = /** @type {any} */ (mock.simulateUpgrade()); + captured?.close(); + + expect(socket.close).toHaveBeenCalled(); + }); + + it('close() calls server.shutdown()', async () => { + const adapter = new DenoWsAdapter(); + server = adapter.createServer(() => {}); + await server.listen(0); + + await server.close(); + expect(/** @type {any} */ (mock.mockServer).shutdown).toHaveBeenCalled(); + server = null; + }); + + it('close() is safe when server was never started', async () => { + const adapter = new DenoWsAdapter(); + server = adapter.createServer(() => {}); + await expect(server.close()).resolves.toBeUndefined(); + server = null; + }); + + it('returns 404 for non-WebSocket 
HTTP requests', async () => { + const adapter = new DenoWsAdapter(); + server = adapter.createServer(() => {}); + await server.listen(0); + + const resp = await mock.simulateFetch(new Request('http://localhost/')); + expect(resp.status).toBe(404); + }); + + it('handles multiple connections independently', async () => { + const adapter = new DenoWsAdapter(); + /** @type {string[]} */ + const allMessages = []; + + server = adapter.createServer((conn) => { + conn.onMessage((msg) => { allMessages.push(msg); }); + }); + await server.listen(0); + + const s1 = mock.simulateUpgrade(); + const s2 = mock.simulateUpgrade(); + + if (s1.onmessage) { s1.onmessage({ data: 'from-1' }); } + if (s2.onmessage) { s2.onmessage({ data: 'from-2' }); } + + expect(allMessages).toEqual(['from-1', 'from-2']); + }); + + it('buffers messages arriving before onMessage handler is set', async () => { + const adapter = new DenoWsAdapter(); + /** @type {import('../../../../src/ports/WebSocketServerPort.js').WsConnection|null} */ + let savedConn = null; + /** @type {string[]} */ + const received = []; + + server = adapter.createServer((conn) => { + // Save conn but DON'T call onMessage yet + savedConn = conn; + }); + await server.listen(0); + + const socket = mock.simulateUpgrade(); + // Messages arrive before onMessage handler is registered + if (socket.onmessage) { + socket.onmessage({ data: 'early-1' }); + socket.onmessage({ data: 'early-2' }); + } + + // No handler yet — messages should be buffered + expect(received).toHaveLength(0); + + // Now set the handler — should flush buffered messages + expect(savedConn).not.toBeNull(); + const conn = /** @type {import('../../../../src/ports/WebSocketServerPort.js').WsConnection} */ (/** @type {unknown} */ (savedConn)); + conn.onMessage((/** @type {string} */ msg) => { received.push(msg); }); + expect(received).toEqual(['early-1', 'early-2']); + + // Subsequent messages go directly to handler + if (socket.onmessage) { + socket.onmessage({ data: 'late-1' 
}); + } + expect(received).toEqual(['early-1', 'early-2', 'late-1']); + }); +}); diff --git a/test/unit/infrastructure/adapters/InMemoryGraphAdapter.browser.test.js b/test/unit/infrastructure/adapters/InMemoryGraphAdapter.browser.test.js new file mode 100644 index 00000000..501c8565 --- /dev/null +++ b/test/unit/infrastructure/adapters/InMemoryGraphAdapter.browser.test.js @@ -0,0 +1,51 @@ +import { describe, it, expect } from 'vitest'; +import InMemoryGraphAdapter from '../../../../src/infrastructure/adapters/InMemoryGraphAdapter.js'; +import { sha1sync } from '../../../../src/infrastructure/adapters/sha1sync.js'; +import WarpGraph from '../../../../src/domain/WarpGraph.js'; +import WebCryptoAdapter from '../../../../src/infrastructure/adapters/WebCryptoAdapter.js'; + +describe('InMemoryGraphAdapter with injected hash (browser simulation)', () => { + it('basic operations work with sha1sync hash function', async () => { + const adapter = new InMemoryGraphAdapter({ hash: sha1sync }); + + const blobOid = await adapter.writeBlob('hello'); + const content = await adapter.readBlob(blobOid); + expect(new TextDecoder().decode(content)).toBe('hello'); + + const sha = await adapter.commitNode({ message: 'test commit' }); + expect(sha).toMatch(/^[0-9a-f]{40}$/); + + const info = await adapter.getNodeInfo(sha); + expect(info.message).toBe('test commit'); + }); + + it('produces identical SHAs to default node:crypto hash', async () => { + const clock = { now: () => 42 }; + const injected = new InMemoryGraphAdapter({ hash: sha1sync, clock }); + const defaultAdapter = new InMemoryGraphAdapter({ clock }); + + const sha1 = await injected.commitNode({ message: 'deterministic' }); + const sha2 = await defaultAdapter.commitNode({ message: 'deterministic' }); + expect(sha1).toBe(sha2); + }); + + it('WarpGraph works with injected hash and WebCryptoAdapter', async () => { + const persistence = new InMemoryGraphAdapter({ hash: sha1sync }); + const crypto = new WebCryptoAdapter(); + const 
graph = await WarpGraph.open({ + persistence, + graphName: 'browser-test', + writerId: 'alice', + crypto, + }); + + const patch = await graph.createPatch(); + patch.addNode('user:alice'); + patch.setProperty('user:alice', 'name', 'Alice'); + await patch.commit(); + + /** @type {any} */ + const state = await graph.materialize(); + expect(state.nodeAlive.entries.has('user:alice')).toBe(true); + }); +}); diff --git a/test/unit/infrastructure/adapters/LoggerObservabilityBridge.test.js b/test/unit/infrastructure/adapters/LoggerObservabilityBridge.test.js new file mode 100644 index 00000000..6de7fdf7 --- /dev/null +++ b/test/unit/infrastructure/adapters/LoggerObservabilityBridge.test.js @@ -0,0 +1,95 @@ +import { describe, it, expect, vi } from 'vitest'; +import LoggerObservabilityBridge from '../../../../src/infrastructure/adapters/LoggerObservabilityBridge.js'; + +function mockLogger() { + return { + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + child: vi.fn(), + }; +} + +describe('LoggerObservabilityBridge', () => { + describe('metric()', () => { + it('forwards metric as debug log with channel prefix', () => { + const logger = mockLogger(); + const bridge = new LoggerObservabilityBridge(logger); + + bridge.metric('chunk', { oid: 'abc', size: 1024 }); + + expect(logger.debug).toHaveBeenCalledWith('cas:metric:chunk', { + oid: 'abc', + size: 1024, + }); + }); + }); + + describe('log()', () => { + it('forwards debug level to logger.debug', () => { + const logger = mockLogger(); + const bridge = new LoggerObservabilityBridge(logger); + + bridge.log('debug', 'test message', { key: 'val' }); + + expect(logger.debug).toHaveBeenCalledWith('test message', { key: 'val' }); + }); + + it('forwards info level to logger.info', () => { + const logger = mockLogger(); + const bridge = new LoggerObservabilityBridge(logger); + + bridge.log('info', 'info msg'); + + expect(logger.info).toHaveBeenCalledWith('info msg', undefined); + }); + + it('forwards warn level to 
logger.warn', () => { + const logger = mockLogger(); + const bridge = new LoggerObservabilityBridge(logger); + + bridge.log('warn', 'warning'); + + expect(logger.warn).toHaveBeenCalledWith('warning', undefined); + }); + + it('forwards error level to logger.error', () => { + const logger = mockLogger(); + const bridge = new LoggerObservabilityBridge(logger); + + bridge.log('error', 'failure', { code: 'E_TEST' }); + + expect(logger.error).toHaveBeenCalledWith('failure', { code: 'E_TEST' }); + }); + }); + + describe('span()', () => { + it('returns an object with end() that logs duration', () => { + const logger = mockLogger(); + const bridge = new LoggerObservabilityBridge(logger); + + const s = bridge.span('restore'); + s.end({ chunks: 5 }); + + expect(logger.debug).toHaveBeenCalledTimes(1); + const [msg, meta] = logger.debug.mock.calls[0]; + expect(msg).toBe('cas:span:restore'); + expect(meta.chunks).toBe(5); + expect(typeof meta.durationMs).toBe('number'); + expect(meta.durationMs).toBeGreaterThanOrEqual(0); + }); + + it('end() works without meta argument', () => { + const logger = mockLogger(); + const bridge = new LoggerObservabilityBridge(logger); + + const s = bridge.span('store'); + s.end(); + + const [msg, meta] = logger.debug.mock.calls[0]; + expect(msg).toBe('cas:span:store'); + expect(typeof meta.durationMs).toBe('number'); + }); + }); +}); diff --git a/test/unit/infrastructure/adapters/NodeWsAdapter.test.js b/test/unit/infrastructure/adapters/NodeWsAdapter.test.js new file mode 100644 index 00000000..3cfacd6a --- /dev/null +++ b/test/unit/infrastructure/adapters/NodeWsAdapter.test.js @@ -0,0 +1,315 @@ +import { describe, it, expect, afterEach, beforeAll, afterAll } from 'vitest'; +import { mkdtemp, writeFile, rm } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import WebSocketServerPort from '../../../../src/ports/WebSocketServerPort.js'; +import NodeWsAdapter from 
'../../../../src/infrastructure/adapters/NodeWsAdapter.js'; + +describe('NodeWsAdapter', () => { + /** @type {import('../../../../src/ports/WebSocketServerPort.js').WsServerHandle|null} */ + let server = null; + + afterEach(async () => { + if (server) { + await server.close(); + server = null; + } + }); + + it('is an instance of WebSocketServerPort', () => { + const adapter = new NodeWsAdapter(); + expect(adapter).toBeInstanceOf(WebSocketServerPort); + }); + + it('starts and stops a server', async () => { + const adapter = new NodeWsAdapter(); + server = adapter.createServer(() => {}); + const addr = await server.listen(0); + expect(addr.port).toBeGreaterThan(0); + expect(addr.host).toBe('127.0.0.1'); + }); + + it('accepts a WebSocket connection and delivers hello', async () => { + const adapter = new NodeWsAdapter(); + /** @type {string[]} */ + const received = []; + + server = adapter.createServer((conn) => { + conn.send('hello from server'); + }); + const addr = await server.listen(0); + + const ws = new globalThis.WebSocket(`ws://127.0.0.1:${addr.port}`); + await new Promise((resolve, reject) => { + ws.onopen = resolve; + ws.onerror = reject; + }); + + const msg = await new Promise((resolve) => { + ws.onmessage = (e) => resolve(e.data); + }); + expect(msg).toBe('hello from server'); + ws.close(); + }); + + it('receives messages from clients via onMessage', async () => { + const adapter = new NodeWsAdapter(); + /** @type {string[]} */ + const serverReceived = []; + /** @type {Function} */ + let resolveMsg; + const msgPromise = new Promise((r) => { resolveMsg = r; }); + + server = adapter.createServer((conn) => { + conn.onMessage((msg) => { + serverReceived.push(msg); + resolveMsg(); + }); + }); + const addr = await server.listen(0); + + const ws = new globalThis.WebSocket(`ws://127.0.0.1:${addr.port}`); + await new Promise((resolve, reject) => { + ws.onopen = resolve; + ws.onerror = reject; + }); + + ws.send('ping from client'); + await msgPromise; + + 
expect(serverReceived).toEqual(['ping from client']); + ws.close(); + }); + + it('fires onClose when client disconnects', async () => { + const adapter = new NodeWsAdapter(); + /** @type {Function} */ + let resolveClose; + const closePromise = new Promise((r) => { resolveClose = r; }); + + server = adapter.createServer((conn) => { + conn.onClose((code) => { + resolveClose(code); + }); + }); + const addr = await server.listen(0); + + const ws = new globalThis.WebSocket(`ws://127.0.0.1:${addr.port}`); + await new Promise((resolve, reject) => { + ws.onopen = resolve; + ws.onerror = reject; + }); + ws.close(1000, 'done'); + + const code = await closePromise; + expect(code).toBe(1000); + }); + + it('round-trips a JSON message', async () => { + const adapter = new NodeWsAdapter(); + + server = adapter.createServer((conn) => { + conn.onMessage((msg) => { + const parsed = JSON.parse(msg); + conn.send(JSON.stringify({ echo: parsed.value })); + }); + }); + const addr = await server.listen(0); + + const ws = new globalThis.WebSocket(`ws://127.0.0.1:${addr.port}`); + await new Promise((resolve, reject) => { + ws.onopen = resolve; + ws.onerror = reject; + }); + + ws.send(JSON.stringify({ value: 42 })); + const reply = await new Promise((resolve) => { + ws.onmessage = (e) => resolve(e.data); + }); + + expect(JSON.parse(reply)).toEqual({ echo: 42 }); + ws.close(); + }); + + it('conn.close() terminates the connection from server side', async () => { + const adapter = new NodeWsAdapter(); + /** @type {Function} */ + let resolveReady; + const readyPromise = new Promise((r) => { resolveReady = r; }); + /** @type {import('../../../../src/ports/WebSocketServerPort.js').WsConnection|null} */ + let serverConn = null; + + server = adapter.createServer((conn) => { + serverConn = conn; + resolveReady(undefined); + }); + const addr = await server.listen(0); + + const ws = new globalThis.WebSocket(`ws://127.0.0.1:${addr.port}`); + const closePromise = new Promise((resolve) => { + ws.onclose = 
(e) => resolve(e.code); + }); + await readyPromise; + + // Close from server side + expect(serverConn).not.toBeNull(); + const conn = /** @type {import('../../../../src/ports/WebSocketServerPort.js').WsConnection} */ (/** @type {unknown} */ (serverConn)); + conn.close(); + const code = await closePromise; + // Server-initiated close should result in a clean close code + expect(code).toBeGreaterThanOrEqual(1000); + }); + + it('handles multiple concurrent connections', async () => { + const adapter = new NodeWsAdapter(); + let connectionCount = 0; + + server = adapter.createServer((conn) => { + connectionCount++; + conn.send(`you are #${connectionCount}`); + }); + const addr = await server.listen(0); + + const ws1 = new globalThis.WebSocket(`ws://127.0.0.1:${addr.port}`); + const ws2 = new globalThis.WebSocket(`ws://127.0.0.1:${addr.port}`); + + const [msg1, msg2] = await Promise.all([ + new Promise((resolve) => { ws1.onmessage = (e) => resolve(e.data); }), + new Promise((resolve) => { ws2.onmessage = (e) => resolve(e.data); }), + ]); + + expect(new Set([msg1, msg2])).toEqual(new Set(['you are #1', 'you are #2'])); + + ws1.close(); + ws2.close(); + }); + + it('listen() with custom host', async () => { + const adapter = new NodeWsAdapter(); + server = adapter.createServer(() => {}); + const addr = await server.listen(0, '127.0.0.1'); + expect(addr.host).toBe('127.0.0.1'); + expect(addr.port).toBeGreaterThan(0); + }); + + it('surfaces runtime server errors via onError callback', async () => { + const errors = /** @type {Error[]} */ ([]); + const adapter = new NodeWsAdapter({ onError: (err) => errors.push(err) }); + expect(adapter).toBeInstanceOf(WebSocketServerPort); + + // Verify onError is accepted without throwing + server = adapter.createServer(() => {}); + const addr = await server.listen(0); + expect(addr.port).toBeGreaterThan(0); + }); + + it('buffers messages arriving before onMessage handler is set', async () => { + const adapter = new NodeWsAdapter(); + /** 
@type {import('../../../../src/ports/WebSocketServerPort.js').WsConnection|null} */ + let savedConn = null; + /** @type {string[]} */ + const received = []; + /** @type {Function} */ + let resolveReady; + const readyPromise = new Promise((r) => { resolveReady = r; }); + + server = adapter.createServer((conn) => { + // Save conn but DON'T call onMessage yet — simulates delayed setup + savedConn = conn; + resolveReady(undefined); + }); + const addr = await server.listen(0); + + const ws = new globalThis.WebSocket(`ws://127.0.0.1:${addr.port}`); + await new Promise((resolve, reject) => { + ws.onopen = resolve; + ws.onerror = reject; + }); + await readyPromise; + + // Send messages before onMessage handler is registered + ws.send('early-1'); + ws.send('early-2'); + + // Small delay to ensure messages arrive at the server + await new Promise((r) => { setTimeout(r, 100); }); + + // Now set the handler — should flush buffered messages + expect(savedConn).not.toBeNull(); + const conn = /** @type {import('../../../../src/ports/WebSocketServerPort.js').WsConnection} */ (/** @type {unknown} */ (savedConn)); + conn.onMessage((/** @type {string} */ msg) => { received.push(msg); }); + expect(received).toEqual(['early-1', 'early-2']); + + // Subsequent messages go directly to handler + ws.send('late-1'); + await new Promise((r) => { setTimeout(r, 100); }); + expect(received).toEqual(['early-1', 'early-2', 'late-1']); + + ws.close(); + }); + + describe('with staticDir', () => { + /** @type {string} */ + let staticDir; + + beforeAll(async () => { + staticDir = await mkdtemp(join(tmpdir(), 'ws-static-')); + await writeFile(join(staticDir, 'index.html'), '

Hello

'); + await writeFile(join(staticDir, 'app.js'), 'console.log("ok")'); + }); + + afterAll(async () => { + await rm(staticDir, { recursive: true, force: true }); + }); + + it('serves static files over HTTP on the same port', async () => { + const adapter = new NodeWsAdapter({ staticDir }); + server = adapter.createServer(() => {}); + const addr = await server.listen(0); + + const res = await fetch(`http://127.0.0.1:${addr.port}/`); + expect(res.status).toBe(200); + expect(res.headers.get('content-type')).toContain('text/html'); + const body = await res.text(); + expect(body).toContain('Hello'); + }); + + it('serves non-HTML static files with correct MIME', async () => { + const adapter = new NodeWsAdapter({ staticDir }); + server = adapter.createServer(() => {}); + const addr = await server.listen(0); + + const res = await fetch(`http://127.0.0.1:${addr.port}/app.js`); + expect(res.status).toBe(200); + expect(res.headers.get('content-type')).toContain('text/javascript'); + }); + + it('still accepts WebSocket connections alongside HTTP', async () => { + const adapter = new NodeWsAdapter({ staticDir }); + server = adapter.createServer((conn) => { + conn.send('ws-hello'); + }); + const addr = await server.listen(0); + + const ws = new globalThis.WebSocket(`ws://127.0.0.1:${addr.port}`); + await new Promise((resolve, reject) => { + ws.onopen = resolve; + ws.onerror = reject; + }); + const msg = await new Promise((resolve) => { + ws.onmessage = (e) => resolve(e.data); + }); + expect(msg).toBe('ws-hello'); + ws.close(); + }); + + it('returns 404 for missing files with extension', async () => { + const adapter = new NodeWsAdapter({ staticDir }); + server = adapter.createServer(() => {}); + const addr = await server.listen(0); + + const res = await fetch(`http://127.0.0.1:${addr.port}/missing.css`); + expect(res.status).toBe(404); + }); + }); +}); diff --git a/test/unit/infrastructure/adapters/httpAdapterUtils.test.js 
b/test/unit/infrastructure/adapters/httpAdapterUtils.test.js new file mode 100644 index 00000000..e7ab7380 --- /dev/null +++ b/test/unit/infrastructure/adapters/httpAdapterUtils.test.js @@ -0,0 +1,131 @@ +import { describe, it, expect } from 'vitest'; +import { + readStreamBody, + toPortRequest, + MAX_BODY_BYTES, + ERROR_BODY, + ERROR_BODY_BYTES, + ERROR_BODY_LENGTH, + PAYLOAD_TOO_LARGE_BODY, + PAYLOAD_TOO_LARGE_BYTES, + PAYLOAD_TOO_LARGE_LENGTH, + noopLogger, +} from '../../../../src/infrastructure/adapters/httpAdapterUtils.js'; + +// ── helpers ────────────────────────────────────────────────────────────────── + +/** Build a ReadableStream from an array of Uint8Array chunks. */ +function streamFrom(/** @type {Uint8Array[]} */ chunks) { + return new ReadableStream({ + start(controller) { + for (const chunk of chunks) { + controller.enqueue(chunk); + } + controller.close(); + }, + }); +} + +// ── readStreamBody ─────────────────────────────────────────────────────────── + +describe('readStreamBody', () => { + it('returns undefined for an empty stream', async () => { + const result = await readStreamBody(streamFrom([])); + expect(result).toBeUndefined(); + }); + + it('concatenates a single chunk', async () => { + const data = new Uint8Array([1, 2, 3]); + const result = await readStreamBody(streamFrom([data])); + expect(result).toEqual(new Uint8Array([1, 2, 3])); + }); + + it('concatenates multiple chunks', async () => { + const a = new Uint8Array([10, 20]); + const b = new Uint8Array([30, 40, 50]); + const result = await readStreamBody(streamFrom([a, b])); + expect(result).toEqual(new Uint8Array([10, 20, 30, 40, 50])); + }); + + it('throws 413 when total exceeds MAX_BODY_BYTES', async () => { + // Stream two chunks that together exceed the limit. 
+ const big = new Uint8Array(MAX_BODY_BYTES); + const extra = new Uint8Array([1]); + await expect(readStreamBody(streamFrom([big, extra]))) + .rejects.toMatchObject({ message: 'Payload Too Large', status: 413 }); + }); + + it('returns Uint8Array, not Buffer', async () => { + const result = await readStreamBody(streamFrom([new Uint8Array([0])])); + expect(result).toBeInstanceOf(Uint8Array); + // Ensure it's a plain Uint8Array, not a Buffer subclass. + expect(Object.getPrototypeOf(result)).toBe(Uint8Array.prototype); + }); +}); + +// ── toPortRequest ──────────────────────────────────────────────────────────── + +describe('toPortRequest', () => { + it('converts a GET request with no body', async () => { + const req = new Request('http://localhost:3000/foo?bar=1', { method: 'GET' }); + const result = await toPortRequest(req); + expect(result.method).toBe('GET'); + expect(result.url).toBe('/foo?bar=1'); + expect(result.body).toBeUndefined(); + }); + + it('converts a POST request with a body', async () => { + const payload = JSON.stringify({ hello: 'world' }); + const req = new Request('http://localhost:3000/api', { + method: 'POST', + body: payload, + headers: { 'content-type': 'application/json' }, + }); + const result = await toPortRequest(req); + expect(result.method).toBe('POST'); + expect(result.url).toBe('/api'); + expect(result.body).toBeInstanceOf(Uint8Array); + expect(new TextDecoder().decode(result.body)).toBe(payload); + expect(result.headers['content-type']).toBe('application/json'); + }); + + it('rejects POST when content-length exceeds limit', async () => { + const req = new Request('http://localhost:3000/api', { + method: 'POST', + body: 'x', + headers: { 'content-length': String(MAX_BODY_BYTES + 1) }, + }); + await expect(toPortRequest(req)) + .rejects.toMatchObject({ message: 'Payload Too Large', status: 413 }); + }); + + it('skips body reading for HEAD requests', async () => { + const req = new Request('http://localhost:3000/', { method: 'HEAD' }); + 
const result = await toPortRequest(req); + expect(result.body).toBeUndefined(); + }); +}); + +// ── constants ──────────────────────────────────────────────────────────────── + +describe('shared constants', () => { + it('MAX_BODY_BYTES is 10 MB', () => { + expect(MAX_BODY_BYTES).toBe(10 * 1024 * 1024); + }); + + it('error body constants are consistent', () => { + expect(ERROR_BODY).toBe('Internal Server Error'); + expect(new TextDecoder().decode(ERROR_BODY_BYTES)).toBe(ERROR_BODY); + expect(ERROR_BODY_LENGTH).toBe(String(ERROR_BODY_BYTES.byteLength)); + }); + + it('payload-too-large constants are consistent', () => { + expect(PAYLOAD_TOO_LARGE_BODY).toBe('Payload Too Large'); + expect(new TextDecoder().decode(PAYLOAD_TOO_LARGE_BYTES)).toBe(PAYLOAD_TOO_LARGE_BODY); + expect(PAYLOAD_TOO_LARGE_LENGTH).toBe(String(PAYLOAD_TOO_LARGE_BYTES.byteLength)); + }); + + it('noopLogger.error is a no-op', () => { + expect(() => { noopLogger.error(); }).not.toThrow(); + }); +}); diff --git a/test/unit/infrastructure/adapters/lazyCasInit.test.js b/test/unit/infrastructure/adapters/lazyCasInit.test.js new file mode 100644 index 00000000..2a3104c5 --- /dev/null +++ b/test/unit/infrastructure/adapters/lazyCasInit.test.js @@ -0,0 +1,68 @@ +import { describe, it, expect, vi } from 'vitest'; +import { createLazyCas } from '../../../../src/infrastructure/adapters/lazyCasInit.js'; + +describe('createLazyCas', () => { + it('caches a resolved promise across multiple calls', async () => { + const factory = vi.fn().mockResolvedValue('cas-instance'); + const getCas = createLazyCas(factory); + + const a = await getCas(); + const b = await getCas(); + + expect(a).toBe('cas-instance'); + expect(b).toBe('cas-instance'); + expect(factory).toHaveBeenCalledTimes(1); + }); + + it('resets on rejection so subsequent calls retry', async () => { + const factory = vi.fn() + .mockRejectedValueOnce(new Error('init failed')) + .mockResolvedValueOnce('recovered'); + + const getCas = createLazyCas(factory); + + 
await expect(getCas()).rejects.toThrow('init failed'); + const result = await getCas(); + + expect(result).toBe('recovered'); + expect(factory).toHaveBeenCalledTimes(2); + }); + + it('returns the same promise for concurrent callers', async () => { + /** @type {((value: string) => void) | undefined} */ + let resolveInit; + const factory = vi.fn().mockImplementation( + () => new Promise((resolve) => { resolveInit = resolve; }), + ); + const getCas = createLazyCas(factory); + + const p1 = getCas(); + const p2 = getCas(); + expect(p1).toBe(p2); + + if (!resolveInit) { throw new Error('resolveInit not assigned'); } + resolveInit('shared'); + expect(await p1).toBe('shared'); + expect(await p2).toBe('shared'); + expect(factory).toHaveBeenCalledTimes(1); + }); + + it('concurrent callers during rejection all see the error', async () => { + const factory = vi.fn() + .mockRejectedValueOnce(new Error('boom')) + .mockResolvedValueOnce('ok'); + + const getCas = createLazyCas(factory); + + const p1 = getCas(); + const p2 = getCas(); + // Both should reject with the same error + await expect(p1).rejects.toThrow('boom'); + await expect(p2).rejects.toThrow('boom'); + + // After rejection, a new call retries + const result = await getCas(); + expect(result).toBe('ok'); + expect(factory).toHaveBeenCalledTimes(2); + }); +}); diff --git a/test/unit/infrastructure/adapters/sha1sync.test.js b/test/unit/infrastructure/adapters/sha1sync.test.js new file mode 100644 index 00000000..76b53bf5 --- /dev/null +++ b/test/unit/infrastructure/adapters/sha1sync.test.js @@ -0,0 +1,50 @@ +import { describe, it, expect } from 'vitest'; +import { createHash } from 'node:crypto'; +import { sha1sync } from '../../../../src/infrastructure/adapters/sha1sync.js'; + +describe('sha1sync', () => { + it('matches node:crypto for empty input', () => { + const expected = createHash('sha1').update(Buffer.alloc(0)).digest('hex'); + expect(sha1sync(new Uint8Array(0))).toBe(expected); + }); + + it('matches node:crypto 
for "hello"', () => { + const data = new TextEncoder().encode('hello'); + const expected = createHash('sha1').update(data).digest('hex'); + expect(sha1sync(data)).toBe(expected); + }); + + it('matches node:crypto for a Git blob header', () => { + const content = 'hello world'; + const blob = `blob ${content.length}\0${content}`; + const data = new TextEncoder().encode(blob); + const expected = createHash('sha1').update(data).digest('hex'); + expect(sha1sync(data)).toBe(expected); + }); + + it('matches node:crypto for binary data', () => { + const data = new Uint8Array(256); + for (let i = 0; i < 256; i++) data[i] = i; + const expected = createHash('sha1').update(data).digest('hex'); + expect(sha1sync(data)).toBe(expected); + }); + + it('matches node:crypto for exactly 64-byte input (one block)', () => { + const data = new Uint8Array(64).fill(0x41); // 64 'A' bytes + const expected = createHash('sha1').update(data).digest('hex'); + expect(sha1sync(data)).toBe(expected); + }); + + it('matches node:crypto for multi-block input', () => { + const data = new Uint8Array(1000).fill(0xFF); + const expected = createHash('sha1').update(data).digest('hex'); + expect(sha1sync(data)).toBe(expected); + }); + + it('throws RangeError for inputs >= 512 MB', () => { + // Don't actually allocate 512 MB — verify the guard triggers based on length + const fakeHuge = { length: 0x20000000 }; + expect(() => sha1sync(/** @type {Uint8Array} */ (fakeHuge))).toThrow(RangeError); + expect(() => sha1sync(/** @type {Uint8Array} */ (fakeHuge))).toThrow('512 MB'); + }); +}); diff --git a/test/unit/infrastructure/adapters/staticFileHandler.test.js b/test/unit/infrastructure/adapters/staticFileHandler.test.js new file mode 100644 index 00000000..9ce1e772 --- /dev/null +++ b/test/unit/infrastructure/adapters/staticFileHandler.test.js @@ -0,0 +1,148 @@ +import { describe, it, expect, beforeAll, afterAll } from 'vitest'; +import { mkdtemp, writeFile, mkdir, rm, symlink } from 'node:fs/promises'; +import 
{ join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { handleStaticRequest } from '../../../../src/infrastructure/adapters/staticFileHandler.js'; + +describe('handleStaticRequest', () => { + /** @type {string} */ + let root; + + beforeAll(async () => { + root = await mkdtemp(join(tmpdir(), 'static-test-')); + await mkdir(join(root, 'assets'), { recursive: true }); + await writeFile(join(root, 'index.html'), 'Hello'); + await writeFile(join(root, 'assets', 'app.js'), 'console.log("hi")'); + await writeFile(join(root, 'assets', 'style.css'), 'body { color: red }'); + await writeFile(join(root, 'data.json'), '{"ok":true}'); + await writeFile(join(root, 'image.png'), new Uint8Array([0x89, 0x50, 0x4e, 0x47])); + await writeFile(join(root, 'favicon.ico'), new Uint8Array([0x00])); + }); + + afterAll(async () => { + await rm(root, { recursive: true, force: true }); + }); + + it('serves index.html for /', async () => { + const result = await handleStaticRequest(root, '/'); + expect(result.status).toBe(200); + expect(result.headers['content-type']).toContain('text/html'); + expect(new TextDecoder().decode(/** @type {Uint8Array} */ (result.body))).toContain('Hello'); + }); + + it('serves index.html for trailing slash', async () => { + const result = await handleStaticRequest(root, '/subdir/'); + // subdir doesn't have index.html, so 404 + expect(result.status).toBe(404); + }); + + it('serves JS files with correct MIME type', async () => { + const result = await handleStaticRequest(root, '/assets/app.js'); + expect(result.status).toBe(200); + expect(result.headers['content-type']).toContain('text/javascript'); + expect(new TextDecoder().decode(/** @type {Uint8Array} */ (result.body))).toBe('console.log("hi")'); + }); + + it('serves CSS files with correct MIME type', async () => { + const result = await handleStaticRequest(root, '/assets/style.css'); + expect(result.status).toBe(200); + expect(result.headers['content-type']).toContain('text/css'); + }); + + 
it('serves JSON files with correct MIME type', async () => { + const result = await handleStaticRequest(root, '/data.json'); + expect(result.status).toBe(200); + expect(result.headers['content-type']).toContain('application/json'); + }); + + it('serves PNG files with correct MIME type', async () => { + const result = await handleStaticRequest(root, '/image.png'); + expect(result.status).toBe(200); + expect(result.headers['content-type']).toBe('image/png'); + }); + + it('serves ICO files with correct MIME type', async () => { + const result = await handleStaticRequest(root, '/favicon.ico'); + expect(result.status).toBe(200); + expect(result.headers['content-type']).toBe('image/x-icon'); + }); + + it('returns 404 for missing files with extension', async () => { + const result = await handleStaticRequest(root, '/missing.js'); + expect(result.status).toBe(404); + }); + + it('SPA fallback: serves index.html for extensionless paths', async () => { + const result = await handleStaticRequest(root, '/some/deep/route'); + expect(result.status).toBe(200); + expect(result.headers['content-type']).toContain('text/html'); + expect(new TextDecoder().decode(/** @type {Uint8Array} */ (result.body))).toContain('Hello'); + }); + + it('returns content-length header', async () => { + const result = await handleStaticRequest(root, '/data.json'); + expect(result.headers['content-length']).toBe('11'); + }); + + it('contains path traversal with .. inside root', async () => { + // resolve() normalizes ../.. to stay inside root — the file + // path becomes /etc/passwd. With extension .js it would 404; + // without extension, SPA fallback serves index.html. Either way, + // /etc/passwd is never read. 
+ const result = await handleStaticRequest(root, '/../../../etc/passwd.js'); + expect(result.status).toBe(404); + }); + + it('blocks null bytes in path', async () => { + const result = await handleStaticRequest(root, '/index.html\0.js'); + expect(result.status).toBe(403); + }); + + it('blocks encoded null bytes in path', async () => { + const result = await handleStaticRequest(root, '/index.html%00.js'); + expect(result.status).toBe(403); + }); + + it('returns 403 for malformed percent-encoding', async () => { + const result = await handleStaticRequest(root, '/bad%ZZpath'); + expect(result.status).toBe(403); + }); + + it('contains encoded traversal inside root', async () => { + const result = await handleStaticRequest(root, '/%2e%2e/%2e%2e/etc/passwd.js'); + expect(result.status).toBe(404); + }); + + it('blocks symlinks pointing outside the static root', async () => { + // Create a temp file outside the static root + const outsideDir = await mkdtemp(join(tmpdir(), 'outside-')); + await writeFile(join(outsideDir, 'secret.txt'), 'sensitive data'); + // Create a symlink inside the static root pointing to the outside file + await symlink(join(outsideDir, 'secret.txt'), join(root, 'escape.txt')); + + const result = await handleStaticRequest(root, '/escape.txt'); + // The symlink target is outside root — must not serve it + expect(result.status).toBe(404); + + await rm(join(root, 'escape.txt')); + await rm(outsideDir, { recursive: true, force: true }); + }); + + it('allows symlinks that resolve within the static root', async () => { + // Symlink from link.html → index.html (both inside root) + await symlink(join(root, 'index.html'), join(root, 'link.html')); + + const result = await handleStaticRequest(root, '/link.html'); + expect(result.status).toBe(200); + expect(new TextDecoder().decode(/** @type {Uint8Array} */ (result.body))).toContain('Hello'); + + await rm(join(root, 'link.html')); + }); + + it('uses application/octet-stream for unknown extensions', async () => { 
+ await writeFile(join(root, 'data.xyz'), 'binary'); + const result = await handleStaticRequest(root, '/data.xyz'); + expect(result.status).toBe(200); + expect(result.headers['content-type']).toBe('application/octet-stream'); + }); +}); diff --git a/test/unit/infrastructure/adapters/wsAdapterUtils.test.js b/test/unit/infrastructure/adapters/wsAdapterUtils.test.js new file mode 100644 index 00000000..73b27cdc --- /dev/null +++ b/test/unit/infrastructure/adapters/wsAdapterUtils.test.js @@ -0,0 +1,70 @@ +import { describe, it, expect } from 'vitest'; +import { + DEFAULT_HOST, + normalizeHost, + assertNotListening, + messageToString, +} from '../../../../src/infrastructure/adapters/wsAdapterUtils.js'; + +describe('wsAdapterUtils', () => { + describe('DEFAULT_HOST', () => { + it('is loopback', () => { + expect(DEFAULT_HOST).toBe('127.0.0.1'); + }); + }); + + describe('normalizeHost', () => { + it('returns the provided host when truthy', () => { + expect(normalizeHost('0.0.0.0')).toBe('0.0.0.0'); + expect(normalizeHost('::1')).toBe('::1'); + }); + + it('falls back to DEFAULT_HOST for empty string', () => { + expect(normalizeHost('')).toBe(DEFAULT_HOST); + }); + + it('falls back to DEFAULT_HOST for undefined', () => { + expect(normalizeHost(undefined)).toBe(DEFAULT_HOST); + }); + }); + + describe('assertNotListening', () => { + it('does nothing when server is null', () => { + expect(() => assertNotListening(null)).not.toThrow(); + }); + + it('does nothing when server is undefined', () => { + expect(() => assertNotListening(undefined)).not.toThrow(); + }); + + it('throws when server is truthy', () => { + expect(() => assertNotListening({})).toThrow('Server already listening'); + }); + }); + + describe('messageToString', () => { + it('passes strings through unchanged', () => { + expect(messageToString('hello')).toBe('hello'); + }); + + it('decodes Uint8Array to UTF-8', () => { + const bytes = new TextEncoder().encode('café'); + expect(messageToString(bytes)).toBe('café'); + 
}); + + it('decodes ArrayBuffer to UTF-8', () => { + const bytes = new TextEncoder().encode('test'); + expect(messageToString(bytes.buffer)).toBe('test'); + }); + + it('decodes Buffer (Node) to UTF-8', () => { + const buf = Buffer.from('node buffer'); + expect(messageToString(buf)).toBe('node buffer'); + }); + + it('merges Buffer[] fragments', () => { + const chunks = [Buffer.from('hello '), Buffer.from('world')]; + expect(messageToString(chunks)).toBe('hello world'); + }); + }); +}); diff --git a/test/unit/infrastructure/codecs/CborCodec.test.js b/test/unit/infrastructure/codecs/CborCodec.test.js index db88c27a..b3abfff3 100644 --- a/test/unit/infrastructure/codecs/CborCodec.test.js +++ b/test/unit/infrastructure/codecs/CborCodec.test.js @@ -139,7 +139,7 @@ describe('CborCodec', () => { const result1 = encode(obj); const result2 = encode(obj); - expect(result1.equals(result2)).toBe(true); + expect(result1).toEqual(result2); }); it('different key insertion order produces identical bytes', () => { @@ -152,8 +152,8 @@ describe('CborCodec', () => { const result2 = encode(obj2); const result3 = encode(obj3); - expect(result1.equals(result2)).toBe(true); - expect(result2.equals(result3)).toBe(true); + expect(result1).toEqual(result2); + expect(result2).toEqual(result3); }); it('produces identical bytes for complex nested structures', () => { @@ -172,7 +172,7 @@ describe('CborCodec', () => { const result1 = encode(obj1); const result2 = encode(obj2); - expect(result1.equals(result2)).toBe(true); + expect(result1).toEqual(result2); }); it('multiple encodes of deep nested structure are identical', () => { @@ -200,7 +200,7 @@ describe('CborCodec', () => { // All results should be identical for (let i = 1; i < results.length; i++) { - expect(results[0].equals(results[i])).toBe(true); + expect(results[0]).toEqual(results[i]); } }); @@ -211,7 +211,7 @@ describe('CborCodec', () => { const result1 = encode(obj1); const result2 = encode(obj2); - 
expect(result1.equals(result2)).toBe(false); + expect(result1).not.toEqual(result2); }); }); @@ -319,8 +319,8 @@ describe('CborCodec', () => { const enc3 = encode(obj3); // All encodings should be identical (deterministic) - expect(enc1.equals(enc2)).toBe(true); - expect(enc2.equals(enc3)).toBe(true); + expect(enc1).toEqual(enc2); + expect(enc2).toEqual(enc3); // Decoding should preserve the data const result = decode(enc1); @@ -345,7 +345,7 @@ describe('CborCodec', () => { const result = /** @type {any} */ (decode(encode(obj))); expect(result.data).toBeInstanceOf(Uint8Array); - expect(Buffer.from(result.data).equals(buffer)).toBe(true); + expect(Buffer.from(result.data)).toEqual(buffer); }); }); diff --git a/test/unit/ports/BlobStoragePort.test.js b/test/unit/ports/BlobStoragePort.test.js new file mode 100644 index 00000000..64b20e0e --- /dev/null +++ b/test/unit/ports/BlobStoragePort.test.js @@ -0,0 +1,14 @@ +import { describe, it, expect } from 'vitest'; +import BlobStoragePort from '../../../src/ports/BlobStoragePort.js'; + +describe('BlobStoragePort', () => { + it('store() throws not implemented', async () => { + const port = new BlobStoragePort(); + await expect(port.store(new Uint8Array())).rejects.toThrow('not implemented'); + }); + + it('retrieve() throws not implemented', async () => { + const port = new BlobStoragePort(); + await expect(port.retrieve('oid')).rejects.toThrow('not implemented'); + }); +}); diff --git a/test/unit/ports/WebSocketServerPort.test.js b/test/unit/ports/WebSocketServerPort.test.js new file mode 100644 index 00000000..aad8d85f --- /dev/null +++ b/test/unit/ports/WebSocketServerPort.test.js @@ -0,0 +1,9 @@ +import { describe, it, expect } from 'vitest'; +import WebSocketServerPort from '../../../src/ports/WebSocketServerPort.js'; + +describe('WebSocketServerPort', () => { + it('throws on direct call to createServer()', () => { + const port = new WebSocketServerPort(); + expect(() => port.createServer(() => {})).toThrow('not 
implemented'); + }); +}); diff --git a/vitest.config.js b/vitest.config.js index 9fb58f65..daae42a3 100644 --- a/vitest.config.js +++ b/vitest.config.js @@ -11,6 +11,11 @@ export default defineConfig({ '**/*.{test,spec}.?(c|m)[jt]s?(x)', '**/benchmark/*.benchmark.js', ], + exclude: [ + '**/node_modules/**', + '**/dist/**', + 'test/runtime/deno/**', + ], testTimeout: 60000, // 60s timeout for benchmark tests server: { deps: {