diff --git a/CHANGELOG.md b/CHANGELOG.md
index b6cd7120..aeb07b03 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,11 +7,122 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
+### Fixed
+
+- **`_readPatchBlob` null-guard** — `readBlob()` returning null (corrupt/missing blob) now throws `PersistenceError` with `E_MISSING_OBJECT` instead of passing null to the CBOR decoder.
+- **`browser.d.ts` missing exports** — Added `WarpError`, `createVersionVector`, and `generateWriterId` type declarations to match `browser.js` runtime exports. Fixed `WarpGraph` re-export from default to named.
+- **`package.json` files array missing type declarations** — Added `browser.d.ts` and `sha1sync.d.ts` to the `files` array so npm consumers receive browser/sha1sync type definitions.
+- **`isLoopback()` wildcard address documentation** — Added JSDoc and test coverage to explicitly document that wildcard bind addresses (`0.0.0.0`, `::`, `0:0:0:0:0:0:0:0`) are not treated as loopback and require `--expose`.
+- **Browser and sha1sync subpath exports missing `types` field** — `package.json` `"./browser"` and `"./sha1sync"` exports now include `"types"` entries pointing to `browser.d.ts` and `sha1sync.d.ts`, enabling TypeScript resolution for subpath consumers.
+- **`jsr.json` missing `browser.js` in publish.include** — JSR consumers importing `@git-stunts/git-warp/browser` now receive the file.
+- **`git warp serve` help text missing `--port`, `--host`, `--expose` flags** — All serve-specific options now appear in `--help` output.
+- **`WarpServeService` non-integer seek ceiling** — Fractional ceilings (e.g. `3.5`) are now rejected with `E_INVALID_PAYLOAD`. `Infinity` is intentionally accepted (treated as head).
+- **`WarpServeService` oversized message guard** — Messages exceeding 1 MiB are rejected with `E_MESSAGE_TOO_LARGE` before `JSON.parse`, preventing OOM on malicious payloads.
+- **`WarpServeService` oversized property value guard** — Wildcard-typed mutation args exceeding 64 KiB are rejected with `E_INVALID_ARGS`.
+- **`SyncProtocol` / `WormholeService` null blob guard** — `readBlob()` / `retrieve()` results are now null-checked, throwing `PersistenceError(E_MISSING_OBJECT)` instead of passing `null` to the codec.
+- **`hexDecode` regex replaced with charCode loop** — Direct character code validation avoids regex backtracking on large inputs.
+- **WS adapter pre-handler message buffering** — Messages arriving before `onMessage(handler)` is called are now buffered and flushed when the handler is set. Prevents message loss in all WS adapters (Node, Bun, Deno) when connection setup is asynchronous.
+- **NodeWsAdapter `onError` callback** — Constructor now accepts an optional `onError` callback that surfaces runtime server errors instead of silently swallowing them.
+- **`wsAdapterUtils.messageToString()` TextDecoder reuse** — Hoisted `TextDecoder` to module level, avoiding per-call allocation.
+- **Static file handler response objects frozen** — `FORBIDDEN` and `NOT_FOUND` response constants are now `Object.freeze()`d to prevent accidental mutation.
+- **`sha1sync` comment clarification** — Updated misleading comment about the `>= 0x20000000` guard to explain it ensures `msg.length * 8` fits in uint32.
+- **`_broadcastDiff` Set mutation during iteration** — Deleting dead clients from `this._clients` mid-`for...of` could skip the next entry. Dead connections are now collected and evicted after the loop completes.
+- **Double-SIGINT re-entrancy in `serve` shutdown** — Rapid Ctrl+C fired `shutdown()` concurrently twice, racing `close()` and `process.exit()`. Added a `closing` guard.
+- **Catch-all error envelope double-parsing** — The last-resort `.catch()` on `_onMessage` re-parsed the raw JSON to extract the correlation `id`. The ID is now extracted before the async call, avoiding double-parse and ensuring availability even if the raw message was consumed.
+- **`WarpServeService` bare `Function` types** — Replaced loose `Function` JSDoc types in `resolveGraph`, constructor, and `_applyMutateOps` with a typed `GraphHandle` typedef carrying specific method signatures.
+- **`jsr.json` missing `./browser` and `./sha1sync` exports** — Subpath exports added to `package.json` were not mirrored in `jsr.json`. JSR consumers can now import both.
+- **`CasBlobAdapter` JSDoc `Buffer|Uint8Array`** — Narrowed `encryptionKey` type to `Uint8Array` per project convention.
+- **`WarpServeService.listen()` double-call guard** — Calling `listen()` twice no longer silently creates duplicate subscriptions. Second call throws `"Server is already listening"`.
+- **`WarpServeService.close()` dangling sockets** — Active WebSocket connections are now closed during shutdown instead of being silently abandoned.
+- **`WarpServeService._handleOpen()` premature openGraphs add** — Graph is now marked as open only after materialization succeeds, preventing stale entries on failure.
+- **`WarpServeService._applyMutateOps()` interleaved validation** — All ops in a batch are validated before `createPatch()` is called, avoiding wasted patch allocations on invalid input.
+- **`base64Decode` silent garbage acceptance** — Malformed base64 input now throws `RangeError` instead of silently decoding to wrong output.
+- **`NodeWsAdapter` state leak on failed start** — `listen()` failures now reset internal state (`_wss`, `_httpServer`), unblocking subsequent retry attempts.
+- **`isLoopback()` incomplete range** — Now recognizes the full `127.0.0.0/8` range, not just `127.0.0.1`.
+- **`buildSeekCacheKey` outside try/catch** — Cache key generation failure (e.g., crypto unavailable) is now caught and treated as a cache miss instead of breaking materialization.
+- **`BunWsAdapter` test `globalThis.Bun` leak** — Tests now save and restore the original `globalThis.Bun` instead of deleting it unconditionally.
+- **`vi.waitFor()` boolean callbacks in serve tests** — Replaced 22 boolean-returning callbacks with assertion-based ones to prevent premature resolution.
+- **`WarpServeService.listen()` leaked subscriptions on bind failure** — If `server.listen()` rejected (e.g., EADDRINUSE), graph subscriptions were already registered and never cleaned up, causing ghost broadcast handlers. `listen()` now defers `_server` assignment and subscription registration until bind succeeds, and cleans up on failure.
+- **`_onConnection` catch leaked internal error details** — The last-resort catch handler sent raw `err.message` (which could contain file paths, stack traces, etc.) to untrusted WebSocket clients. Now sends a generic `"Internal error"` message.
+- **`git warp serve` silent blob data loss** — Mutation ops like `attachContent` and `attachEdgeContent` are async (they write blobs), but `_applyMutateOps` was not awaiting them. `patch.commit()` could fire before the blob write completed. Now all ops are awaited.
+- **DenoWsAdapter port-0 resolution** — When binding to port 0 (OS-assigned), `onListen` resolved with the requested port (0) instead of the actual assigned port. Now reads `server.addr.port`, matching Node and Bun adapter behavior.
+- **Static file handler symlink traversal** — A symlink inside `staticDir` pointing outside the root could bypass `safePath()` and serve arbitrary files. `tryReadFile` now resolves symlinks with `realpath()` and re-checks the prefix before reading.
+- **`base64Encode` / `base64Decode` memory overhead** — Replaced intermediate binary string approach (`String.fromCharCode` / `charCodeAt` via `btoa`/`atob`) with direct table-based base64 encoding/decoding, eliminating memory spikes on large buffers (e.g., StreamingBitmapIndexBuilder shards).
+- **Static file handler null-byte bypass** — `safePath()` now re-checks for `\0` after `decodeURIComponent()` (prevents `%00` bypass) and catches malformed percent-encoding (e.g., `%ZZ`) instead of throwing.
+- **`git warp serve` writerId validation** — The auto-generated writerId (`serve:host:port`) contained colons, which are not allowed by `validateWriterId`. Now sanitizes to `serve-host-port` by replacing invalid characters with dashes.
+- **`git warp serve` port-0 writerId collision** — When binding to port 0 (OS-assigned ephemeral port), every invocation produced the same writerId `serve-127.0.0.1-0`. Now includes a timestamp and PID component (`ephemeral-<timestamp>-<pid>`) to prevent collisions even across concurrent invocations in the same millisecond.
+- **`git warp serve` IPv6 URL bracketing** — IPv6 addresses like `::1` are now bracketed in WebSocket and HTTP URLs (`ws://[::1]:3000`) per RFC 3986.
+- **Inspector WebSocket default URL** — Hardcoded `ws://localhost:3000` replaced with `window.location`-derived URL, so `--static` serving on any port connects correctly without needing `?server=` param.
+- **JSDoc type annotations** — Resolved 39 pre-existing `tsc --noEmit` strict-mode errors across 17 source files. Added missing `encrypted`, `blobStorage`, and `patchBlobStorage` fields to JSDoc `@param`/`@typedef` types; created `WarpGraphWithMixins` typedef for mixin methods calling `_readPatchBlob`; installed `@types/ws` for Node WebSocket adapter; fixed `Uint8Array` assignability issues; narrowed `chunking.strategy` literal types for CAS adapters; added type annotations to callback parameters in WS adapters.
+- **Inspector: "Go live" after time-travel** — `setCeiling(Infinity)` now calls `socket.open()` to re-materialize at head instead of sending `seek` with no ceiling. The server also now accepts `Infinity` as a ceiling value (treating it as "materialize at head") for robustness.
+- **Inspector: localStorage persistence timing** — Server URL is now persisted to `localStorage` only after a successful connection, preventing a bad URL from locking users into a reconnect loop on reload.
+- **CasBlobAdapter error propagation** — `retrieve()` now uses `CasError.code` (`MANIFEST_NOT_FOUND`, `GIT_ERROR`) from `@git-stunts/git-cas` to identify legacy blob fallback cases, with message-based matching as a fallback for non-CasError exceptions. Previously used brittle string matching on all error messages.
+- **Dead `writerIds` code removed** — `WarpServeService` no longer stores per-session `writerIds` from `open` messages. The field was populated but never consumed — all mutations use the server's writer identity.
+- **`_broadcastDiff` dead-client resilience** — A single dead WebSocket connection in `_broadcastDiff` could abort the loop, preventing remaining subscribed clients from receiving the diff. Each `send()` is now wrapped in try/catch; dead connections are evicted.
+- **`attachContent`/`attachEdgeContent` wire validation** — Mutation arg validation now requires string content for `attachContent` and `attachEdgeContent` over WebSocket JSON. Previously accepted any type via wildcard (`*`), but `Uint8Array` cannot survive JSON serialization.
+- **BunWsAdapter `close()` fire-and-forget** — `BunWsAdapter.close()` used `void server.stop()` and returned immediately. Now awaits the `stop()` promise, ensuring graceful shutdown.
+- **EncryptionError unused `code` option** — Removed `code` from the constructor options typedef. The error code is always `E_ENCRYPTED_PATCH`; the option was dead.
+- **CasBlobAdapter `Buffer.from` → `TextEncoder`** — Replaced `Buffer.from(content, 'utf8')` with `new TextEncoder().encode(content)` for consistency with the Uint8Array domain boundary.
+- **Crypto adapter hmac wrapping** — Replaced `new Uint8Array(result.buffer, result.byteOffset, result.byteLength)` with `new Uint8Array(result)` in both `defaultCrypto` and `NodeCryptoAdapter.hmac()`, preventing shared ArrayBuffer pool aliasing.
+- **Test `Buffer` usage cleanup** — Replaced `Buffer.from()` in type-check consumer test and `Buffer.from(result.buffer)` in CasSeekCacheAdapter test with `TextEncoder`/`TextDecoder`.
+- **Duplicate `open()` in encryption test** — Consolidated redundant second `WarpGraph.open()` call in `WarpGraph.encryption.test.js` into a second assertion on the same promise.
+
+### Changed
+
+- **BREAKING: Uint8Array migration** — All domain-layer and port contract types narrowed from `Buffer|Uint8Array` to `Uint8Array`. Return types of `readBlob()`, `hmac()`, `serialize()`, `getContent()`, `getEdgeContent()`, and all bitmap index methods now return `Uint8Array` instead of `Buffer`. Downstream TypeScript consumers using Buffer-specific APIs (`.toString('hex')`, `.equals()`) on return values must migrate to `hexEncode()`/`textDecode()` from `domain/utils/bytes.js` and standard comparison operators. Buffer is now confined to infrastructure adapters only.
+- **`TrustCrypto` re-export shim deleted** — `src/domain/trust/TrustCrypto.js` (which re-exported from infrastructure) has been removed. Import directly from `src/infrastructure/adapters/TrustCryptoAdapter.js`. The domain layer no longer contains any infrastructure imports.
+- **`buildSeekCacheKey` is now async** — Replaced direct `node:crypto` import with domain-local `defaultCrypto.hash()`, eliminating a hexagonal boundary violation. Both call sites were already async.
+- **`process.stdout.columns` removed from visualization layer** — Terminal width is now injected from the CLI presenter (composition root). The visualization layer no longer references Node-only globals.
+- **HTTP adapter DRY cleanup** — Shared `toPortRequest()`, error body constants, and pre-encoded byte arrays extracted into `httpAdapterUtils.js`. BunHttpAdapter and DenoHttpAdapter now import from the shared module.
+- **Lazy CAS init extracted** — The duplicated lazy-promise-with-error-reset pattern in `CasBlobAdapter._getCas()` and `CasSeekCacheAdapter._getCas()` replaced with shared `createLazyCas()` factory in `lazyCasInit.js`.
+- **`computeRecordId()` and `verifyRecordId()` are now async** — These functions in `TrustCanonical.js` now use the injected `CryptoPort` instead of importing `node:crypto` directly. Callers must `await` the result.
+- **`hmac()` returns `Uint8Array`** — `NodeCryptoAdapter.hmac()`, `WebCryptoAdapter.hmac()`, and `defaultCrypto.hmac()` now return `Uint8Array` instead of `Buffer`. The raw HMAC digest bytes are identical; only the wrapper type changed.
+- **`@git-stunts/git-cas` v3.0.0 → v5.2.4** — Two major version jump. New capabilities now available: `ObservabilityPort` (replaces EventEmitter), streaming restore, CDC chunking (98.4% chunk reuse), envelope encryption (DEK/KEK), key rotation. No breaking changes for git-warp's usage — `CasSeekCacheAdapter` continues to work as-is.
+- **CDC chunking for seek cache** — `CasSeekCacheAdapter` now uses content-defined chunking (`CdcChunker`) instead of fixed-size chunking. Consecutive seek snapshots share most content; CDC's rolling-hash boundaries yield ~98.4% chunk reuse on incremental edits, significantly reducing Git object storage for the seek cache.
+- **Encrypted seek cache** — `CasSeekCacheAdapter` accepts an optional `encryptionKey` constructor param. When set, cached state snapshots are encrypted at rest using AES-256-GCM via git-cas.
+- **CAS observability bridge** — New `LoggerObservabilityBridge` adapter translates git-cas `ObservabilityPort` calls (metric, log, span) into git-warp `LoggerPort` calls. `CasSeekCacheAdapter` accepts an optional `logger` param to surface CAS operations through git-warp's structured logging.
+- **Blob attachments via CAS (B160)** — New `BlobStoragePort` and `CasBlobAdapter` provide a hexagonal abstraction for content blob storage. When `blobStorage` is injected, `attachContent()`/`attachEdgeContent()` store blobs via git-cas (CDC-chunked, optionally encrypted) instead of raw Git blobs. `getContent()`/`getEdgeContent()` retrieve via CAS with automatic fallback to raw Git blobs for backward compatibility with pre-CAS content.
+- **Streaming seek cache restore (B163)** — `CasSeekCacheAdapter.get()` now prefers `cas.restoreStream()` (git-cas v4+) for I/O pipelining — chunk reads overlap with buffer accumulation. Falls back to `cas.restore()` for older git-cas versions.
+- **Graph encryption at rest (B164)** — New `patchBlobStorage` option on `WarpGraph.open()`. When a `BlobStoragePort` (e.g. `CasBlobAdapter` with encryption key) is injected, patch CBOR is encrypted before writing to Git and decrypted on read. An `eg-encrypted: true` commit trailer marks encrypted patches. All 6 patch read sites and the write path are threaded. `EncryptionError` is thrown when attempting to read encrypted patches without a key. Mixed encrypted and unencrypted patches are fully supported — plain patches read via `persistence.readBlob()`, encrypted via `patchBlobStorage.retrieve()`.
+
### Added
+- **`--writer-id` flag for `git warp serve`** — Allows setting an explicit, stable writer identity instead of the auto-derived `serve-<host>-<port>` value. Useful for reproducible testing and multi-instance orchestration where deterministic writer identities are needed.
+- **`src/domain/utils/bytes.js`** — Portable byte-manipulation utilities replacing Node.js Buffer methods: `hexEncode`, `hexDecode`, `base64Encode`, `base64Decode`, `concatBytes`, `textEncode`, `textDecode`. Works identically on Node, Bun, Deno, and browsers.
+- **ESLint `no-restricted-globals` for Buffer** — `Buffer` is now banned in `src/domain/**/*.js` via ESLint. Future regressions are caught at lint time.
+- **`git warp serve --expose` flag** — Binding to a non-loopback address now requires `--expose` to prevent accidental network exposure. Without the flag, the command exits with a usage error.
+- **`wsAdapterUtils.js`** — Shared utilities for WebSocket adapters (`normalizeHost`, `assertNotListening`, `messageToString`), following the `httpAdapterUtils.js` pattern. All three WS adapters (Bun, Deno, Node) now use these instead of duplicating host normalization, listen guards, and message decoding.
+- **Inspector: architecture pivot to WebSocket** — Rewired the Vue app from in-memory `WarpGraph` instances to a live WebSocket connection via `WarpSocket`. The browser now connects to `git warp serve` and views/edits a real Git-backed graph. Replaced the 4-viewport multi-writer demo with a single-viewport, single-connection model. All mutations go through `socket.mutate()` and state updates arrive via server-pushed diffs.
+- **Bun + Deno WebSocket adapters** — `git warp serve` now auto-detects the runtime and uses native WebSocket APIs on all three platforms. `BunWsAdapter` uses `Bun.serve()` with the `websocket` handler option; `DenoWsAdapter` uses `Deno.serve()` + `Deno.upgradeWebSocket()`. The `serve` CLI command dynamically imports only the relevant adapter via `createWsAdapter()`, so the `ws` npm package is never loaded on Bun/Deno.
+- **Static file serving** — `git warp serve --static <dir>` serves a built SPA (or any static directory) over HTTP on the same port as the WebSocket server. Supports SPA client-side routing fallback, correct MIME types for common web assets, and path traversal protection.
+- **Browser-compatible `InMemoryGraphAdapter`** — Replaced hard `node:crypto` and `node:stream` imports with lazy-loaded fallbacks. A new `hash` constructor option lets callers inject a synchronous SHA-1 function for environments where `node:crypto` is unavailable (e.g. browsers). `node:stream` is now dynamically imported only in `logNodesStream()`.
+- **Browser-safe `defaultCrypto`** — The domain-level crypto default now lazy-loads `node:crypto` via top-level `await import()` with a try/catch, so importing `WarpGraph` in a browser no longer crashes at module evaluation time. Callers must inject a CryptoPort explicitly when `node:crypto` is unavailable.
+- **`sha1sync` utility** (`@git-stunts/git-warp/sha1sync`) — Minimal synchronous SHA-1 implementation (~110 LOC) for browser content addressing with `InMemoryGraphAdapter`. Not for security — only for Git object ID computation.
+- **`browser.js` entry point** (`@git-stunts/git-warp/browser`) — Curated re-export of browser-safe code: `WarpGraph`, `InMemoryGraphAdapter`, `WebCryptoAdapter`, CRDT primitives, errors, and `generateWriterId`. No `node:` imports in the critical path.
- **Documentation enhancements in README.md** — Added a high-level Documentation Map, a detailed Graph Traversal Directory, an expanded Time-Travel (Seek) guide, and updated Runtime Compatibility information (Node.js, Bun, Deno).
- **Local-First Applications use-case** — Added git-warp as a backend for LoFi software.
+### Removed
+
+- **Inspector extracted to standalone repo** — The Git WARP Inspector (formerly `demo/browsa/`) has been extracted to [git-stunts/git-warp-web-inspector](https://github.com/git-stunts/git-warp-web-inspector). The `demo/` directory, `test/unit/browsa/`, and `TASKS.md` have been removed from this repository.
+- **Inspector: scenario runner** — Removed `ScenarioPanel.vue` and all scenario infrastructure. Multi-writer scenarios don't apply to the single-connection WebSocket model.
+- **Inspector: in-memory sync** — Removed `InProcessSyncBus.js` and `InsecureCryptoAdapter.js`. No in-memory sync or browser-side crypto needed with the server-backed architecture.
+- **Inspector: multi-viewport grid** — Removed 4-viewport layout, sync buttons, and online/offline toggles. Multiple browser windows serve the multi-writer use case instead.
+- **Inspector: Vite stubs** — Removed `src/stubs/` directory (empty.js, node-crypto.js, node-stream.js, node-module.js), `trailerCodecBufferShim()` plugin, and all resolve aliases. The browser no longer imports git-warp — it communicates via WebSocket only.
+
+### Security
+
+- **WebSocket mutation op allowlist** — `WarpServeService._handleMutate` now validates mutation ops against `ALLOWED_MUTATE_OPS` (`addNode`, `removeNode`, `addEdge`, `removeEdge`, `setProperty`, `setEdgeProperty`, `attachContent`, `attachEdgeContent`). Previously, any method on the `PatchBuilderV2` prototype could be invoked by a WebSocket client, including internal methods.
+- **WebSocket mutation arg validation** — `WarpServeService._applyMutateOps` now validates argument count and types per-op before calling `patch[op](...args)`. Untrusted JSON args with wrong types or counts are rejected with `E_INVALID_ARGS`.
+- **Protocol payload validation** — All `WarpServeService` message handlers (`open`, `mutate`, `inspect`, `seek`) now validate incoming payloads for required fields and correct types before processing. Invalid payloads receive `E_INVALID_PAYLOAD` error envelopes.
+- **`hexDecode` input validation** — `hexDecode()` now throws `RangeError` on odd-length or non-hex input instead of silently coercing invalid characters to `0x00`.
+- **WarpSocket request timeout** — `WarpSocket._request()` now enforces a configurable timeout (default 30s). Pending requests that receive no server response reject with a timeout error instead of leaking forever.
+- **Vite `allowedHosts` scoped** — Inspector dev server no longer sets `allowedHosts: true`. Restricted to `localhost` and `127.0.0.1` to prevent DNS rebinding.
+
+### Documentation
+
+- **README `git warp serve` flags** — Added `--expose` and `--writer-id` to the CLI usage example.
+
## [13.1.0] - 2026-03-04
### Added
diff --git a/README.md b/README.md
index e0e53378..a64a9311 100644
--- a/README.md
+++ b/README.md
@@ -550,6 +550,9 @@ git warp history --writer alice
# Check graph health, status, and GC metrics
git warp check
+
+# Start WebSocket server for browser viewer
+git warp serve [--port 3000] [--host 127.0.0.1] [--static <dir>] [--expose] [--writer-id <id>]
```
### Time-Travel (Seek)
@@ -591,6 +594,12 @@ All commands accept `--repo <path>` to target a specific Git repository, `--json
+### Git WARP Inspector
+
+The [Git WARP Inspector](https://github.com/git-stunts/git-warp-web-inspector) is an interactive browser-based graph viewer that connects to a live `git warp serve` instance over WebSocket. It renders graphs using ELK layout, supports time-travel via seek, and shows real-time diffs as the graph changes.
+
+See the [git-warp-web-inspector](https://github.com/git-stunts/git-warp-web-inspector) repository for setup and development instructions.
+
## Architecture
```mermaid
diff --git a/ROADMAP.md b/ROADMAP.md
index cfa2816b..347c537d 100644
--- a/ROADMAP.md
+++ b/ROADMAP.md
@@ -1,6 +1,6 @@
# ROADMAP — @git-stunts/git-warp
-> **Current version:** v13.0.0
+> **Current version:** v14.0.0
> **Last reconciled:** 2026-03-04 (priority triage: 45 standalone items sorted into P0–P6 tiers, wave-based execution order, dependency chains mapped)
> **Completed milestones:** [docs/ROADMAP/COMPLETED.md](docs/ROADMAP/COMPLETED.md)
@@ -189,10 +189,13 @@ No dependencies. Do these first.
### P1 — Correctness & Test Infrastructure
-B36 and B37 improve velocity for all future test work — do them early. B19 + B22 batch as one PR (Conformance Property Pack).
+B36 and B37 improve velocity for all future test work — do them early. B19 + B22 batch as one PR (Conformance Property Pack). B165 and B167 completed (Defensive Hardening Sprint); B166 remains.
| ID | Item | Effort |
|----|------|--------|
+| B165 | ✅ **WARPSERVESERVICE `listen()` DEFERRED STATE MUTATION** — `listen()` now defers `_server` assignment and subscription registration until bind succeeds; on failure, cleans up subscriptions. `_onConnection` catch now sends generic `"Internal error"` instead of raw `err.message`. **File:** `src/domain/services/WarpServeService.js` | S |
+| B166 | **ADAPTER CLEANUP CONTRACTS** — `NodeWsAdapter.close()` doesn't reset `state.wss`/`state.httpServer`/remove listeners after shutdown; `listenWithHttp` error path leaks partial state. **File:** `src/infrastructure/adapters/NodeWsAdapter.js` | M |
+| B167 | ✅ **SERVE TEST COVERAGE GAPS** — Added tests for: listen-failure cleanup (leaked subscriptions), double-listen guard, error sanitization (no internal detail leakage), `attachContent`/`attachEdgeContent` smoke tests through mutation pipeline. **File:** `test/unit/domain/services/WarpServeService.test.js` | S |
| B36 | **FLUENT STATE BUILDER FOR TESTS** — `StateBuilder` helper replacing manual `WarpStateV5` literals | M |
| B37 | **SHARED MOCK PERSISTENCE FIXTURE** — dedup `createMockPersistence()` across trust test files | S |
| B48 | **ESLINT BAN `= {}` CONSTRUCTOR DEFAULTS WITH REQUIRED PARAMS** — catches the pattern where `= {}` silently makes required options optional at the type level (found in CommitDagTraversalService, DagTraversal, DagPathFinding, DagTopology, BitmapIndexReader) | S |
@@ -250,6 +253,7 @@ No hard dependencies. Pick up opportunistically after P2.
|----|------|--------|
| B155 | **`levels()` AS LIGHTWEIGHT `--view` LAYOUT** — `levels()` is exactly the Y-axis assignment a layered DAG layout needs. For simple DAGs, `levels()` + left-to-right X sweep could produce clean layouts without the 2.5MB ELK import. Offer `--view --layout=levels` as an instant rendering mode, reserving ELK for complex graphs. **Files:** `src/visualization/layouts/`, `bin/cli/commands/view.js` | M |
| B156 | **STRUCTURAL DIFF VIA TRANSITIVE REDUCTION** — compute `transitiveReduction(stateA)` vs `transitiveReduction(stateB)` to produce a compact structural diff that strips implied edges and shows only "load-bearing" changes. Natural fit for H1 (Time-Travel Delta Engine) as `warp diff --mode=structural`. | L |
+| B157 | ✅ **BROWSER COMPATIBILITY (Phase 1-3)** — Make `InMemoryGraphAdapter` and `defaultCrypto` browser-safe by lazy-loading `node:crypto`/`node:stream`. New `sha1sync` utility for browser content addressing. New `browser.js` entry point and `./browser`+`./sha1sync` package exports. | M |
### P6 — Documentation & Process
@@ -267,6 +271,20 @@ Low urgency. Fold into PRs that already touch related files.
| B129 | **CONTRIBUTOR REVIEW-LOOP HYGIENE GUIDE** — add section to `CONTRIBUTING.md` covering commit sizing, CodeRabbit cooldown strategy, and when to request bot review. From BACKLOG 2026-02-27. | S |
| B147 | **RFC FIELD COUNT DRIFT DETECTOR** — script that counts WarpGraph instance fields (grep `this._` in constructor) and warns if design RFC field counts diverge. Prevents stale numbers in `warpgraph-decomposition.md`. From B145 PR review. **Depends on:** B143 RFC (exists) | S |
+### P7 — git-cas Modernization
+
+Upgrade from `@git-stunts/git-cas` v3.0.0 to v5.2.4 and leverage new capabilities. Currently git-warp only uses git-cas for the seek cache (`CasSeekCacheAdapter`). The v4.x/v5.x releases add ObservabilityPort, streaming restore, CDC chunking (98.4% chunk reuse on edits), envelope encryption (DEK/KEK), and key rotation.
+
+| ID | Item | Effort |
+|----|------|--------|
+| B158 | ✅ **UPGRADE `@git-stunts/git-cas` TO v5** — bumped `^3.0.0` → `^5.2.4`. 4872 tests pass, zero regressions. | S |
+| B159 | ✅ **CDC CHUNKING FOR SEEK CACHE** — `CasSeekCacheAdapter._initCas()` now constructs CAS with `chunking: { strategy: 'cdc' }`. ~98% chunk reuse on incremental snapshots. | S |
+| B160 | ✅ **BLOB ATTACHMENTS VIA CAS** — New `BlobStoragePort` + `CasBlobAdapter` provide a hexagonal abstraction for content blob storage. `PatchBuilderV2.attachContent()`/`attachEdgeContent()` use CAS (chunked, CDC-deduped, optionally encrypted) when `blobStorage` is injected; fall back to raw `persistence.writeBlob()` without it. `getContent()`/`getEdgeContent()` retrieve via `blobStorage.retrieve()` with automatic fallback to raw Git blobs for pre-CAS content. Wired through `WarpGraph`, `Writer`, and all patch creation paths. 16 new tests (4909 total). | M |
+| B161 | ✅ **ENCRYPTED SEEK CACHE** — `CasSeekCacheAdapter` now accepts optional `encryptionKey` constructor param. When set, all `store()` and `restore()` calls pass the key to git-cas for AES-256-GCM encryption/decryption. 6 new tests (52 total). | S |
+| B162 | ✅ **OBSERVABILITY ALIGNMENT** — new `LoggerObservabilityBridge` adapter translates git-cas `ObservabilityPort` calls (metric, log, span) into git-warp `LoggerPort` calls. `CasSeekCacheAdapter` accepts optional `logger` param; when provided, CAS operations surface through git-warp's structured logging. 7 new bridge tests + 2 adapter tests. | M |
+| B163 | ✅ **STREAMING RESTORE FOR LARGE STATES** — `CasSeekCacheAdapter.get()` now prefers `cas.restoreStream()` (git-cas v4+) for I/O pipelining, accumulating chunks via async iterator. Falls back to `cas.restore()` for older git-cas. 2 new tests (58 total). | M |
+| B164 | ✅ **GRAPH ENCRYPTION AT REST** — New `patchBlobStorage` option on `WarpGraph.open()`. When a `BlobStoragePort` (e.g. `CasBlobAdapter` with encryption key) is injected, patch CBOR is written/read via CAS instead of raw Git blobs. `eg-encrypted: true` trailer marks encrypted commits. All 6 read sites + write path threaded. `EncryptionError` thrown when reading encrypted patches without key. Mixed encrypted/plain patches supported via backward-compatible fallback. 14 new tests (4969 total). | L |
+
### Uncategorized / Platform
| ID | Item | Effort |
@@ -352,6 +370,16 @@ Internal chain: B97 (P0, Wave 1) → B85 → B57. B123 is the largest — may sp
18. **B156** — structural diff (if H1 is in play)
19. Docs/process items (B34, B35, B76, B79, B102–B104, B129, B147) folded into related PRs
+#### Wave 7: git-cas Modernization (P7)
+
+20. **B158** — upgrade `@git-stunts/git-cas` to v5 (unblocks all P7 items)
+21. **B159** — CDC chunking for seek cache (quick win after B158)
+22. **B161** — encrypted seek cache
+23. **B160** — blob attachments via CAS
+24. **B162** — observability alignment
+25. **B163** — streaming restore for large states
+26. **B164** — graph encryption at rest (largest, last)
+
### Dependency Chains
```text
@@ -366,6 +394,13 @@ B154 (P0) ─────┘ adjList dedup (quick fix)
B151 (P4) ──→ B152 (P4) closure streaming → full async generator API
B36 (P1) ──→ (improves velocity for B99, B19, B22, future tests)
+
+B158 (P7) ──→ B159 (P7) CDC seek cache
+ ├──→ B160 (P7) blob attachments
+ ├──→ B161 (P7) encrypted seek cache
+ ├──→ B162 (P7) observability alignment
+ ├──→ B163 (P7) streaming restore
+ └──→ B164 (P7) graph encryption at rest
```
---
@@ -381,11 +416,11 @@ B36 (P1) ──→ (improves velocity for B99, B19, B22, future tests)
| **Milestone (M12)** | 18 | B66, B67, B70, B73, B75, B105–B115, B117, B118 |
| **Milestone (M13)** | 1 | B116 (internal: DONE; wire-format: DEFERRED) |
| **Milestone (M14)** | 16 | B130–B145 |
-| **Standalone** | 45 | B12, B19, B22, B28, B34–B37, B43, B48, B49, B53, B54, B57, B76, B79–B81, B83, B85–B88, B95–B99, B102–B104, B119, B123, B127–B129, B147, B149–B156 |
-| **Standalone (done)** | 29 | B26, B44, B46, B47, B50–B52, B55, B71, B72, B77, B78, B82, B84, B89–B94, B100, B120–B122, B124, B125, B126, B146, B148 |
+| **Standalone** | 46 | B12, B19, B22, B28, B34–B37, B43, B48, B49, B53, B54, B57, B76, B79–B81, B83, B85–B88, B95–B99, B102–B104, B119, B123, B127–B129, B147, B149–B156, B166 |
+| **Standalone (done)** | 39 | B26, B44, B46, B47, B50–B52, B55, B71, B72, B77, B78, B82, B84, B89–B94, B100, B120–B122, B124, B125, B126, B146, B148, B157, B158, B159, B160, B161, B162, B163, B164, B165, B167 |
| **Deferred** | 7 | B4, B7, B16, B20, B21, B27, B101 |
| **Rejected** | 7 | B5, B6, B13, B17, B18, B25, B45 |
-| **Total tracked** | **133** total; 29 standalone done | |
+| **Total tracked** | **144** total; 39 standalone done | |
### STANK.md Cross-Reference
diff --git a/bin/cli/commands/registry.js b/bin/cli/commands/registry.js
index 5b04ae94..d925f8b3 100644
--- a/bin/cli/commands/registry.js
+++ b/bin/cli/commands/registry.js
@@ -15,6 +15,7 @@ import handleTrust from './trust.js';
import handlePatch from './patch.js';
import handleTree from './tree.js';
import handleBisect from './bisect.js';
+import handleServe from './serve.js';
/** @type {Map<string, Function>} */
export const COMMANDS = new Map(/** @type {[string, Function][]} */ ([
@@ -35,4 +36,5 @@ export const COMMANDS = new Map(/** @type {[string, Function][]} */ ([
['bisect', handleBisect],
['view', handleView],
['install-hooks', handleInstallHooks],
+ ['serve', handleServe],
]));
diff --git a/bin/cli/commands/seek.js b/bin/cli/commands/seek.js
index 916804b5..72a5b0da 100644
--- a/bin/cli/commands/seek.js
+++ b/bin/cli/commands/seek.js
@@ -1,5 +1,6 @@
import { summarizeOps } from '../../../src/visualization/renderers/ascii/history.js';
import { diffStates } from '../../../src/domain/services/StateDiff.js';
+import { textEncode } from '../../../src/domain/utils/bytes.js';
import {
buildCursorActiveRef,
buildCursorSavedRef,
@@ -68,7 +69,7 @@ async function readSavedCursor(persistence, graphName, name) {
async function writeSavedCursor(persistence, graphName, name, cursor) {
const ref = buildCursorSavedRef(graphName, name);
const json = JSON.stringify(cursor);
- const oid = await persistence.writeBlob(Buffer.from(json, 'utf8'));
+ const oid = await persistence.writeBlob(textEncode(json));
await persistence.updateRef(ref, oid);
}
diff --git a/bin/cli/commands/serve.js b/bin/cli/commands/serve.js
new file mode 100644
index 00000000..032a9818
--- /dev/null
+++ b/bin/cli/commands/serve.js
@@ -0,0 +1,208 @@
+import process from 'node:process';
+import { resolve } from 'node:path';
+import { stat } from 'node:fs/promises';
+import { parseCommandArgs, usageError, notFoundError } from '../infrastructure.js';
+import { serveSchema } from '../schemas.js';
+import { createPersistence, listGraphNames } from '../shared.js';
+import WarpGraph from '../../../src/domain/WarpGraph.js';
+import WebCryptoAdapter from '../../../src/infrastructure/adapters/WebCryptoAdapter.js';
+import WarpServeService from '../../../src/domain/services/WarpServeService.js';
+
+/**
+ * Creates the appropriate WebSocket adapter for the current runtime.
+ *
+ * @param {string|null} [staticDir]
+ * @returns {Promise}
+ */
+async function createWsAdapter(staticDir) {
+ const opts = staticDir ? { staticDir } : {};
+ if (globalThis.Bun) {
+ const { default: BunWsAdapter } = await import(
+ '../../../src/infrastructure/adapters/BunWsAdapter.js'
+ );
+ return new BunWsAdapter(opts);
+ }
+ if (globalThis.Deno) {
+ const { default: DenoWsAdapter } = await import(
+ '../../../src/infrastructure/adapters/DenoWsAdapter.js'
+ );
+ return new DenoWsAdapter(opts);
+ }
+ const { default: NodeWsAdapter } = await import(
+ '../../../src/infrastructure/adapters/NodeWsAdapter.js'
+ );
+ return new NodeWsAdapter(opts);
+}
+
+/**
+ * Returns true when the host string resolves to the loopback interface.
+ *
+ * Wildcard addresses (`0.0.0.0`, `::`, `0:0:0:0:0:0:0:0`) bind to ALL
+ * interfaces — including public ones — and are intentionally NOT treated
+ * as loopback. {@link assertExposeSafety} will require `--expose` for them.
+ *
+ * @param {string} h
+ * @returns {boolean}
+ */
+function isLoopback(h) {
+ return h === '127.0.0.1' || h === '::1' || h === 'localhost' || h.startsWith('127.');
+}
+
+/** @typedef {import('../types.js').CliOptions} CliOptions */
+
+const SERVE_OPTIONS = {
+ port: { type: 'string', default: '3000' },
+ host: { type: 'string', default: '127.0.0.1' },
+ static: { type: 'string' },
+ expose: { type: 'boolean', default: false },
+ 'writer-id': { type: 'string' },
+};
+
+/**
+ * Opens WarpGraph instances for the specified graph names.
+ *
+ * @param {import('../types.js').Persistence} persistence
+ * @param {string[]} graphNames
+ * @param {string} writerId
+ * @returns {Promise<Array<WarpGraph>>}
+ */
+async function openGraphs(persistence, graphNames, writerId) {
+ const graphs = [];
+ for (const graphName of graphNames) {
+ const graph = await WarpGraph.open({
+ persistence: /** @type {import('../../../src/domain/types/WarpPersistence.js').CorePersistence} */ (/** @type {unknown} */ (persistence)),
+ graphName,
+ writerId,
+ crypto: new WebCryptoAdapter(),
+ });
+ graphs.push(graph);
+ }
+ return graphs;
+}
+
+/**
+ * Resolve and validate the `--static` directory, if provided.
+ *
+ * @param {string|undefined} raw
+ * @returns {Promise<string|null>}
+ */
+async function resolveStaticDir(raw) {
+ if (!raw) {
+ return null;
+ }
+ const dir = resolve(raw);
+ const st = await stat(dir).catch(() => null);
+ if (!st || !st.isDirectory()) {
+ throw usageError(`--static path is not a directory: ${raw}`);
+ }
+ return dir;
+}
+
+/**
+ * Determine which graphs to serve and validate the selection.
+ *
+ * @param {import('../types.js').Persistence} persistence
+ * @param {string|null} [graphOption]
+ * @returns {Promise<string[]>}
+ */
+async function resolveTargetGraphs(persistence, graphOption) {
+ const graphNames = await listGraphNames(persistence);
+ if (graphNames.length === 0) {
+ throw usageError('No WARP graphs found in this repository');
+ }
+ if (graphOption && !graphNames.includes(graphOption)) {
+ throw notFoundError(`Graph not found: ${graphOption}`);
+ }
+ return graphOption ? [graphOption] : graphNames;
+}
+
+/**
+ * Build a unique writerId from the host and requested port.
+ * When port is 0 the OS assigns an ephemeral port, so a timestamp
+ * component prevents collisions across successive invocations.
+ *
+ * @param {string} host
+ * @param {number} port
+ * @returns {string}
+ */
+function deriveWriterId(host, port) {
+ const portLabel = port === 0
+ ? `ephemeral-${Date.now().toString(36)}-${process.pid}`
+ : String(port);
+ return `serve-${host}-${portLabel}`.replace(/[^A-Za-z0-9._-]/g, '-');
+}
+
+/**
+ * Bracket an IPv6 host for use in URLs.
+ *
+ * @param {string} h
+ * @returns {string}
+ */
+function bracketHost(h) {
+ return h.includes(':') ? `[${h}]` : h;
+}
+
+/**
+ * Guards against binding to a non-loopback address without --expose.
+ *
+ * @param {string} host
+ * @param {boolean} expose
+ */
+function assertExposeSafety(host, expose) {
+ if (!isLoopback(host) && !expose) {
+ throw usageError(
+ `Binding to non-loopback address '${host}' exposes the server to the network. ` +
+ 'Pass --expose to confirm this is intentional.',
+ );
+ }
+}
+
+/**
+ * Logs startup information to stderr.
+ *
+ * @param {{url: string, targetGraphs: string[], staticDir: string|null, urlHost: string, port: number}} info
+ */
+function logStartup({ url, targetGraphs, staticDir, urlHost, port }) {
+ process.stderr.write(`Listening on ${url}\n`);
+ process.stderr.write(`Serving graph(s): ${targetGraphs.join(', ')}\n`);
+ if (staticDir) {
+ process.stderr.write(`Serving static files from ${staticDir}\n`);
+ process.stderr.write(`Open http://${urlHost}:${port} in your browser\n`);
+ }
+}
+
+/**
+ * Handles the `serve` command: starts a WebSocket server exposing
+ * graph(s) in the repository for browser-based viewing and mutation.
+ *
+ * @param {{options: CliOptions, args: string[]}} params
+ * @returns {Promise<{payload: {url: string, host: string, port: number, graphs: string[]}, close: () => Promise<void>}>}
+ */
+export default async function handleServe({ options, args }) {
+ const { values } = parseCommandArgs(args, SERVE_OPTIONS, serveSchema, { allowPositionals: false });
+ const { port, host, expose, writerId: explicitWriterId } = values;
+ assertExposeSafety(host, expose);
+
+ const staticDir = await resolveStaticDir(values.static);
+ const { persistence } = await createPersistence(options.repo);
+ const targetGraphs = await resolveTargetGraphs(persistence, options.graph);
+
+ const writerId = explicitWriterId || deriveWriterId(host, port);
+ const graphs = await openGraphs(persistence, targetGraphs, writerId);
+ const wsPort = await createWsAdapter(staticDir);
+ const service = new WarpServeService({ wsPort, graphs });
+ const addr = await service.listen(port, host);
+
+ const urlHost = bracketHost(addr.host);
+ const url = `ws://${urlHost}:${addr.port}`;
+ logStartup({ url, targetGraphs, staticDir, urlHost, port: addr.port });
+
+ return {
+ payload: { url, host: addr.host, port: addr.port, graphs: targetGraphs },
+ // WarpServeService.close() unsubscribes all graph subscriptions and
+ // shuts down the WebSocket server. WarpGraph/GitGraphAdapter hold no
+ // long-lived resources beyond in-memory state, so process exit is
+ // sufficient for their cleanup.
+ close: () => service.close(),
+ };
+}
diff --git a/bin/cli/infrastructure.js b/bin/cli/infrastructure.js
index bcd2f21e..3e257ce4 100644
--- a/bin/cli/infrastructure.js
+++ b/bin/cli/infrastructure.js
@@ -50,6 +50,12 @@ Commands:
patch Decode and inspect raw patches
tree ASCII tree traversal from root nodes
bisect Binary search for first bad patch in writer history
+ serve Start WebSocket server for browser-based graph viewer
+ --port Port to bind (default: 3000, 0 = OS-assigned)
+ --host Bind address (default: 127.0.0.1)
+ --expose Allow binding to non-loopback addresses
+ --static Serve static files (SPA) on the same port
+ --writer-id Explicit writer identity (default: derived from host:port)
view Interactive TUI graph browser (requires @git-stunts/git-warp-tui)
install-hooks Install post-merge git hook
@@ -154,7 +160,7 @@ export function notFoundError(message) {
return new CliError(message, { code: 'E_NOT_FOUND', exitCode: EXIT_CODES.NOT_FOUND });
}
-export const KNOWN_COMMANDS = ['info', 'query', 'path', 'history', 'check', 'doctor', 'materialize', 'seek', 'verify-audit', 'verify-index', 'reindex', 'trust', 'patch', 'tree', 'bisect', 'install-hooks', 'view'];
+export const KNOWN_COMMANDS = ['info', 'query', 'path', 'history', 'check', 'doctor', 'materialize', 'seek', 'verify-audit', 'verify-index', 'reindex', 'trust', 'patch', 'tree', 'bisect', 'install-hooks', 'serve', 'view'];
const BASE_OPTIONS = {
repo: { type: 'string', short: 'r' },
diff --git a/bin/cli/schemas.js b/bin/cli/schemas.js
index 773788db..67dfe39e 100644
--- a/bin/cli/schemas.js
+++ b/bin/cli/schemas.js
@@ -204,3 +204,21 @@ export const verifyIndexSchema = z.object({
// No command-level options; schema exists for forward compatibility
export const reindexSchema = z.object({}).strict();
+
+// ============================================================================
+// Serve
+// ============================================================================
+
+export const serveSchema = z.object({
+ port: z.coerce.number().int().min(0).max(65535).default(3000),
+ host: z.string().min(1).default('127.0.0.1'),
+ static: z.string().min(1, 'Missing value for --static').optional(),
+ expose: z.boolean().default(false),
+ 'writer-id': z.string().min(1, 'Missing value for --writer-id').regex(/^[A-Za-z0-9._-]+$/, 'writer-id must contain only [A-Za-z0-9._-]').optional(),
+}).strict().transform((val) => ({
+ port: val.port,
+ host: val.host,
+ static: val.static,
+ expose: val.expose,
+ writerId: val['writer-id'],
+}));
diff --git a/bin/cli/shared.js b/bin/cli/shared.js
index 7bfbd3c0..16e35d3a 100644
--- a/bin/cli/shared.js
+++ b/bin/cli/shared.js
@@ -3,6 +3,7 @@ import path from 'node:path';
import process from 'node:process';
import readline from 'node:readline';
import { execFileSync } from 'node:child_process';
+import { textEncode } from '../../src/domain/utils/bytes.js';
// @ts-expect-error — no type declarations for @git-stunts/plumbing
import GitPlumbing, { ShellRunnerFactory } from '@git-stunts/plumbing';
import WarpGraph from '../../src/domain/WarpGraph.js';
@@ -27,7 +28,7 @@ import { usageError, notFoundError } from './infrastructure.js';
export async function createPersistence(repoPath) {
const runner = ShellRunnerFactory.create();
const plumbing = new GitPlumbing({ cwd: repoPath, runner });
- const persistence = new GitGraphAdapter({ plumbing });
+ const persistence = /** @type {Persistence} */ (/** @type {unknown} */ (new GitGraphAdapter({ plumbing })));
const ping = await persistence.ping();
if (!ping.ok) {
throw usageError(`Repository not accessible: ${repoPath}`);
@@ -161,7 +162,7 @@ export async function readActiveCursor(persistence, graphName) {
export async function writeActiveCursor(persistence, graphName, cursor) {
const ref = buildCursorActiveRef(graphName);
const json = JSON.stringify(cursor);
- const oid = await persistence.writeBlob(Buffer.from(json, 'utf8'));
+ const oid = await persistence.writeBlob(textEncode(json));
await persistence.updateRef(ref, oid);
}
diff --git a/bin/cli/types.js b/bin/cli/types.js
index 54144106..c8550d05 100644
--- a/bin/cli/types.js
+++ b/bin/cli/types.js
@@ -4,8 +4,8 @@
* @property {(ref: string) => Promise} readRef
* @property {(ref: string, oid: string) => Promise} updateRef
* @property {(ref: string) => Promise} deleteRef
- * @property {(oid: string) => Promise} readBlob
- * @property {(buf: Buffer) => Promise} writeBlob
+ * @property {(oid: string) => Promise<Uint8Array|null>} readBlob
+ * @property {(buf: Uint8Array) => Promise<string>} writeBlob
* @property {(sha: string) => Promise<{date?: string|null}>} getNodeInfo
* @property {(sha: string) => Promise} nodeExists
* @property {(sha: string, coverageSha: string) => Promise} isAncestor
diff --git a/bin/presenters/index.js b/bin/presenters/index.js
index 277b06d4..6de413e0 100644
--- a/bin/presenters/index.js
+++ b/bin/presenters/index.js
@@ -106,7 +106,7 @@ const VIEW_RENDERERS = new Map(/** @type {[string, function(unknown): string][]}
['info', renderInfoView],
['check', renderCheckView],
['history', renderHistoryView],
- ['path', renderPathView],
+ ['path', (/** @type {Parameters<typeof renderPathView>[0]} */ payload) => renderPathView(payload, { terminalWidth: process.stdout.columns })],
['materialize', renderMaterializeView],
['seek', renderSeekView],
]));
diff --git a/bin/warp-graph.js b/bin/warp-graph.js
index 39c5ee86..2f49da32 100755
--- a/bin/warp-graph.js
+++ b/bin/warp-graph.js
@@ -70,6 +70,26 @@ async function main() {
const format = options.ndjson ? 'ndjson' : options.json ? 'json' : 'text';
present(/** @type {Record<string, unknown>} */ (normalized.payload), { format, command, view: /** @type {string | null | boolean} */ (options.view ?? null) });
}
+
+ // Long-running commands (e.g. serve) return a `close` function.
+ // Wait for SIGINT/SIGTERM instead of exiting immediately.
+ const close = result && typeof result === 'object' && 'close' in /** @type {Record<string, unknown>} */ (result)
+ ? /** @type {() => Promise<void>} */ (/** @type {Record<string, unknown>} */ (result).close)
+ : null;
+
+ if (close) {
+ let closing = false;
+ const shutdown = async () => {
+ if (closing) { return; }
+ closing = true;
+ await close();
+ process.exit(EXIT_CODES.OK);
+ };
+ process.on('SIGINT', () => { shutdown().catch(() => process.exit(1)); });
+ process.on('SIGTERM', () => { shutdown().catch(() => process.exit(1)); });
+ return; // Keep the process alive
+ }
+
// Use process.exit() to avoid waiting for fire-and-forget I/O (e.g. seek cache writes).
process.exit(normalized.exitCode ?? EXIT_CODES.OK);
}
diff --git a/browser.d.ts b/browser.d.ts
new file mode 100644
index 00000000..2faea164
--- /dev/null
+++ b/browser.d.ts
@@ -0,0 +1,42 @@
+/**
+ * Browser entry point for @git-stunts/git-warp.
+ *
+ * Re-exports only browser-safe code — no node:crypto, node:stream,
+ * or @git-stunts/plumbing imports.
+ */
+
+export { default as WarpGraph } from './index';
+export {
+ GraphNode,
+ InMemoryGraphAdapter,
+ WebCryptoAdapter,
+ EncryptionError,
+ ForkError,
+ QueryError,
+ StorageError,
+ TraversalError,
+ SyncError,
+} from './index';
+
+/**
+ * Base error class for all WARP domain errors.
+ */
+export class WarpError extends Error {
+ readonly name: string;
+ readonly code: string;
+ readonly context: Record<string, unknown>;
+ constructor(message: string, options?: { code?: string; context?: Record<string, unknown> });
+}
+
+/**
+ * Creates an empty VersionVector (Map).
+ */
+export function createVersionVector(): Map<string, number>;
+
+/**
+ * Generates a new canonical writer ID.
+ *
+ * @param options - Options with optional custom RNG for testing
+ * @returns A canonical writer ID (e.g., 'w_0123456789abcdefghjkmnpqrs')
+ */
+export function generateWriterId(options?: { randomBytes?: (n: number) => Uint8Array }): string;
diff --git a/browser.js b/browser.js
new file mode 100644
index 00000000..2d2094ef
--- /dev/null
+++ b/browser.js
@@ -0,0 +1,54 @@
+/**
+ * Browser entry point for @git-stunts/git-warp.
+ *
+ * Re-exports only browser-safe code — no node:crypto, node:stream,
+ * or @git-stunts/plumbing imports. Use with InMemoryGraphAdapter
+ * and WebCryptoAdapter for fully in-browser WARP graph operation.
+ *
+ * @module browser
+ *
+ * @example
+ * ```js
+ * import {
+ * WarpGraph,
+ * InMemoryGraphAdapter,
+ * WebCryptoAdapter,
+ * generateWriterId,
+ * } from '@git-stunts/git-warp/browser';
+ * import { sha1sync } from '@git-stunts/git-warp/sha1sync';
+ *
+ * const adapter = new InMemoryGraphAdapter({ hash: sha1sync });
+ * const crypto = new WebCryptoAdapter();
+ * const graph = await WarpGraph.open({
+ * persistence: adapter,
+ * graphName: 'demo',
+ * writerId: generateWriterId(),
+ * crypto,
+ * });
+ * ```
+ */
+
+// Core API
+export { default as WarpGraph } from './src/domain/WarpGraph.js';
+export { default as GraphNode } from './src/domain/entities/GraphNode.js';
+
+// Browser-compatible adapters
+export { default as InMemoryGraphAdapter } from './src/infrastructure/adapters/InMemoryGraphAdapter.js';
+export { default as WebCryptoAdapter } from './src/infrastructure/adapters/WebCryptoAdapter.js';
+
+// CRDT primitives
+export { createVersionVector } from './src/domain/crdt/VersionVector.js';
+
+// Errors
+export { default as WarpError } from './src/domain/errors/WarpError.js';
+export {
+ EncryptionError,
+ ForkError,
+ QueryError,
+ StorageError,
+ TraversalError,
+ SyncError,
+} from './src/domain/errors/index.js';
+
+// Utilities
+export { generateWriterId } from './src/domain/utils/WriterId.js';
diff --git a/contracts/type-surface.m8.json b/contracts/type-surface.m8.json
index eb89c7b4..06507b8b 100644
--- a/contracts/type-surface.m8.json
+++ b/contracts/type-surface.m8.json
@@ -1,8 +1,781 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
- "$comment": "M8 IRONCLAD type surface manifest — source of truth for T3 (index.d.ts) and T9 (consumer test)",
+ "$comment": "M8 IRONCLAD type surface manifest \u2014 source of truth for T3 (index.d.ts) and T9 (consumer test)",
"version": 1,
"exports": {
+ "AggregateResult": {
+ "kind": "interface"
+ },
+ "AggregateSpec": {
+ "kind": "interface"
+ },
+ "AncestorOptions": {
+ "kind": "interface"
+ },
+ "ApplySyncResult": {
+ "kind": "interface"
+ },
+ "BTR": {
+ "kind": "interface"
+ },
+ "BTRVerificationResult": {
+ "kind": "interface"
+ },
+ "BisectResult": {
+ "kind": "type"
+ },
+ "BisectService": {
+ "kind": "class"
+ },
+ "BitmapIndexBuilder": {
+ "kind": "class"
+ },
+ "BitmapIndexReader": {
+ "kind": "class"
+ },
+ "BlobStoragePort": {
+ "kind": "class"
+ },
+ "BulkNodeSpec": {
+ "kind": "interface"
+ },
+ "BunHttpAdapter": {
+ "kind": "class"
+ },
+ "CONTENT_PROPERTY_KEY": {
+ "kind": "const"
+ },
+ "ClockAdapter": {
+ "kind": "class"
+ },
+ "ClockPort": {
+ "kind": "abstract-class"
+ },
+ "CommitDagTraversalService": {
+ "kind": "class"
+ },
+ "CommonAncestorsOptions": {
+ "kind": "interface"
+ },
+ "ComposeWormholesOptions": {
+ "kind": "interface"
+ },
+ "ConsoleLogger": {
+ "kind": "class"
+ },
+ "CreateBTROptions": {
+ "kind": "interface"
+ },
+ "CreateNodeOptions": {
+ "kind": "interface"
+ },
+ "CreateWormholeOptions": {
+ "kind": "interface"
+ },
+ "CryptoPort": {
+ "kind": "abstract-class"
+ },
+ "DenoHttpAdapter": {
+ "kind": "class"
+ },
+ "EdgeChange": {
+ "kind": "interface"
+ },
+ "EdgeWeightFn": {
+ "kind": "type"
+ },
+ "EncryptionError": {
+ "kind": "class"
+ },
+ "EventId": {
+ "kind": "interface"
+ },
+ "ForkError": {
+ "kind": "class"
+ },
+ "GCExecuteResult": {
+ "kind": "interface"
+ },
+ "GCMetrics": {
+ "kind": "interface"
+ },
+ "GCPolicyConfig": {
+ "kind": "interface"
+ },
+ "GitGraphAdapter": {
+ "kind": "class"
+ },
+ "GitPlumbing": {
+ "kind": "interface"
+ },
+ "GraphNode": {
+ "kind": "class"
+ },
+ "GraphPersistencePort": {
+ "kind": "abstract-class"
+ },
+ "HealthCheckService": {
+ "kind": "class"
+ },
+ "HealthResult": {
+ "kind": "interface"
+ },
+ "HealthStatus": {
+ "kind": "const"
+ },
+ "HopOptions": {
+ "kind": "interface"
+ },
+ "HttpServerPort": {
+ "kind": "abstract-class"
+ },
+ "InMemoryGraphAdapter": {
+ "kind": "class"
+ },
+ "IndexError": {
+ "kind": "class"
+ },
+ "IndexHealth": {
+ "kind": "interface"
+ },
+ "IndexRebuildService": {
+ "kind": "class"
+ },
+ "IndexStoragePort": {
+ "kind": "abstract-class"
+ },
+ "IterateNodesOptions": {
+ "kind": "interface"
+ },
+ "JoinReceipt": {
+ "kind": "interface"
+ },
+ "ListNodesOptions": {
+ "kind": "interface"
+ },
+ "LoadOptions": {
+ "kind": "interface"
+ },
+ "LogLevel": {
+ "kind": "const"
+ },
+ "LogLevelValue": {
+ "kind": "type"
+ },
+ "LoggerPort": {
+ "kind": "abstract-class"
+ },
+ "LogicalTraversal": {
+ "kind": "interface"
+ },
+ "MaybeGCResult": {
+ "kind": "interface"
+ },
+ "NoOpLogger": {
+ "kind": "class"
+ },
+ "NodeCryptoAdapter": {
+ "kind": "class"
+ },
+ "NodeInfo": {
+ "kind": "interface"
+ },
+ "NodeWeightFn": {
+ "kind": "type"
+ },
+ "ObserverConfig": {
+ "kind": "interface"
+ },
+ "ObserverView": {
+ "kind": "class",
+ "instance": {
+ "hasNode": {
+ "async": true,
+ "params": [
+ {
+ "name": "nodeId",
+ "type": "string"
+ }
+ ],
+ "returns": "Promise"
+ },
+ "getNodes": {
+ "async": true,
+ "params": [],
+ "returns": "Promise"
+ },
+ "getNodeProps": {
+ "async": true,
+ "params": [
+ {
+ "name": "nodeId",
+ "type": "string"
+ }
+ ],
+ "returns": "Promise | null>"
+ },
+ "getEdges": {
+ "async": true,
+ "params": [],
+ "returns": "Promise }>>"
+ },
+ "query": {
+ "params": [],
+ "returns": "QueryBuilder"
+ }
+ },
+ "properties": {
+ "name": {
+ "type": "string",
+ "readonly": true
+ },
+ "traverse": {
+ "type": "LogicalTraversal"
+ }
+ }
+ },
+ "OpEdgeAdd": {
+ "kind": "interface"
+ },
+ "OpEdgeTombstone": {
+ "kind": "interface"
+ },
+ "OpNodeAdd": {
+ "kind": "interface"
+ },
+ "OpNodeTombstone": {
+ "kind": "interface"
+ },
+ "OpOutcome": {
+ "kind": "interface"
+ },
+ "OpPropSet": {
+ "kind": "interface"
+ },
+ "OperationAbortedError": {
+ "kind": "class"
+ },
+ "PatchBuilderV2": {
+ "kind": "class",
+ "note": "Not directly exported from index.js but referenced as return type of createPatch()",
+ "instance": {
+ "addNode": {
+ "params": [
+ {
+ "name": "nodeId",
+ "type": "string"
+ }
+ ],
+ "returns": "PatchBuilderV2"
+ },
+ "removeNode": {
+ "params": [
+ {
+ "name": "nodeId",
+ "type": "string"
+ }
+ ],
+ "returns": "PatchBuilderV2"
+ },
+ "addEdge": {
+ "params": [
+ {
+ "name": "from",
+ "type": "string"
+ },
+ {
+ "name": "to",
+ "type": "string"
+ },
+ {
+ "name": "label",
+ "type": "string"
+ }
+ ],
+ "returns": "PatchBuilderV2"
+ },
+ "removeEdge": {
+ "params": [
+ {
+ "name": "from",
+ "type": "string"
+ },
+ {
+ "name": "to",
+ "type": "string"
+ },
+ {
+ "name": "label",
+ "type": "string"
+ }
+ ],
+ "returns": "PatchBuilderV2"
+ },
+ "setProperty": {
+ "params": [
+ {
+ "name": "nodeId",
+ "type": "string"
+ },
+ {
+ "name": "key",
+ "type": "string"
+ },
+ {
+ "name": "value",
+ "type": "unknown"
+ }
+ ],
+ "returns": "PatchBuilderV2"
+ },
+ "setEdgeProperty": {
+ "params": [
+ {
+ "name": "from",
+ "type": "string"
+ },
+ {
+ "name": "to",
+ "type": "string"
+ },
+ {
+ "name": "label",
+ "type": "string"
+ },
+ {
+ "name": "key",
+ "type": "string"
+ },
+ {
+ "name": "value",
+ "type": "unknown"
+ }
+ ],
+ "returns": "PatchBuilderV2"
+ },
+ "attachContent": {
+ "async": true,
+ "params": [
+ {
+ "name": "nodeId",
+ "type": "string"
+ },
+ {
+ "name": "content",
+ "type": "Buffer | string"
+ }
+ ],
+ "returns": "Promise"
+ },
+ "attachEdgeContent": {
+ "async": true,
+ "params": [
+ {
+ "name": "from",
+ "type": "string"
+ },
+ {
+ "name": "to",
+ "type": "string"
+ },
+ {
+ "name": "label",
+ "type": "string"
+ },
+ {
+ "name": "content",
+ "type": "Buffer | string"
+ }
+ ],
+ "returns": "Promise"
+ },
+ "build": {
+ "params": [],
+ "returns": "PatchV2"
+ },
+ "commit": {
+ "async": true,
+ "params": [],
+ "returns": "Promise"
+ }
+ },
+ "properties": {
+ "opCount": {
+ "type": "number",
+ "readonly": true
+ }
+ }
+ },
+ "PatchEntry": {
+ "kind": "interface"
+ },
+ "PatchError": {
+ "kind": "class"
+ },
+ "PatchSession": {
+ "kind": "class",
+ "instance": {
+ "addNode": {
+ "params": [
+ {
+ "name": "nodeId",
+ "type": "string"
+ }
+ ],
+ "returns": "this"
+ },
+ "removeNode": {
+ "params": [
+ {
+ "name": "nodeId",
+ "type": "string"
+ }
+ ],
+ "returns": "this"
+ },
+ "addEdge": {
+ "params": [
+ {
+ "name": "from",
+ "type": "string"
+ },
+ {
+ "name": "to",
+ "type": "string"
+ },
+ {
+ "name": "label",
+ "type": "string"
+ }
+ ],
+ "returns": "this"
+ },
+ "removeEdge": {
+ "params": [
+ {
+ "name": "from",
+ "type": "string"
+ },
+ {
+ "name": "to",
+ "type": "string"
+ },
+ {
+ "name": "label",
+ "type": "string"
+ }
+ ],
+ "returns": "this"
+ },
+ "setProperty": {
+ "params": [
+ {
+ "name": "nodeId",
+ "type": "string"
+ },
+ {
+ "name": "key",
+ "type": "string"
+ },
+ {
+ "name": "value",
+ "type": "unknown"
+ }
+ ],
+ "returns": "this"
+ },
+ "setEdgeProperty": {
+ "params": [
+ {
+ "name": "from",
+ "type": "string"
+ },
+ {
+ "name": "to",
+ "type": "string"
+ },
+ {
+ "name": "label",
+ "type": "string"
+ },
+ {
+ "name": "key",
+ "type": "string"
+ },
+ {
+ "name": "value",
+ "type": "unknown"
+ }
+ ],
+ "returns": "this"
+ },
+ "attachContent": {
+ "async": true,
+ "params": [
+ {
+ "name": "nodeId",
+ "type": "string"
+ },
+ {
+ "name": "content",
+ "type": "Buffer | string"
+ }
+ ],
+ "returns": "Promise"
+ },
+ "attachEdgeContent": {
+ "async": true,
+ "params": [
+ {
+ "name": "from",
+ "type": "string"
+ },
+ {
+ "name": "to",
+ "type": "string"
+ },
+ {
+ "name": "label",
+ "type": "string"
+ },
+ {
+ "name": "content",
+ "type": "Buffer | string"
+ }
+ ],
+ "returns": "Promise"
+ },
+ "build": {
+ "params": [],
+ "returns": "PatchV2"
+ },
+ "commit": {
+ "async": true,
+ "params": [],
+ "returns": "Promise"
+ }
+ },
+ "properties": {
+ "opCount": {
+ "type": "number",
+ "readonly": true
+ }
+ }
+ },
+ "PatchV2": {
+ "kind": "interface"
+ },
+ "PathOptions": {
+ "kind": "interface"
+ },
+ "PathResult": {
+ "kind": "interface"
+ },
+ "PingResult": {
+ "kind": "interface"
+ },
+ "PropRemoved": {
+ "kind": "interface"
+ },
+ "PropSet": {
+ "kind": "interface"
+ },
+ "ProvenanceIndex": {
+ "kind": "class"
+ },
+ "ProvenancePayload": {
+ "kind": "class"
+ },
+ "QueryBuilder": {
+ "kind": "class",
+ "instance": {
+ "match": {
+ "params": [
+ {
+ "name": "pattern",
+ "type": "string"
+ }
+ ],
+ "returns": "QueryBuilder"
+ },
+ "where": {
+ "params": [
+ {
+ "name": "fn",
+ "type": "((node: QueryNodeSnapshot) => boolean) | Record"
+ }
+ ],
+ "returns": "QueryBuilder"
+ },
+ "outgoing": {
+ "params": [
+ {
+ "name": "label",
+ "type": "string",
+ "optional": true
+ },
+ {
+ "name": "options",
+ "type": "HopOptions",
+ "optional": true
+ }
+ ],
+ "returns": "QueryBuilder"
+ },
+ "incoming": {
+ "params": [
+ {
+ "name": "label",
+ "type": "string",
+ "optional": true
+ },
+ {
+ "name": "options",
+ "type": "HopOptions",
+ "optional": true
+ }
+ ],
+ "returns": "QueryBuilder"
+ },
+ "select": {
+ "params": [
+ {
+ "name": "fields",
+ "type": "Array<'id' | 'props'>",
+ "optional": true
+ }
+ ],
+ "returns": "QueryBuilder"
+ },
+ "aggregate": {
+ "params": [
+ {
+ "name": "spec",
+ "type": "AggregateSpec"
+ }
+ ],
+ "returns": "QueryBuilder"
+ },
+ "run": {
+ "async": true,
+ "params": [],
+ "returns": "Promise"
+ }
+ }
+ },
+ "QueryError": {
+ "kind": "class"
+ },
+ "QueryNodeSnapshot": {
+ "kind": "interface"
+ },
+ "QueryResultV1": {
+ "kind": "interface"
+ },
+ "RebuildOptions": {
+ "kind": "interface"
+ },
+ "RepositoryHealth": {
+ "kind": "interface"
+ },
+ "SchemaUnsupportedError": {
+ "kind": "class"
+ },
+ "SeekCachePort": {
+ "kind": "abstract-class"
+ },
+ "ShardCorruptionError": {
+ "kind": "class"
+ },
+ "ShardLoadError": {
+ "kind": "class"
+ },
+ "ShardValidationError": {
+ "kind": "class"
+ },
+ "StateDiffResult": {
+ "kind": "interface"
+ },
+ "StorageError": {
+ "kind": "class"
+ },
+ "SyncAuthClientOptions": {
+ "kind": "interface"
+ },
+ "SyncAuthServerOptions": {
+ "kind": "interface"
+ },
+ "SyncError": {
+ "kind": "class"
+ },
+ "SyncRequest": {
+ "kind": "interface"
+ },
+ "SyncResponse": {
+ "kind": "interface"
+ },
+ "TICK_RECEIPT_OP_TYPES": {
+ "kind": "const"
+ },
+ "TICK_RECEIPT_RESULT_TYPES": {
+ "kind": "const"
+ },
+ "TemporalNodeSnapshot": {
+ "kind": "interface"
+ },
+ "TemporalQuery": {
+ "kind": "interface"
+ },
+ "TickReceipt": {
+ "kind": "interface"
+ },
+ "TickReceiptOpType": {
+ "kind": "type"
+ },
+ "TickReceiptResult": {
+ "kind": "type"
+ },
+ "TopologicalSortOptions": {
+ "kind": "interface"
+ },
+ "TranslationCostBreakdown": {
+ "kind": "interface"
+ },
+ "TranslationCostResult": {
+ "kind": "interface"
+ },
+ "TraversalDirection": {
+ "kind": "type"
+ },
+ "TraversalError": {
+ "kind": "class"
+ },
+ "TraversalNode": {
+ "kind": "interface"
+ },
+ "TraversalOptions": {
+ "kind": "interface"
+ },
+ "TraversalService": {
+ "kind": "class",
+ "deprecated": true,
+ "alias": "CommitDagTraversalService"
+ },
+ "TraverseFacadeOptions": {
+ "kind": "interface"
+ },
+ "ValueRef": {
+ "kind": "type"
+ },
+ "ValueRefBlob": {
+ "kind": "interface"
+ },
+ "ValueRefInline": {
+ "kind": "interface"
+ },
+ "VerifyBTROptions": {
+ "kind": "interface"
+ },
"WarpGraph": {
"kind": "class",
"default": true,
@@ -27,85 +800,166 @@
"patch": {
"async": true,
"params": [
- { "name": "build", "type": "(patch: PatchBuilderV2) => void | Promise" }
+ {
+ "name": "build",
+ "type": "(patch: PatchBuilderV2) => void | Promise"
+ }
],
"returns": "Promise"
},
"materialize": {
"async": true,
"params": [
- { "name": "options", "type": "{ receipts?: boolean; ceiling?: number }", "optional": true }
+ {
+ "name": "options",
+ "type": "{ receipts?: boolean; ceiling?: number }",
+ "optional": true
+ }
],
"returns": "Promise",
"overloads": [
{
- "params": [{ "name": "options", "type": "{ receipts: true; ceiling?: number }" }],
+ "params": [
+ {
+ "name": "options",
+ "type": "{ receipts: true; ceiling?: number }"
+ }
+ ],
"returns": "Promise<{ state: WarpStateV5; receipts: TickReceipt[] }>"
},
{
- "params": [{ "name": "options", "type": "{ receipts?: false; ceiling?: number }", "optional": true }],
+ "params": [
+ {
+ "name": "options",
+ "type": "{ receipts?: false; ceiling?: number }",
+ "optional": true
+ }
+ ],
"returns": "Promise"
}
]
},
"materializeAt": {
"async": true,
- "params": [{ "name": "checkpointSha", "type": "string" }],
+ "params": [
+ {
+ "name": "checkpointSha",
+ "type": "string"
+ }
+ ],
"returns": "Promise"
},
"hasNode": {
"async": true,
- "params": [{ "name": "nodeId", "type": "string" }],
+ "params": [
+ {
+ "name": "nodeId",
+ "type": "string"
+ }
+ ],
"returns": "Promise"
},
"getNodeProps": {
"async": true,
- "params": [{ "name": "nodeId", "type": "string" }],
+ "params": [
+ {
+ "name": "nodeId",
+ "type": "string"
+ }
+ ],
"returns": "Promise | null>"
},
"getEdgeProps": {
"async": true,
"params": [
- { "name": "from", "type": "string" },
- { "name": "to", "type": "string" },
- { "name": "label", "type": "string" }
+ {
+ "name": "from",
+ "type": "string"
+ },
+ {
+ "name": "to",
+ "type": "string"
+ },
+ {
+ "name": "label",
+ "type": "string"
+ }
],
"returns": "Promise | null>"
},
"getContentOid": {
"async": true,
- "params": [{ "name": "nodeId", "type": "string" }],
+ "params": [
+ {
+ "name": "nodeId",
+ "type": "string"
+ }
+ ],
"returns": "Promise"
},
"getContent": {
"async": true,
- "params": [{ "name": "nodeId", "type": "string" }],
+ "params": [
+ {
+ "name": "nodeId",
+ "type": "string"
+ }
+ ],
"returns": "Promise"
},
"getEdgeContentOid": {
"async": true,
"params": [
- { "name": "from", "type": "string" },
- { "name": "to", "type": "string" },
- { "name": "label", "type": "string" }
+ {
+ "name": "from",
+ "type": "string"
+ },
+ {
+ "name": "to",
+ "type": "string"
+ },
+ {
+ "name": "label",
+ "type": "string"
+ }
],
"returns": "Promise"
},
"getEdgeContent": {
"async": true,
"params": [
- { "name": "from", "type": "string" },
- { "name": "to", "type": "string" },
- { "name": "label", "type": "string" }
+ {
+ "name": "from",
+ "type": "string"
+ },
+ {
+ "name": "to",
+ "type": "string"
+ },
+ {
+ "name": "label",
+ "type": "string"
+ }
],
"returns": "Promise"
},
"neighbors": {
"async": true,
"params": [
- { "name": "nodeId", "type": "string" },
- { "name": "direction", "type": "'outgoing' | 'incoming' | 'both'", "optional": true },
- { "name": "edgeLabel", "type": "string", "optional": true }
+ {
+ "name": "nodeId",
+ "type": "string"
+ },
+ {
+ "name": "direction",
+ "type": "'outgoing' | 'incoming' | 'both'",
+ "optional": true
+ },
+ {
+ "name": "edgeLabel",
+ "type": "string",
+ "optional": true
+ }
],
"returns": "Promise>"
},
@@ -157,16 +1011,28 @@
"observer": {
"async": true,
"params": [
- { "name": "name", "type": "string" },
- { "name": "config", "type": "ObserverConfig" }
+ {
+ "name": "name",
+ "type": "string"
+ },
+ {
+ "name": "config",
+ "type": "ObserverConfig"
+ }
],
"returns": "Promise"
},
"translationCost": {
"async": true,
"params": [
- { "name": "configA", "type": "ObserverConfig" },
- { "name": "configB", "type": "ObserverConfig" }
+ {
+ "name": "configA",
+ "type": "ObserverConfig"
+ },
+ {
+ "name": "configB",
+ "type": "ObserverConfig"
+ }
],
"returns": "Promise"
},
@@ -183,8 +1049,15 @@
"syncWith": {
"async": true,
"params": [
- { "name": "remote", "type": "string | WarpGraph" },
- { "name": "options", "type": "SyncWithOptions", "optional": true }
+ {
+ "name": "remote",
+ "type": "string | WarpGraph"
+ },
+ {
+ "name": "options",
+ "type": "SyncWithOptions",
+ "optional": true
+ }
],
"returns": "Promise<{ applied: number; attempts: number; skippedWriters: Array<{ writerId: string; reason: string; localSha: string; remoteSha: string | null }>; state?: WarpStateV5 }>"
},
@@ -201,35 +1074,65 @@
"createWormhole": {
"async": true,
"params": [
- { "name": "fromSha", "type": "string" },
- { "name": "toSha", "type": "string" }
+ {
+ "name": "fromSha",
+ "type": "string"
+ },
+ {
+ "name": "toSha",
+ "type": "string"
+ }
],
"returns": "Promise"
},
"patchesFor": {
"async": true,
- "params": [{ "name": "entityId", "type": "string" }],
+ "params": [
+ {
+ "name": "entityId",
+ "type": "string"
+ }
+ ],
"returns": "Promise"
},
"materializeSlice": {
"async": true,
"params": [
- { "name": "nodeId", "type": "string" },
- { "name": "options", "type": "{ receipts?: boolean }", "optional": true }
+ {
+ "name": "nodeId",
+ "type": "string"
+ },
+ {
+ "name": "options",
+ "type": "{ receipts?: boolean }",
+ "optional": true
+ }
],
"returns": "Promise<{ state: WarpStateV5; patchCount: number; receipts?: TickReceipt[] }>"
},
"getWriterPatches": {
"async": true,
"params": [
- { "name": "writerId", "type": "string" },
- { "name": "stopAtSha", "type": "string | null", "optional": true }
+ {
+ "name": "writerId",
+ "type": "string"
+ },
+ {
+ "name": "stopAtSha",
+ "type": "string | null",
+ "optional": true
+ }
],
"returns": "Promise>"
},
"join": {
"async": false,
- "params": [{ "name": "otherState", "type": "WarpStateV5" }],
+ "params": [
+ {
+ "name": "otherState",
+ "type": "WarpStateV5"
+ }
+ ],
"returns": "{ state: WarpStateV5; receipt: JoinReceipt }"
},
"subscribe": {
@@ -245,7 +1148,10 @@
"watch": {
"async": false,
"params": [
- { "name": "pattern", "type": "string" },
+ {
+ "name": "pattern",
+ "type": "string"
+ },
{
"name": "options",
"type": "{ onChange: (diff: StateDiffResult) => void; onError?: (error: Error) => void; poll?: number }"
@@ -260,29 +1166,54 @@
},
"processSyncRequest": {
"async": true,
- "params": [{ "name": "request", "type": "SyncRequest" }],
+ "params": [
+ {
+ "name": "request",
+ "type": "SyncRequest"
+ }
+ ],
"returns": "Promise"
},
"applySyncResponse": {
"async": false,
- "params": [{ "name": "response", "type": "SyncResponse" }],
+ "params": [
+ {
+ "name": "response",
+ "type": "SyncResponse"
+ }
+ ],
"returns": "ApplySyncResult"
},
"syncNeeded": {
"async": true,
- "params": [{ "name": "remoteFrontier", "type": "Map" }],
+ "params": [
+ {
+ "name": "remoteFrontier",
+ "type": "Map"
+ }
+ ],
"returns": "Promise"
},
"writer": {
"async": true,
- "params": [{ "name": "writerId", "type": "string", "optional": true }],
+ "params": [
+ {
+ "name": "writerId",
+ "type": "string",
+ "optional": true
+ }
+ ],
"returns": "Promise"
},
"createWriter": {
"async": true,
"deprecated": true,
"params": [
- { "name": "opts", "type": "{ persist?: 'config' | 'none'; alias?: string }", "optional": true }
+ {
+ "name": "opts",
+ "type": "{ persist?: 'config' | 'none'; alias?: string }",
+ "optional": true
+ }
],
"returns": "Promise"
},
@@ -313,255 +1244,261 @@
}
},
"properties": {
- "graphName": { "type": "string", "readonly": true },
- "writerId": { "type": "string", "readonly": true },
- "seekCache": { "type": "SeekCachePort | null", "readonly": true },
- "provenanceIndex": { "type": "ProvenanceIndex | null", "readonly": true },
- "persistence": { "type": "GraphPersistencePort", "readonly": true, "getter": true },
- "onDeleteWithData": { "type": "'reject' | 'cascade' | 'warn'", "readonly": true, "getter": true },
- "gcPolicy": { "type": "GCPolicyConfig", "readonly": true, "getter": true },
- "temporal": { "type": "TemporalQuery", "readonly": true, "getter": true },
- "traverse": { "type": "LogicalTraversal" },
+ "graphName": {
+ "type": "string",
+ "readonly": true
+ },
+ "writerId": {
+ "type": "string",
+ "readonly": true
+ },
+ "seekCache": {
+ "type": "SeekCachePort | null",
+ "readonly": true
+ },
+ "provenanceIndex": {
+ "type": "ProvenanceIndex | null",
+ "readonly": true
+ },
+ "persistence": {
+ "type": "GraphPersistencePort",
+ "readonly": true,
+ "getter": true
+ },
+ "onDeleteWithData": {
+ "type": "'reject' | 'cascade' | 'warn'",
+ "readonly": true,
+ "getter": true
+ },
+ "gcPolicy": {
+ "type": "GCPolicyConfig",
+ "readonly": true,
+ "getter": true
+ },
+ "temporal": {
+ "type": "TemporalQuery",
+ "readonly": true,
+ "getter": true
+ },
+ "traverse": {
+ "type": "LogicalTraversal"
+ },
"setSeekCache": {
- "params": [{ "name": "cache", "type": "SeekCachePort | null" }],
+ "params": [
+ {
+ "name": "cache",
+ "type": "SeekCachePort | null"
+ }
+ ],
"returns": "void"
}
}
},
- "PatchBuilderV2": {
- "kind": "class",
- "note": "Not directly exported from index.js but referenced as return type of createPatch()",
- "instance": {
- "addNode": { "params": [{ "name": "nodeId", "type": "string" }], "returns": "PatchBuilderV2" },
- "removeNode": { "params": [{ "name": "nodeId", "type": "string" }], "returns": "PatchBuilderV2" },
- "addEdge": { "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }], "returns": "PatchBuilderV2" },
- "removeEdge": { "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }], "returns": "PatchBuilderV2" },
- "setProperty": { "params": [{ "name": "nodeId", "type": "string" }, { "name": "key", "type": "string" }, { "name": "value", "type": "unknown" }], "returns": "PatchBuilderV2" },
- "setEdgeProperty": { "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }, { "name": "key", "type": "string" }, { "name": "value", "type": "unknown" }], "returns": "PatchBuilderV2" },
- "attachContent": { "async": true, "params": [{ "name": "nodeId", "type": "string" }, { "name": "content", "type": "Buffer | string" }], "returns": "Promise" },
- "attachEdgeContent": { "async": true, "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }, { "name": "content", "type": "Buffer | string" }], "returns": "Promise" },
- "build": { "params": [], "returns": "PatchV2" },
- "commit": { "async": true, "params": [], "returns": "Promise" }
- },
- "properties": {
- "opCount": { "type": "number", "readonly": true }
- }
+ "WarpGraphStatus": {
+ "kind": "interface"
},
- "PatchSession": {
- "kind": "class",
- "instance": {
- "addNode": { "params": [{ "name": "nodeId", "type": "string" }], "returns": "this" },
- "removeNode": { "params": [{ "name": "nodeId", "type": "string" }], "returns": "this" },
- "addEdge": { "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }], "returns": "this" },
- "removeEdge": { "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }], "returns": "this" },
- "setProperty": { "params": [{ "name": "nodeId", "type": "string" }, { "name": "key", "type": "string" }, { "name": "value", "type": "unknown" }], "returns": "this" },
- "setEdgeProperty": { "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }, { "name": "key", "type": "string" }, { "name": "value", "type": "unknown" }], "returns": "this" },
- "attachContent": { "async": true, "params": [{ "name": "nodeId", "type": "string" }, { "name": "content", "type": "Buffer | string" }], "returns": "Promise" },
- "attachEdgeContent": { "async": true, "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }, { "name": "content", "type": "Buffer | string" }], "returns": "Promise" },
- "build": { "params": [], "returns": "PatchV2" },
- "commit": { "async": true, "params": [], "returns": "Promise" }
- },
- "properties": {
- "opCount": { "type": "number", "readonly": true }
- }
+ "WarpStateIndexBuilder": {
+ "kind": "class"
+ },
+ "WarpStateV5": {
+ "kind": "interface"
+ },
+ "WebCryptoAdapter": {
+ "kind": "class"
+ },
+ "WeightedCostSelector": {
+ "kind": "type"
+ },
+ "WormholeEdge": {
+ "kind": "interface"
+ },
+ "WormholeError": {
+ "kind": "class"
},
"Writer": {
"kind": "class",
"instance": {
- "head": { "async": true, "params": [], "returns": "Promise" },
- "beginPatch": { "async": true, "params": [], "returns": "Promise" },
- "commitPatch": { "async": true, "params": [{ "name": "build", "type": "(p: PatchSession) => void | Promise" }], "returns": "Promise" }
+ "head": {
+ "async": true,
+ "params": [],
+ "returns": "Promise"
+ },
+ "beginPatch": {
+ "async": true,
+ "params": [],
+ "returns": "Promise"
+ },
+ "commitPatch": {
+ "async": true,
+ "params": [
+ {
+ "name": "build",
+ "type": "(p: PatchSession) => void | Promise"
+ }
+ ],
+ "returns": "Promise"
+ }
},
"properties": {
- "writerId": { "type": "string", "readonly": true },
- "graphName": { "type": "string", "readonly": true }
+ "writerId": {
+ "type": "string",
+ "readonly": true
+ },
+ "graphName": {
+ "type": "string",
+ "readonly": true
+ }
}
},
- "QueryBuilder": {
- "kind": "class",
- "instance": {
- "match": { "params": [{ "name": "pattern", "type": "string" }], "returns": "QueryBuilder" },
- "where": { "params": [{ "name": "fn", "type": "((node: QueryNodeSnapshot) => boolean) | Record" }], "returns": "QueryBuilder" },
- "outgoing": { "params": [{ "name": "label", "type": "string", "optional": true }, { "name": "options", "type": "HopOptions", "optional": true }], "returns": "QueryBuilder" },
- "incoming": { "params": [{ "name": "label", "type": "string", "optional": true }, { "name": "options", "type": "HopOptions", "optional": true }], "returns": "QueryBuilder" },
- "select": { "params": [{ "name": "fields", "type": "Array<'id' | 'props'>", "optional": true }], "returns": "QueryBuilder" },
- "aggregate": { "params": [{ "name": "spec", "type": "AggregateSpec" }], "returns": "QueryBuilder" },
- "run": { "async": true, "params": [], "returns": "Promise" }
- }
+ "WriterError": {
+ "kind": "class"
},
- "ObserverView": {
- "kind": "class",
- "instance": {
- "hasNode": { "async": true, "params": [{ "name": "nodeId", "type": "string" }], "returns": "Promise" },
- "getNodes": { "async": true, "params": [], "returns": "Promise" },
- "getNodeProps": { "async": true, "params": [{ "name": "nodeId", "type": "string" }], "returns": "Promise | null>" },
- "getEdges": { "async": true, "params": [], "returns": "Promise }>>" },
- "query": { "params": [], "returns": "QueryBuilder" }
- },
- "properties": {
- "name": { "type": "string", "readonly": true },
- "traverse": { "type": "LogicalTraversal" }
- }
+ "buildWarpStateIndex": {
+ "kind": "function",
+ "async": true
+ },
+ "checkAborted": {
+ "kind": "function",
+ "params": [
+ {
+ "name": "signal",
+ "type": "AbortSignal",
+ "optional": true
+ },
+ {
+ "name": "operation",
+ "type": "string",
+ "optional": true
+ }
+ ],
+ "returns": "void"
+ },
+ "composeWormholes": {
+ "kind": "function",
+ "async": true
+ },
+ "computeStateHashV5": {
+ "kind": "function",
+ "async": true
+ },
+ "computeTranslationCost": {
+ "kind": "function"
+ },
+ "createBTR": {
+ "kind": "function",
+ "async": true
+ },
+ "createBlobValue": {
+ "kind": "function"
+ },
+ "createEdgeAdd": {
+ "kind": "function"
+ },
+ "createEdgeTombstone": {
+ "kind": "function"
+ },
+ "createEventId": {
+ "kind": "function"
+ },
+ "createInlineValue": {
+ "kind": "function"
+ },
+ "createNodeAdd": {
+ "kind": "function"
+ },
+ "createNodeTombstone": {
+ "kind": "function"
+ },
+ "createPropSet": {
+ "kind": "function"
+ },
+ "createTickReceipt": {
+ "kind": "function"
+ },
+ "createTimeoutSignal": {
+ "kind": "function",
+ "params": [
+ {
+ "name": "ms",
+ "type": "number"
+ }
+ ],
+ "returns": "AbortSignal"
+ },
+ "createWormhole": {
+ "kind": "function",
+ "async": true
+ },
+ "decodeEdgePropKey": {
+ "kind": "function",
+ "params": [
+ {
+ "name": "encoded",
+ "type": "string"
+ }
+ ],
+ "returns": "{ from: string; to: string; label: string; propKey: string }"
+ },
+ "deserializeBTR": {
+ "kind": "function"
+ },
+ "deserializeWormhole": {
+ "kind": "function"
+ },
+ "encodeEdgePropKey": {
+ "kind": "function",
+ "params": [
+ {
+ "name": "from",
+ "type": "string"
+ },
+ {
+ "name": "to",
+ "type": "string"
+ },
+ {
+ "name": "label",
+ "type": "string"
+ },
+ {
+ "name": "propKey",
+ "type": "string"
+ }
+ ],
+ "returns": "string"
+ },
+ "isEdgePropKey": {
+ "kind": "function",
+ "params": [
+ {
+ "name": "key",
+ "type": "string"
+ }
+ ],
+ "returns": "boolean"
+ },
+ "migrateV4toV5": {
+ "kind": "function"
+ },
+ "replayBTR": {
+ "kind": "function",
+ "async": true
+ },
+ "replayWormhole": {
+ "kind": "function"
+ },
+ "serializeBTR": {
+ "kind": "function"
+ },
+ "serializeWormhole": {
+ "kind": "function"
+ },
+ "tickReceiptCanonicalJson": {
+ "kind": "function"
},
- "ProvenancePayload": { "kind": "class" },
- "ProvenanceIndex": { "kind": "class" },
- "GitGraphAdapter": { "kind": "class" },
- "InMemoryGraphAdapter": { "kind": "class" },
- "GraphNode": { "kind": "class" },
- "BitmapIndexBuilder": { "kind": "class" },
- "BitmapIndexReader": { "kind": "class" },
- "IndexRebuildService": { "kind": "class" },
- "HealthCheckService": { "kind": "class" },
- "BisectService": { "kind": "class" },
- "CommitDagTraversalService": { "kind": "class" },
- "GraphPersistencePort": { "kind": "abstract-class" },
- "IndexStoragePort": { "kind": "abstract-class" },
- "LoggerPort": { "kind": "abstract-class" },
- "ClockPort": { "kind": "abstract-class" },
- "SeekCachePort": { "kind": "abstract-class" },
- "NoOpLogger": { "kind": "class" },
- "ConsoleLogger": { "kind": "class" },
- "ClockAdapter": { "kind": "class" },
- "HealthStatus": { "kind": "const" },
- "LogLevel": { "kind": "const" },
- "TraversalService": { "kind": "class", "deprecated": true, "alias": "CommitDagTraversalService" },
-
- "ForkError": { "kind": "class" },
- "IndexError": { "kind": "class" },
- "QueryError": { "kind": "class" },
- "SchemaUnsupportedError": { "kind": "class" },
- "ShardLoadError": { "kind": "class" },
- "ShardCorruptionError": { "kind": "class" },
- "ShardValidationError": { "kind": "class" },
- "StorageError": { "kind": "class" },
- "TraversalError": { "kind": "class" },
- "OperationAbortedError": { "kind": "class" },
- "SyncError": { "kind": "class" },
- "WormholeError": { "kind": "class" },
-
- "checkAborted": { "kind": "function", "params": [{ "name": "signal", "type": "AbortSignal", "optional": true }, { "name": "operation", "type": "string", "optional": true }], "returns": "void" },
- "createTimeoutSignal": { "kind": "function", "params": [{ "name": "ms", "type": "number" }], "returns": "AbortSignal" },
- "encodeEdgePropKey": { "kind": "function", "params": [{ "name": "from", "type": "string" }, { "name": "to", "type": "string" }, { "name": "label", "type": "string" }, { "name": "propKey", "type": "string" }], "returns": "string" },
- "decodeEdgePropKey": { "kind": "function", "params": [{ "name": "encoded", "type": "string" }], "returns": "{ from: string; to: string; label: string; propKey: string }" },
- "isEdgePropKey": { "kind": "function", "params": [{ "name": "key", "type": "string" }], "returns": "boolean" },
- "CONTENT_PROPERTY_KEY": { "kind": "const" },
- "computeTranslationCost": { "kind": "function" },
- "migrateV4toV5": { "kind": "function" },
-
- "createNodeAdd": { "kind": "function" },
- "createNodeTombstone": { "kind": "function" },
- "createEdgeAdd": { "kind": "function" },
- "createEdgeTombstone": { "kind": "function" },
- "createPropSet": { "kind": "function" },
- "createInlineValue": { "kind": "function" },
- "createBlobValue": { "kind": "function" },
- "createEventId": { "kind": "function" },
-
- "createTickReceipt": { "kind": "function" },
- "tickReceiptCanonicalJson": { "kind": "function" },
- "TICK_RECEIPT_OP_TYPES": { "kind": "const" },
- "TICK_RECEIPT_RESULT_TYPES": { "kind": "const" },
-
- "createBTR": { "kind": "function", "async": true },
- "verifyBTR": { "kind": "function", "async": true },
- "replayBTR": { "kind": "function", "async": true },
- "serializeBTR": { "kind": "function" },
- "deserializeBTR": { "kind": "function" },
-
- "createWormhole": { "kind": "function", "async": true },
- "composeWormholes": { "kind": "function", "async": true },
- "replayWormhole": { "kind": "function" },
- "serializeWormhole": { "kind": "function" },
- "deserializeWormhole": { "kind": "function" },
-
- "CryptoPort": { "kind": "abstract-class" },
- "NodeCryptoAdapter": { "kind": "class" },
- "WebCryptoAdapter": { "kind": "class" },
- "HttpServerPort": { "kind": "abstract-class" },
- "BunHttpAdapter": { "kind": "class" },
- "DenoHttpAdapter": { "kind": "class" },
- "WarpStateIndexBuilder": { "kind": "class" },
- "PatchError": { "kind": "class" },
- "WriterError": { "kind": "class" },
-
- "buildWarpStateIndex": { "kind": "function", "async": true },
- "computeStateHashV5": { "kind": "function", "async": true },
-
- "PingResult": { "kind": "interface" },
- "RepositoryHealth": { "kind": "interface" },
- "IndexHealth": { "kind": "interface" },
- "HealthResult": { "kind": "interface" },
- "CreateNodeOptions": { "kind": "interface" },
- "BulkNodeSpec": { "kind": "interface" },
- "ListNodesOptions": { "kind": "interface" },
- "IterateNodesOptions": { "kind": "interface" },
- "RebuildOptions": { "kind": "interface" },
- "LoadOptions": { "kind": "interface" },
- "TraversalNode": { "kind": "interface" },
- "PathResult": { "kind": "interface" },
- "QueryNodeSnapshot": { "kind": "interface" },
- "QueryResultV1": { "kind": "interface" },
- "AggregateSpec": { "kind": "interface" },
- "AggregateResult": { "kind": "interface" },
- "HopOptions": { "kind": "interface" },
- "TraverseFacadeOptions": { "kind": "interface" },
- "LogicalTraversal": { "kind": "interface" },
- "TraversalOptions": { "kind": "interface" },
- "AncestorOptions": { "kind": "interface" },
- "PathOptions": { "kind": "interface" },
- "CommonAncestorsOptions": { "kind": "interface" },
- "TopologicalSortOptions": { "kind": "interface" },
- "NodeInfo": { "kind": "interface" },
- "GitPlumbing": { "kind": "interface" },
- "ObserverConfig": { "kind": "interface" },
- "TranslationCostBreakdown": { "kind": "interface" },
- "TranslationCostResult": { "kind": "interface" },
- "EdgeChange": { "kind": "interface" },
- "PropSet": { "kind": "interface" },
- "PropRemoved": { "kind": "interface" },
- "StateDiffResult": { "kind": "interface" },
- "TemporalNodeSnapshot": { "kind": "interface" },
- "TemporalQuery": { "kind": "interface" },
- "PatchV2": { "kind": "interface" },
- "GCPolicyConfig": { "kind": "interface" },
- "GCExecuteResult": { "kind": "interface" },
- "GCMetrics": { "kind": "interface" },
- "MaybeGCResult": { "kind": "interface" },
- "SyncRequest": { "kind": "interface" },
- "SyncResponse": { "kind": "interface" },
- "ApplySyncResult": { "kind": "interface" },
- "SyncAuthServerOptions": { "kind": "interface" },
- "SyncAuthClientOptions": { "kind": "interface" },
- "WarpGraphStatus": { "kind": "interface" },
- "JoinReceipt": { "kind": "interface" },
- "OpOutcome": { "kind": "interface" },
- "TickReceipt": { "kind": "interface" },
- "OpNodeAdd": { "kind": "interface" },
- "OpNodeTombstone": { "kind": "interface" },
- "OpEdgeAdd": { "kind": "interface" },
- "OpEdgeTombstone": { "kind": "interface" },
- "OpPropSet": { "kind": "interface" },
- "ValueRefInline": { "kind": "interface" },
- "ValueRefBlob": { "kind": "interface" },
- "EventId": { "kind": "interface" },
- "PatchEntry": { "kind": "interface" },
- "WarpStateV5": { "kind": "interface" },
- "BTR": { "kind": "interface" },
- "BisectResult": { "kind": "type" },
- "BTRVerificationResult": { "kind": "interface" },
- "CreateBTROptions": { "kind": "interface" },
- "VerifyBTROptions": { "kind": "interface" },
- "WormholeEdge": { "kind": "interface" },
- "CreateWormholeOptions": { "kind": "interface" },
- "ComposeWormholesOptions": { "kind": "interface" },
-
- "TraversalDirection": { "kind": "type" },
- "EdgeWeightFn": { "kind": "type" },
- "NodeWeightFn": { "kind": "type" },
- "WeightedCostSelector": { "kind": "type" },
- "LogLevelValue": { "kind": "type" },
- "TickReceiptOpType": { "kind": "type" },
- "TickReceiptResult": { "kind": "type" },
- "ValueRef": { "kind": "type" }
+ "verifyBTR": {
+ "kind": "function",
+ "async": true
+ }
}
}
diff --git a/eslint.config.js b/eslint.config.js
index 0521617e..1c400569 100644
--- a/eslint.config.js
+++ b/eslint.config.js
@@ -49,6 +49,7 @@ export default tseslint.config(
AbortSignal: "readonly",
performance: "readonly",
global: "readonly",
+ WebSocket: "readonly",
},
},
plugins: {
@@ -253,6 +254,8 @@ export default tseslint.config(
"src/domain/services/LogicalBitmapIndexBuilder.js",
"src/domain/services/LogicalIndexBuildService.js",
"src/domain/services/IncrementalIndexUpdater.js",
+ "src/domain/services/WormholeService.js",
+ "src/domain/services/WarpServeService.js",
],
rules: {
"complexity": ["error", 35],
@@ -270,6 +273,28 @@ export default tseslint.config(
},
},
+ // ── Domain purity: ban Buffer — use Uint8Array + helpers from domain/utils/bytes.js ──
+ {
+ files: ["src/domain/**/*.js"],
+ rules: {
+ "no-restricted-globals": ["error",
+ { "name": "Buffer", "message": "Use Uint8Array + helpers from domain/utils/bytes.js. Buffer is confined to infrastructure adapters." },
+ ],
+ "no-restricted-imports": ["error", {
+ "paths": [
+ {
+ "name": "node:buffer",
+ "message": "Use Uint8Array + helpers from domain/utils/bytes.js. Buffer is confined to infrastructure adapters.",
+ },
+ {
+ "name": "buffer",
+ "message": "Use Uint8Array + helpers from domain/utils/bytes.js. Buffer is confined to infrastructure adapters.",
+ },
+ ],
+ }],
+ },
+ },
+
// ── Domain purity: ban Date.now(), new Date(), and Date() — use ClockPort ──
{
files: ["src/domain/**/*.js"],
@@ -323,6 +348,12 @@ export default tseslint.config(
TextEncoder: "readonly",
performance: "readonly",
global: "readonly",
+ Headers: "readonly",
+ ReadableStream: "readonly",
+ Request: "readonly",
+ Response: "readonly",
+ WebSocket: "readonly",
+ queueMicrotask: "readonly",
describe: "readonly",
it: "readonly",
expect: "readonly",
diff --git a/index.d.ts b/index.d.ts
index 8ee27844..969d0d36 100644
--- a/index.d.ts
+++ b/index.d.ts
@@ -452,11 +452,11 @@ export abstract class GraphPersistencePort {
*/
export abstract class IndexStoragePort {
/** Writes a blob and returns its OID */
- abstract writeBlob(content: Buffer | string): Promise;
+ abstract writeBlob(content: Uint8Array | string): Promise<string>;
/** Writes a tree from entries and returns its OID */
abstract writeTree(entries: string[]): Promise;
/** Reads a blob by OID */
- abstract readBlob(oid: string): Promise;
+ abstract readBlob(oid: string): Promise<Uint8Array | null>;
/** Reads a tree and returns a map of path to blob OID */
abstract readTreeOids(treeOid: string): Promise>;
/** Updates a ref to point to an OID */
@@ -484,11 +484,11 @@ export type LogLevelValue = 0 | 1 | 2 | 3 | 4;
*/
export abstract class CryptoPort {
/** Computes a hash digest of the given data */
- abstract hash(algorithm: string, data: string | Buffer | Uint8Array): Promise;
+ abstract hash(algorithm: string, data: string | Uint8Array): Promise;
/** Computes an HMAC of the given data */
- abstract hmac(algorithm: string, key: string | Buffer | Uint8Array, data: string | Buffer | Uint8Array): Promise;
- /** Constant-time comparison of two buffers */
- abstract timingSafeEqual(a: Buffer | Uint8Array, b: Buffer | Uint8Array): boolean;
+ abstract hmac(algorithm: string, key: string | Uint8Array, data: string | Uint8Array): Promise;
+ /** Constant-time comparison of two byte arrays */
+ abstract timingSafeEqual(a: Uint8Array, b: Uint8Array): boolean;
}
/**
@@ -528,9 +528,9 @@ export class ClockAdapter extends ClockPort {
*/
export abstract class SeekCachePort {
/** Retrieves a cached state buffer by key, or null on miss. */
- abstract get(key: string): Promise<{ buffer: Buffer | Uint8Array; indexTreeOid?: string } | null>;
+ abstract get(key: string): Promise<{ buffer: Uint8Array; indexTreeOid?: string } | null>;
/** Stores a state buffer under the given key. */
- abstract set(key: string, buffer: Buffer | Uint8Array, options?: { indexTreeOid?: string }): Promise;
+ abstract set(key: string, buffer: Uint8Array, options?: { indexTreeOid?: string }): Promise<void>;
/** Checks whether a key exists in the cache index. */
abstract has(key: string): Promise;
/** Lists all keys currently in the cache index. */
@@ -541,6 +541,18 @@ export abstract class SeekCachePort {
abstract clear(): Promise;
}
+/**
+ * Port interface for content blob storage operations.
+ * Abstracts how large binary content is stored and retrieved.
+ * @abstract
+ */
+export abstract class BlobStoragePort {
+ /** Stores content and returns a storage identifier (e.g. CAS tree OID). */
+ abstract store(content: Uint8Array | string, options?: { slug?: string }): Promise<string>;
+ /** Retrieves content by its storage identifier. */
+ abstract retrieve(oid: string): Promise;
+}
+
/**
* Port interface for structured logging operations.
* @abstract
@@ -596,8 +608,8 @@ export class ConsoleLogger extends LoggerPort {
*/
export interface GitPlumbing {
readonly emptyTree: string;
- execute(options: { args: string[]; input?: string | Buffer }): Promise;
- executeStream(options: { args: string[] }): Promise & { collect(opts?: { asString?: boolean }): Promise }>;
+ execute(options: { args: string[]; input?: string | Uint8Array }): Promise;
+ executeStream(options: { args: string[] }): Promise & { collect(opts?: { asString?: boolean }): Promise }>;
}
/**
@@ -607,7 +619,11 @@ export interface GitPlumbing {
* but stores all data in Maps — no real Git I/O required.
*/
export class InMemoryGraphAdapter extends GraphPersistencePort {
- constructor();
+ constructor(options?: {
+ author?: string;
+ clock?: { now: () => number };
+ hash?: (data: Uint8Array) => string;
+ });
get emptyTree(): string;
commitNode(options: CreateNodeOptions): Promise;
@@ -631,11 +647,11 @@ export class GitGraphAdapter extends GraphPersistencePort implements IndexStorag
getNodeInfo(sha: string): Promise;
logNodesStream(options: ListNodesOptions & { format: string }): Promise>;
logNodes(options: ListNodesOptions & { format: string }): Promise;
- writeBlob(content: Buffer | string): Promise;
+ writeBlob(content: Uint8Array | string): Promise<string>;
writeTree(entries: string[]): Promise;
- readTree(treeOid: string): Promise>;
+ readTree(treeOid: string): Promise<Map<string, Uint8Array>>;
readTreeOids(treeOid: string): Promise>;
- readBlob(oid: string): Promise;
+ readBlob(oid: string): Promise<Uint8Array | null>;
updateRef(ref: string, oid: string): Promise;
readRef(ref: string): Promise;
deleteRef(ref: string): Promise;
@@ -657,9 +673,9 @@ export class GitGraphAdapter extends GraphPersistencePort implements IndexStorag
*/
export class NodeCryptoAdapter extends CryptoPort {
constructor();
- hash(algorithm: string, data: string | Buffer | Uint8Array): Promise;
- hmac(algorithm: string, key: string | Buffer | Uint8Array, data: string | Buffer | Uint8Array): Promise;
- timingSafeEqual(a: Buffer | Uint8Array, b: Buffer | Uint8Array): boolean;
+ hash(algorithm: string, data: string | Uint8Array): Promise;
+ hmac(algorithm: string, key: string | Uint8Array, data: string | Uint8Array): Promise;
+ timingSafeEqual(a: Uint8Array, b: Uint8Array): boolean;
}
/**
@@ -670,9 +686,9 @@ export class NodeCryptoAdapter extends CryptoPort {
*/
export class WebCryptoAdapter extends CryptoPort {
constructor(options?: { subtle?: SubtleCrypto });
- hash(algorithm: string, data: string | Buffer | Uint8Array): Promise;
- hmac(algorithm: string, key: string | Buffer | Uint8Array, data: string | Buffer | Uint8Array): Promise;
- timingSafeEqual(a: Buffer | Uint8Array, b: Buffer | Uint8Array): boolean;
+ hash(algorithm: string, data: string | Uint8Array): Promise;
+ hmac(algorithm: string, key: string | Uint8Array, data: string | Uint8Array): Promise;
+ timingSafeEqual(a: Uint8Array, b: Uint8Array): boolean;
}
/**
@@ -684,7 +700,7 @@ export abstract class HttpServerPort {
method: string;
url: string;
headers: Record<string, string>;
- body?: Buffer | Uint8Array;
+ body?: Uint8Array;
}) => Promise<{ status?: number; headers?: Record<string, string>; body?: string | Uint8Array }>): {
listen(port: number, callback?: (err?: Error | null) => void): void;
listen(port: number, host: string, callback?: (err?: Error | null) => void): void;
@@ -704,7 +720,7 @@ export class BunHttpAdapter extends HttpServerPort {
method: string;
url: string;
headers: Record<string, string>;
- body?: Buffer | Uint8Array;
+ body?: Uint8Array;
}) => Promise<{ status?: number; headers?: Record<string, string>; body?: string | Uint8Array }>): {
listen(port: number, callback?: (err?: Error | null) => void): void;
listen(port: number, host: string, callback?: (err?: Error | null) => void): void;
@@ -724,7 +740,7 @@ export class DenoHttpAdapter extends HttpServerPort {
method: string;
url: string;
headers: Record<string, string>;
- body?: Buffer | Uint8Array;
+ body?: Uint8Array;
}) => Promise<{ status?: number; headers?: Record<string, string>; body?: string | Uint8Array }>): {
listen(port: number, callback?: (err?: Error | null) => void): void;
listen(port: number, host: string, callback?: (err?: Error | null) => void): void;
@@ -753,7 +769,7 @@ export class BitmapIndexBuilder {
addEdge(srcSha: string, tgtSha: string): void;
/** Serializes the index to a tree structure of buffers */
- serialize(options?: { frontier?: Map<string, string> }): Promise<Record<string, Buffer>>;
+ serialize(options?: { frontier?: Map<string, string> }): Promise<Record<string, Uint8Array>>;
}
/**
@@ -773,7 +789,7 @@ export class WarpStateIndexBuilder {
/**
* Serializes the index to a tree structure of buffers.
*/
- serialize(): Promise<Record<string, Buffer>>;
+ serialize(): Promise<Record<string, Uint8Array>>;
}
/**
@@ -782,7 +798,7 @@ export class WarpStateIndexBuilder {
* Convenience function that creates a WarpStateIndexBuilder, builds from state,
* and returns the serialized tree and stats.
*/
-export function buildWarpStateIndex(state: WarpStateV5, options?: { crypto?: CryptoPort }): Promise<{ tree: Record<string, Buffer>; stats: { nodes: number; edges: number } }>;
+export function buildWarpStateIndex(state: WarpStateV5, options?: { crypto?: CryptoPort }): Promise<{ tree: Record<string, Uint8Array>; stats: { nodes: number; edges: number } }>;
/**
* Computes a deterministic hash of a WarpStateV5 state.
@@ -1438,9 +1454,9 @@ export class PatchBuilderV2 {
/** Sets a property on an edge. */
setEdgeProperty(from: string, to: string, label: string, key: string, value: unknown): PatchBuilderV2;
/** Attaches content to a node (writes blob + sets _content property). */
- attachContent(nodeId: string, content: Buffer | string): Promise;
+ attachContent(nodeId: string, content: Uint8Array | string): Promise;
/** Attaches content to an edge (writes blob + sets _content edge property). */
- attachEdgeContent(from: string, to: string, label: string, content: Buffer | string): Promise;
+ attachEdgeContent(from: string, to: string, label: string, content: Uint8Array | string): Promise;
/** Builds the PatchV2 object without committing. */
build(): PatchV2;
/** Commits the patch to the graph and returns the commit SHA. */
@@ -1472,9 +1488,9 @@ export class PatchSession {
/** Sets a property on an edge. */
setEdgeProperty(from: string, to: string, label: string, key: string, value: unknown): this;
/** Attaches content to a node (writes blob + sets _content property). */
- attachContent(nodeId: string, content: Buffer | string): Promise;
+ attachContent(nodeId: string, content: Uint8Array | string): Promise;
/** Attaches content to an edge (writes blob + sets _content edge property). */
- attachEdgeContent(from: string, to: string, label: string, content: Buffer | string): Promise;
+ attachEdgeContent(from: string, to: string, label: string, content: Uint8Array | string): Promise;
/** Builds the PatchV2 object without committing. */
build(): PatchV2;
/** Commits the patch with CAS protection. */
@@ -1513,6 +1529,18 @@ export class WriterError extends Error {
constructor(code: string, message: string, cause?: Error);
}
+/**
+ * Error thrown when a patch requires decryption but no patchBlobStorage
+ * (with encryption key) is configured.
+ */
+export class EncryptionError extends Error {
+ readonly name: 'EncryptionError';
+ readonly code: string;
+ readonly context: Record<string, unknown>;
+
+ constructor(message: string, options?: { context?: Record<string, unknown> });
+}
+
// ============================================================================
// GC Types
// ============================================================================
@@ -1672,6 +1700,10 @@ export default class WarpGraph {
crypto?: CryptoPort;
codec?: unknown;
seekCache?: SeekCachePort;
+ /** Content blob storage (for attachContent/attachEdgeContent). */
+ blobStorage?: BlobStoragePort;
+ /** Patch blob storage — when set, patch CBOR is encrypted via this port. */
+ patchBlobStorage?: BlobStoragePort;
}): Promise;
/**
@@ -1763,9 +1795,9 @@ export default class WarpGraph {
/**
* Gets the content blob for a node, or null if none is attached.
- * Returns raw Buffer; call `.toString('utf8')` for text.
+ * Returns raw bytes; use `new TextDecoder().decode(result)` for text.
*/
- getContent(nodeId: string): Promise<Buffer | null>;
+ getContent(nodeId: string): Promise<Uint8Array | null>;
/**
* Gets the content blob OID for an edge, or null if none is attached.
@@ -1774,9 +1806,9 @@ export default class WarpGraph {
/**
* Gets the content blob for an edge, or null if none is attached.
- * Returns raw Buffer; call `.toString('utf8')` for text.
+ * Returns raw bytes; use `new TextDecoder().decode(result)` for text.
*/
- getEdgeContent(from: string, to: string, label: string): Promise<Buffer | null>;
+ getEdgeContent(from: string, to: string, label: string): Promise<Uint8Array | null>;
/**
* Checks if a node exists in the materialized state.
@@ -2099,14 +2131,14 @@ export class ProvenanceIndex {
/**
* Serializes the index to CBOR format for checkpoint storage.
*/
- serialize(): Buffer;
+ serialize(): Uint8Array;
/**
* Deserializes an index from CBOR format.
*
* @throws Error if the buffer contains an unsupported version
*/
- static deserialize(buffer: Buffer): ProvenanceIndex;
+ static deserialize(buffer: Uint8Array): ProvenanceIndex;
/**
* Returns a JSON-serializable representation of this index.
@@ -2650,3 +2682,68 @@ export function deserializeWormhole(json: {
patchCount: number;
payload: PatchEntry[];
}): WormholeEdge;
+
+// ── WebSocket Server Port ────────────────────────────────────────────
+
+/**
+ * A single WebSocket connection.
+ */
+export interface WsConnection {
+ /** Send a text message to the client. */
+ send(message: string): void;
+ /** Register a handler for incoming messages. */
+ onMessage(handler: (message: string) => void): void;
+ /** Register a handler for connection close events. */
+ onClose(handler: (code?: number, reason?: string) => void): void;
+ /** Close the connection. */
+ close(): void;
+}
+
+/**
+ * Handle returned by WebSocketServerPort.createServer().
+ */
+export interface WsServerHandle {
+ /** Start listening on the given port and optional host. */
+ listen(port: number, host?: string): Promise<{ port: number; host: string }>;
+ /** Shut down the server. */
+ close(): Promise<void>;
+}
+
+/**
+ * Port for WebSocket server creation.
+ *
+ * Abstracts platform-specific WebSocket server APIs (Node ws, Bun.serve,
+ * Deno.upgradeWebSocket) so domain code doesn't depend on any runtime.
+ */
+export class WebSocketServerPort {
+ /** Creates a WebSocket server. */
+ createServer(
+ onConnection: (connection: WsConnection) => void
+ ): WsServerHandle;
+}
+
+// ── WarpServeService ─────────────────────────────────────────────────
+
+/**
+ * Domain service that bridges WarpGraph instances to browser clients
+ * over a WebSocketServerPort.
+ */
+export class WarpServeService {
+ constructor(options: {
+ wsPort: WebSocketServerPort;
+ graphs: Array<{
+ graphName: string;
+ materialize: Function;
+ subscribe: Function;
+ getNodeProps: Function;
+ createPatch: Function;
+ query: Function;
+ }>;
+ });
+
+ /** Start listening for WebSocket connections. */
+ listen(port: number, host?: string): Promise<{ port: number; host: string }>;
+
+ /** Shut down the server and clean up subscriptions. */
+ close(): Promise<void>;
+}
diff --git a/index.js b/index.js
index 4483b0ee..2d92f571 100644
--- a/index.js
+++ b/index.js
@@ -38,9 +38,11 @@ import NoOpLogger from './src/infrastructure/adapters/NoOpLogger.js';
import ConsoleLogger, { LogLevel } from './src/infrastructure/adapters/ConsoleLogger.js';
import ClockAdapter from './src/infrastructure/adapters/ClockAdapter.js';
import {
+ EncryptionError,
ForkError,
IndexError,
QueryError,
+ PatchError,
SchemaUnsupportedError,
ShardLoadError,
ShardCorruptionError,
@@ -51,6 +53,14 @@ import {
SyncError,
WormholeError,
} from './src/domain/errors/index.js';
+import WriterError from './src/domain/errors/WriterError.js';
+import BlobStoragePort from './src/ports/BlobStoragePort.js';
+import CryptoPort from './src/ports/CryptoPort.js';
+import HttpServerPort from './src/ports/HttpServerPort.js';
+import NodeCryptoAdapter from './src/infrastructure/adapters/NodeCryptoAdapter.js';
+import WebCryptoAdapter from './src/infrastructure/adapters/WebCryptoAdapter.js';
+import BunHttpAdapter from './src/infrastructure/adapters/BunHttpAdapter.js';
+import DenoHttpAdapter from './src/infrastructure/adapters/DenoHttpAdapter.js';
import { checkAborted, createTimeoutSignal } from './src/domain/utils/cancellation.js';
// Multi-writer graph support (WARP)
@@ -104,6 +114,12 @@ import {
} from './src/domain/services/WormholeService.js';
import BisectService from './src/domain/services/BisectService.js';
+import { PatchBuilderV2 } from './src/domain/services/PatchBuilderV2.js';
+import { PatchSession } from './src/domain/warp/PatchSession.js';
+import { Writer } from './src/domain/warp/Writer.js';
+import { ProvenanceIndex } from './src/domain/services/ProvenanceIndex.js';
+import WarpStateIndexBuilder, { buildWarpStateIndex } from './src/domain/services/WarpStateIndexBuilder.js';
+import { computeStateHashV5 } from './src/domain/services/StateSerializerV5.js';
const TraversalService = CommitDagTraversalService;
@@ -135,7 +151,22 @@ export {
SeekCachePort,
ClockAdapter,
+ // Port contracts
+ BlobStoragePort,
+ CryptoPort,
+ HttpServerPort,
+
+ // Crypto adapters
+ NodeCryptoAdapter,
+ WebCryptoAdapter,
+
+ // HTTP adapters
+ BunHttpAdapter,
+ DenoHttpAdapter,
+
// Error types for integrity failure handling
+ EncryptionError,
+ PatchError,
ForkError,
IndexError,
QueryError,
@@ -148,6 +179,7 @@ export {
OperationAbortedError,
SyncError,
WormholeError,
+ WriterError,
// Cancellation utilities
checkAborted,
@@ -157,6 +189,10 @@ export {
WarpGraph,
QueryBuilder,
ObserverView,
+ PatchBuilderV2,
+ PatchSession,
+ Writer,
+ ProvenanceIndex,
computeTranslationCost,
// WARP type creators
@@ -175,6 +211,11 @@ export {
isEdgePropKey,
CONTENT_PROPERTY_KEY,
+ // State indexing & hashing
+ WarpStateIndexBuilder,
+ buildWarpStateIndex,
+ computeStateHashV5,
+
// WARP migration
migrateV4toV5,
diff --git a/jsr.json b/jsr.json
index 812529a9..4cd74de0 100644
--- a/jsr.json
+++ b/jsr.json
@@ -1,18 +1,23 @@
{
"name": "@git-stunts/git-warp",
- "version": "13.1.0",
+ "version": "14.0.0",
"imports": {
"roaring": "npm:roaring@^2.7.0"
},
"exports": {
".": "./index.js",
"./node": "./src/domain/entities/GraphNode.js",
- "./visualization": "./src/visualization/index.js"
+ "./visualization": "./src/visualization/index.js",
+ "./browser": "./browser.js",
+ "./sha1sync": "./src/infrastructure/adapters/sha1sync.js"
},
"publish": {
"include": [
"index.js",
"index.d.ts",
+ "browser.js",
+ "browser.d.ts",
+ "sha1sync.d.ts",
"src/**/*.js",
"src/**/*.d.ts",
"README.md",
diff --git a/package-lock.json b/package-lock.json
index 69c7d148..c3df96f7 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,16 +1,16 @@
{
"name": "@git-stunts/git-warp",
- "version": "13.1.0",
+ "version": "14.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@git-stunts/git-warp",
- "version": "13.1.0",
+ "version": "14.0.0",
"license": "Apache-2.0",
"dependencies": {
"@git-stunts/alfred": "^0.4.0",
- "@git-stunts/git-cas": "^3.0.0",
+ "@git-stunts/git-cas": "^5.2.4",
"@git-stunts/plumbing": "^2.8.0",
"@git-stunts/trailer-codec": "^2.1.1",
"boxen": "^7.1.1",
@@ -23,6 +23,7 @@
"roaring-wasm": "^1.1.0",
"string-width": "^7.1.0",
"wrap-ansi": "^9.0.0",
+ "ws": "^8.19.0",
"zod": "3.24.1"
},
"bin": {
@@ -33,6 +34,7 @@
"@eslint/js": "^9.17.0",
"@git-stunts/docker-guard": "^0.1.0",
"@types/node": "^22.15.29",
+ "@types/ws": "^8.18.1",
"@typescript-eslint/eslint-plugin": "^8.54.0",
"@typescript-eslint/parser": "^8.54.0",
"eslint": "^9.17.0",
@@ -721,6 +723,40 @@
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
},
+ "node_modules/@flyingrobots/bijou": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/@flyingrobots/bijou/-/bijou-0.2.0.tgz",
+ "integrity": "sha512-Oix2Kqq4w87KCkyK2W+8u4E4aGVQiraUy8BF3Bk/NRtT+UlUI0ETs+E7GwpwOyOvHvt0cIOjcMmVPxzKa52P4A==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@flyingrobots/bijou-node": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/@flyingrobots/bijou-node/-/bijou-node-0.2.0.tgz",
+ "integrity": "sha512-QaIaoBF0OMRHGtLsga1knplfFEmAeC6Lt4SxWkCKIJahMdNqXatCWM3RdzXcbjfcXqRIXyeEpm1agmmwi4gneQ==",
+ "license": "MIT",
+ "dependencies": {
+ "@flyingrobots/bijou": "0.2.0",
+ "chalk": "^5.6.2"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@flyingrobots/bijou-tui": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/@flyingrobots/bijou-tui/-/bijou-tui-0.2.0.tgz",
+ "integrity": "sha512-pXEo/Am6svRIKvez7926avdGUbfVndlSOpidBPc42YjCQHU5ZQrEuJpjI7niJb63N0ruxu0VXHci8N0wzBYSow==",
+ "license": "MIT",
+ "dependencies": {
+ "@flyingrobots/bijou": "0.2.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
"node_modules/@git-stunts/alfred": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/@git-stunts/alfred/-/alfred-0.4.0.tgz",
@@ -738,11 +774,14 @@
"license": "Apache-2.0"
},
"node_modules/@git-stunts/git-cas": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/@git-stunts/git-cas/-/git-cas-3.0.0.tgz",
- "integrity": "sha512-5uqIsTukE+8f1h317ZmGneYpTJ1ecBxg16QJxvF3kNrfQR3/DcAH4fQyMRkCIQtSHEz2p6UpOwpM10R9dEQm/w==",
+ "version": "5.2.4",
+ "resolved": "https://registry.npmjs.org/@git-stunts/git-cas/-/git-cas-5.2.4.tgz",
+ "integrity": "sha512-8jxOLbeGOsJmCLLxybQ3KRvnCzxhHnrbicBAZOlSgIQBVe0VO5RDZgNDcw/Fk0zIOf9TOb1F8YuArwDVc/jM/A==",
"license": "Apache-2.0",
"dependencies": {
+ "@flyingrobots/bijou": "^0.2.0",
+ "@flyingrobots/bijou-node": "^0.2.0",
+ "@flyingrobots/bijou-tui": "^0.2.0",
"@git-stunts/alfred": "^0.10.0",
"@git-stunts/plumbing": "^2.8.0",
"cbor-x": "^1.6.0",
@@ -1321,6 +1360,16 @@
"undici-types": "~6.21.0"
}
},
+ "node_modules/@types/ws": {
+ "version": "8.18.1",
+ "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz",
+ "integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/node": "*"
+ }
+ },
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "8.54.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.54.0.tgz",
@@ -4472,9 +4521,9 @@
}
},
"node_modules/tar": {
- "version": "7.5.9",
- "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.9.tgz",
- "integrity": "sha512-BTLcK0xsDh2+PUe9F6c2TlRp4zOOBMTkoQHQIWSIzI0R7KG46uEwq4OPk2W7bZcprBMsuaeFsqwYr7pjh6CuHg==",
+ "version": "7.5.10",
+ "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.10.tgz",
+ "integrity": "sha512-8mOPs1//5q/rlkNSPcCegA6hiHJYDmSLEI8aMH/CdSQJNWztHC9WHNam5zdQlfpTwB9Xp7IBEsHfV5LKMJGVAw==",
"license": "BlueOak-1.0.0",
"dependencies": {
"@isaacs/fs-minipass": "^4.0.0",
@@ -4956,6 +5005,27 @@
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
+ "node_modules/ws": {
+ "version": "8.19.0",
+ "resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz",
+ "integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=10.0.0"
+ },
+ "peerDependencies": {
+ "bufferutil": "^4.0.1",
+ "utf-8-validate": ">=5.0.2"
+ },
+ "peerDependenciesMeta": {
+ "bufferutil": {
+ "optional": true
+ },
+ "utf-8-validate": {
+ "optional": true
+ }
+ }
+ },
"node_modules/yallist": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
diff --git a/package.json b/package.json
index 938d6af9..418d7048 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "@git-stunts/git-warp",
- "version": "13.1.0",
+ "version": "14.0.0",
"description": "Deterministic WARP graph over Git: graph-native storage, traversal, and tooling.",
"type": "module",
"license": "Apache-2.0",
@@ -38,6 +38,16 @@
"import": "./src/visualization/index.js",
"default": "./src/visualization/index.js"
},
+ "./browser": {
+ "types": "./browser.d.ts",
+ "import": "./browser.js",
+ "default": "./browser.js"
+ },
+ "./sha1sync": {
+ "types": "./sha1sync.d.ts",
+ "import": "./src/infrastructure/adapters/sha1sync.js",
+ "default": "./src/infrastructure/adapters/sha1sync.js"
+ },
"./package.json": "./package.json"
},
"files": [
@@ -46,7 +56,10 @@
"bin/cli",
"bin/git-warp",
"src",
+ "browser.js",
"index.js",
+ "browser.d.ts",
+ "sha1sync.d.ts",
"index.d.ts",
"README.md",
"LICENSE",
@@ -96,7 +109,7 @@
},
"dependencies": {
"@git-stunts/alfred": "^0.4.0",
- "@git-stunts/git-cas": "^3.0.0",
+ "@git-stunts/git-cas": "^5.2.4",
"@git-stunts/plumbing": "^2.8.0",
"@git-stunts/trailer-codec": "^2.1.1",
"boxen": "^7.1.1",
@@ -109,12 +122,14 @@
"roaring-wasm": "^1.1.0",
"string-width": "^7.1.0",
"wrap-ansi": "^9.0.0",
+ "ws": "^8.19.0",
"zod": "3.24.1"
},
"devDependencies": {
"@eslint/js": "^9.17.0",
"@git-stunts/docker-guard": "^0.1.0",
"@types/node": "^22.15.29",
+ "@types/ws": "^8.18.1",
"@typescript-eslint/eslint-plugin": "^8.54.0",
"@typescript-eslint/parser": "^8.54.0",
"eslint": "^9.17.0",
diff --git a/sha1sync.d.ts b/sha1sync.d.ts
new file mode 100644
index 00000000..02d287ef
--- /dev/null
+++ b/sha1sync.d.ts
@@ -0,0 +1,14 @@
+/**
+ * Synchronous SHA-1 for browser use with InMemoryGraphAdapter.
+ *
+ * NOT used for security — only for Git content addressing.
+ */
+
+/**
+ * Computes a SHA-1 hash of the given data, returning a 40-character
+ * lowercase hex string.
+ *
+ * @param data - The data to hash
+ * @returns 40-character lowercase hex SHA-1 digest
+ */
+export function sha1sync(data: Uint8Array): string;
diff --git a/src/domain/WarpGraph.js b/src/domain/WarpGraph.js
index 1f978bd1..92930883 100644
--- a/src/domain/WarpGraph.js
+++ b/src/domain/WarpGraph.js
@@ -48,9 +48,9 @@ const DEFAULT_ADJACENCY_CACHE_SIZE = 3;
export default class WarpGraph {
/**
* @private
- * @param {{ persistence: CorePersistence, graphName: string, writerId: string, gcPolicy?: Record<string, unknown>, adjacencyCacheSize?: number, checkpointPolicy?: {every: number}, autoMaterialize?: boolean, onDeleteWithData?: 'reject'|'cascade'|'warn', logger?: import('../ports/LoggerPort.js').default, clock?: import('../ports/ClockPort.js').default, crypto?: import('../ports/CryptoPort.js').default, codec?: import('../ports/CodecPort.js').default, seekCache?: import('../ports/SeekCachePort.js').default, audit?: boolean }} options
+ * @param {{ persistence: CorePersistence, graphName: string, writerId: string, gcPolicy?: Record<string, unknown>, adjacencyCacheSize?: number, checkpointPolicy?: {every: number}, autoMaterialize?: boolean, onDeleteWithData?: 'reject'|'cascade'|'warn', logger?: import('../ports/LoggerPort.js').default, clock?: import('../ports/ClockPort.js').default, crypto?: import('../ports/CryptoPort.js').default, codec?: import('../ports/CodecPort.js').default, seekCache?: import('../ports/SeekCachePort.js').default, audit?: boolean, blobStorage?: import('../ports/BlobStoragePort.js').default, patchBlobStorage?: import('../ports/BlobStoragePort.js').default }} options
*/
- constructor({ persistence, graphName, writerId, gcPolicy = {}, adjacencyCacheSize = DEFAULT_ADJACENCY_CACHE_SIZE, checkpointPolicy, autoMaterialize = true, onDeleteWithData = 'warn', logger, clock, crypto, codec, seekCache, audit = false }) {
+ constructor({ persistence, graphName, writerId, gcPolicy = {}, adjacencyCacheSize = DEFAULT_ADJACENCY_CACHE_SIZE, checkpointPolicy, autoMaterialize = true, onDeleteWithData = 'warn', logger, clock, crypto, codec, seekCache, audit = false, blobStorage, patchBlobStorage }) {
/** @type {CorePersistence} */
this._persistence = /** @type {CorePersistence} */ (persistence);
@@ -144,6 +144,12 @@ export default class WarpGraph {
/** @type {import('../ports/SeekCachePort.js').default|null} */
this._seekCache = seekCache || null;
+ /** @type {import('../ports/BlobStoragePort.js').default|null} */
+ this._blobStorage = blobStorage || null;
+
+ /** @type {import('../ports/BlobStoragePort.js').default|null} */
+ this._patchBlobStorage = patchBlobStorage || null;
+
/** @type {boolean} */
this._patchInProgress = false;
@@ -241,7 +247,7 @@ export default class WarpGraph {
/**
* Opens a multi-writer graph.
*
- * @param {{ persistence: CorePersistence, graphName: string, writerId: string, gcPolicy?: Record<string, unknown>, adjacencyCacheSize?: number, checkpointPolicy?: {every: number}, autoMaterialize?: boolean, onDeleteWithData?: 'reject'|'cascade'|'warn', logger?: import('../ports/LoggerPort.js').default, clock?: import('../ports/ClockPort.js').default, crypto?: import('../ports/CryptoPort.js').default, codec?: import('../ports/CodecPort.js').default, seekCache?: import('../ports/SeekCachePort.js').default, audit?: boolean }} options
+ * @param {{ persistence: CorePersistence, graphName: string, writerId: string, gcPolicy?: Record<string, unknown>, adjacencyCacheSize?: number, checkpointPolicy?: {every: number}, autoMaterialize?: boolean, onDeleteWithData?: 'reject'|'cascade'|'warn', logger?: import('../ports/LoggerPort.js').default, clock?: import('../ports/ClockPort.js').default, crypto?: import('../ports/CryptoPort.js').default, codec?: import('../ports/CodecPort.js').default, seekCache?: import('../ports/SeekCachePort.js').default, audit?: boolean, blobStorage?: import('../ports/BlobStoragePort.js').default, patchBlobStorage?: import('../ports/BlobStoragePort.js').default }} options
* @returns {Promise} The opened graph instance
* @throws {Error} If graphName, writerId, checkpointPolicy, or onDeleteWithData is invalid
*
@@ -252,7 +258,7 @@ export default class WarpGraph {
* writerId: 'node-1'
* });
*/
- static async open({ persistence, graphName, writerId, gcPolicy = {}, adjacencyCacheSize, checkpointPolicy, autoMaterialize, onDeleteWithData, logger, clock, crypto, codec, seekCache, audit }) {
+ static async open({ persistence, graphName, writerId, gcPolicy = {}, adjacencyCacheSize, checkpointPolicy, autoMaterialize, onDeleteWithData, logger, clock, crypto, codec, seekCache, audit, blobStorage, patchBlobStorage }) {
// Validate inputs
validateGraphName(graphName);
validateWriterId(writerId);
@@ -289,7 +295,7 @@ export default class WarpGraph {
}
}
- const graph = new WarpGraph({ persistence, graphName, writerId, gcPolicy, adjacencyCacheSize, checkpointPolicy, autoMaterialize, onDeleteWithData, logger, clock, crypto, codec, seekCache, audit });
+ const graph = new WarpGraph({ persistence, graphName, writerId, gcPolicy, adjacencyCacheSize, checkpointPolicy, autoMaterialize, onDeleteWithData, logger, clock, crypto, codec, seekCache, audit, blobStorage, patchBlobStorage });
// Validate migration boundary
await graph._validateMigrationBoundary();
diff --git a/src/domain/errors/EncryptionError.js b/src/domain/errors/EncryptionError.js
new file mode 100644
index 00000000..0fb981b7
--- /dev/null
+++ b/src/domain/errors/EncryptionError.js
@@ -0,0 +1,24 @@
+import WarpError from './WarpError.js';
+
+/**
+ * Error thrown when a patch requires decryption but no patchBlobStorage
+ * (with encryption key) is configured.
+ *
+ * ## Error Codes
+ *
+ * | Code | Description |
+ * |------|-------------|
+ * | `E_ENCRYPTED_PATCH` | Patch is encrypted but no decryption key is available |
+ *
+ * @class EncryptionError
+ * @extends WarpError
+ */
+export default class EncryptionError extends WarpError {
+ /**
+ * @param {string} message
+ * @param {{ context?: Record<string, unknown> }} [options={}]
+ */
+ constructor(message, options = {}) {
+ super(message, 'E_ENCRYPTED_PATCH', options);
+ }
+}
diff --git a/src/domain/errors/index.js b/src/domain/errors/index.js
index 91a63cce..32c0a44b 100644
--- a/src/domain/errors/index.js
+++ b/src/domain/errors/index.js
@@ -5,6 +5,7 @@
*/
export { default as EmptyMessageError } from './EmptyMessageError.js';
+export { default as EncryptionError } from './EncryptionError.js';
export { default as PersistenceError } from './PersistenceError.js';
export { default as WarpError } from './WarpError.js';
export { default as ForkError } from './ForkError.js';
diff --git a/src/domain/services/AuditReceiptService.js b/src/domain/services/AuditReceiptService.js
index c6c1a073..3c004b48 100644
--- a/src/domain/services/AuditReceiptService.js
+++ b/src/domain/services/AuditReceiptService.js
@@ -351,7 +351,7 @@ export class AuditReceiptService {
// Write blob
let blobOid;
try {
- blobOid = await this._persistence.writeBlob(Buffer.from(cborBytes));
+ blobOid = await this._persistence.writeBlob(cborBytes);
} catch (err) {
this._logger?.warn('[warp:audit]', {
code: 'AUDIT_WRITE_BLOB_FAILED',
diff --git a/src/domain/services/BitmapIndexBuilder.js b/src/domain/services/BitmapIndexBuilder.js
index f6dc57ff..62e25fe5 100644
--- a/src/domain/services/BitmapIndexBuilder.js
+++ b/src/domain/services/BitmapIndexBuilder.js
@@ -4,6 +4,7 @@ import { computeChecksum } from '../utils/checksumUtils.js';
import { getRoaringBitmap32, getNativeRoaringAvailable } from '../utils/roaring.js';
import { canonicalStringify } from '../utils/canonicalStringify.js';
import { SHARD_VERSION } from '../utils/shardVersion.js';
+import { textEncode, base64Encode } from '../utils/bytes.js';
// Re-export for backwards compatibility
export { SHARD_VERSION };
@@ -42,7 +43,7 @@ const wrapShard = async (data, crypto) => ({
/**
* Serializes a frontier Map into CBOR and JSON blobs in the given tree.
* @param {Map<string, string>} frontier - Writer→tip SHA map
- * @param {Record<string, Buffer>} tree - Target tree to add entries to
+ * @param {Record<string, Uint8Array>} tree - Target tree to add entries to
* @param {import('../../ports/CodecPort.js').default} codec - Codec for CBOR serialization
*/
function serializeFrontierToTree(frontier, tree, codec) {
@@ -52,8 +53,8 @@ function serializeFrontierToTree(frontier, tree, codec) {
sorted[key] = frontier.get(key);
}
const envelope = { version: 1, writerCount: frontier.size, frontier: sorted };
- tree['frontier.cbor'] = Buffer.from(codec.encode(envelope));
- tree['frontier.json'] = Buffer.from(canonicalStringify(envelope));
+ tree['frontier.cbor'] = codec.encode(envelope);
+ tree['frontier.json'] = textEncode(canonicalStringify(envelope));
}
/**
@@ -135,10 +136,10 @@ export default class BitmapIndexBuilder {
* Each shard is wrapped in a version/checksum envelope for integrity verification.
*
* @param {{ frontier?: Map<string, string> }} [options] - Serialization options
- * @returns {Promise<Record<string, Buffer>>} Map of path → serialized content
+ * @returns {Promise<Record<string, Uint8Array>>} Map of path → serialized content
*/
async serialize({ frontier } = {}) {
- /** @type {Record<string, Buffer>} */
+ /** @type {Record<string, Uint8Array>} */
const tree = {};
// Serialize ID mappings (sharded by prefix)
@@ -152,7 +153,7 @@ export default class BitmapIndexBuilder {
idShards[prefix][sha] = id;
}
for (const [prefix, map] of Object.entries(idShards)) {
- tree[`meta_${prefix}.json`] = Buffer.from(JSON.stringify(await wrapShard(map, this._crypto)));
+ tree[`meta_${prefix}.json`] = textEncode(JSON.stringify(await wrapShard(map, this._crypto)));
}
// Serialize bitmaps (sharded by prefix, per-node within shard)
@@ -167,12 +168,12 @@ export default class BitmapIndexBuilder {
bitmapShards[type][prefix] = {};
}
// Encode bitmap as base64 for JSON storage
- bitmapShards[type][prefix][sha] = Buffer.from(bitmap.serialize(true)).toString('base64');
+ bitmapShards[type][prefix][sha] = base64Encode(new Uint8Array(bitmap.serialize(true)));
}
for (const type of ['fwd', 'rev']) {
for (const [prefix, shardData] of Object.entries(bitmapShards[type])) {
- tree[`shards_${type}_${prefix}.json`] = Buffer.from(JSON.stringify(await wrapShard(shardData, this._crypto)));
+ tree[`shards_${type}_${prefix}.json`] = textEncode(JSON.stringify(await wrapShard(shardData, this._crypto)));
}
}
diff --git a/src/domain/services/BitmapIndexReader.js b/src/domain/services/BitmapIndexReader.js
index 28062511..54011d26 100644
--- a/src/domain/services/BitmapIndexReader.js
+++ b/src/domain/services/BitmapIndexReader.js
@@ -5,6 +5,7 @@ import LRUCache from '../utils/LRUCache.js';
import { getRoaringBitmap32 } from '../utils/roaring.js';
import { canonicalStringify } from '../utils/canonicalStringify.js';
import { isValidShardOid } from '../utils/validateShardOid.js';
+import { base64Decode } from '../utils/bytes.js';
/** @typedef {import('../../ports/IndexStoragePort.js').default} IndexStoragePort */
/** @typedef {import('../types/WarpPersistence.js').IndexStorage} IndexStorage */
@@ -202,7 +203,7 @@ export default class BitmapIndexReader {
}
// Decode base64 bitmap and extract IDs
- const buffer = Buffer.from(encoded, 'base64');
+ const buffer = base64Decode(encoded);
let ids;
try {
const RoaringBitmap32 = getRoaringBitmap32();
diff --git a/src/domain/services/GitLogParser.js b/src/domain/services/GitLogParser.js
index d0bda085..7a3e7bd5 100644
--- a/src/domain/services/GitLogParser.js
+++ b/src/domain/services/GitLogParser.js
@@ -1,5 +1,6 @@
import GraphNode from '../entities/GraphNode.js';
import { checkAborted } from '../utils/cancellation.js';
+import { concatBytes, textEncode, textDecode } from '../utils/bytes.js';
/**
* NUL byte (0x00) - Delimits commit records in git log output.
@@ -35,10 +36,8 @@ export const RECORD_SEPARATOR = '\x00';
* testing and alternative implementations.
*
* **Binary-First Processing**: The parser works directly with binary data for
- * performance. Buffer.indexOf(0) is faster than string indexOf('\0') because:
- * - No UTF-8 decoding overhead during scanning
- * - Native C++ implementation in Node.js Buffer
- * - Byte-level comparison vs character-level
+ * performance. Uint8Array.indexOf(0) scans bytes without UTF-8 decoding
+ * overhead, and byte-level comparison is faster than character-level.
*
* UTF-8 decoding only happens once per complete record, not during scanning.
* This is especially beneficial for large commit histories where most of the
@@ -74,9 +73,8 @@ export default class GitLogParser {
* Parses a stream of git log output and yields GraphNode instances.
*
* **Binary-first processing for performance**:
- * - Accepts Buffer, Uint8Array, or string chunks
- * - Finds NUL bytes (0x00) directly in binary using Buffer.indexOf(0)
- * - Buffer.indexOf(0) is faster than string indexOf('\0') - native C++ vs JS
+ * - Accepts Uint8Array or string chunks
+ * - Finds NUL bytes (0x00) directly in binary using Uint8Array.indexOf(0)
* - UTF-8 decoding only happens for complete records, not during scanning
*
* Handles:
@@ -86,8 +84,8 @@ export default class GitLogParser {
* - Backwards compatibility with string chunks
* - Cancellation via AbortSignal
*
- * @param {AsyncIterable<Buffer|Uint8Array|string>} stream - The git log output stream.
- * May yield Buffer, Uint8Array, or string chunks.
+ * @param {AsyncIterable<Uint8Array|string>} stream - The git log output stream.
+ * May yield Uint8Array or string chunks.
* @param {{ signal?: AbortSignal }} [options] - Parse options
* @yields {GraphNode} Parsed graph nodes. Invalid records are silently skipped.
* @throws {OperationAbortedError} If signal is aborted during parsing
@@ -106,23 +104,24 @@ export default class GitLogParser {
* }
*/
async *parse(stream, { signal } = {}) {
- let buffer = Buffer.alloc(0); // Binary buffer accumulator
+ /** @type {Uint8Array} */
+ let buffer = new Uint8Array(0); // Binary buffer accumulator
for await (const chunk of stream) {
checkAborted(signal, 'GitLogParser.parse');
- // Convert string chunks to Buffer, keep Buffer chunks as-is
- const chunkBuffer =
+ // Convert string chunks to Uint8Array, keep Uint8Array chunks as-is
+ const chunkBytes =
typeof chunk === 'string'
- ? Buffer.from(chunk, 'utf-8')
- : Buffer.isBuffer(chunk)
+ ? textEncode(chunk)
+ : chunk instanceof Uint8Array
? chunk
- : Buffer.from(chunk); // Uint8Array
+ : Uint8Array.from(chunk);
// Append to accumulator
- buffer = Buffer.concat([buffer, chunkBuffer]);
+ buffer = concatBytes(buffer, chunkBytes);
- // Find NUL bytes (0x00) in binary - faster than string indexOf
+ // Find NUL bytes (0x00) in binary
let nullIndex;
while ((nullIndex = buffer.indexOf(0)) !== -1) {
checkAborted(signal, 'GitLogParser.parse');
@@ -132,7 +131,7 @@ export default class GitLogParser {
buffer = buffer.subarray(nullIndex + 1);
// Only decode UTF-8 for complete records
- const block = recordBytes.toString('utf-8');
+ const block = textDecode(recordBytes);
const node = this.parseNode(block);
if (node) {
yield node;
@@ -142,7 +141,7 @@ export default class GitLogParser {
// Process any remaining data (final record without trailing NUL)
if (buffer.length > 0) {
- const block = buffer.toString('utf-8');
+ const block = textDecode(buffer);
if (block) {
const node = this.parseNode(block);
if (node) {
diff --git a/src/domain/services/HookInstaller.js b/src/domain/services/HookInstaller.js
index 8692d015..9677ff02 100644
--- a/src/domain/services/HookInstaller.js
+++ b/src/domain/services/HookInstaller.js
@@ -9,7 +9,7 @@
/**
* @typedef {Object} FsAdapter
- * @property {(path: string, content: string | Buffer, options?: Object) => void} writeFileSync
+ * @property {(path: string, content: string | Uint8Array, options?: Object) => void} writeFileSync
* @property {(path: string, mode: number) => void} chmodSync
* @property {(path: string, encoding?: string) => string} readFileSync
* @property {(path: string) => boolean} existsSync
diff --git a/src/domain/services/HttpSyncServer.js b/src/domain/services/HttpSyncServer.js
index 569be9cf..9b853a05 100644
--- a/src/domain/services/HttpSyncServer.js
+++ b/src/domain/services/HttpSyncServer.js
@@ -168,7 +168,7 @@ function validateRoute(request, expectedPath, defaultHost) {
/**
* Checks if the request body exceeds the maximum allowed size.
*
- * @param {Buffer | Uint8Array | undefined} body
+ * @param {Uint8Array | undefined} body
* @param {number} maxBytes
* @returns {{ status: number, headers: Record<string, string>, body: string }|null} Error response or null if within limits
* @private
@@ -184,7 +184,7 @@ function checkBodySize(body, maxBytes) {
* Parses and validates the request body as a sync request.
* Uses Zod-based SyncPayloadSchema for shape + resource limit validation.
*
- * @param {Buffer | Uint8Array | undefined} body
+ * @param {Uint8Array | undefined} body
* @returns {{ error: { status: number, headers: Record<string, string>, body: string }, parsed: null } | { error: null, parsed: import('./SyncProtocol.js').SyncRequest }}
* @private
*/
@@ -256,7 +256,7 @@ export default class HttpSyncServer {
* In log-only mode both checks record metrics/logs but always return
* null so the request proceeds.
*
- * @param {{ method: string, url: string, headers: Record<string, string>, body: Buffer | Uint8Array | undefined }} request
+ * @param {{ method: string, url: string, headers: Record<string, string>, body: Uint8Array | undefined }} request
* @param {Record} parsed - Parsed sync request body
* @returns {Promise<{ status: number, headers: Record, body: string }|null>}
* @private
@@ -300,7 +300,7 @@ export default class HttpSyncServer {
* @private
*/
async _handleRequest(request) {
- /** @type {{ method: string, url: string, headers: Record<string, string>, body: Buffer | Uint8Array | undefined }} */
+ /** @type {{ method: string, url: string, headers: Record<string, string>, body: Uint8Array | undefined }} */
const req = { ...request, headers: /** @type {Record<string, string>} */ (request.headers) };
const contentTypeError = checkContentType(req.headers);
if (contentTypeError) {
diff --git a/src/domain/services/MessageCodecInternal.js b/src/domain/services/MessageCodecInternal.js
index ff81f88a..114dc156 100644
--- a/src/domain/services/MessageCodecInternal.js
+++ b/src/domain/services/MessageCodecInternal.js
@@ -47,6 +47,7 @@ export const TRAILER_KEYS = {
checkpointVersion: 'eg-checkpoint',
dataCommit: 'eg-data-commit',
opsDigest: 'eg-ops-digest',
+ encrypted: 'eg-encrypted',
};
/**
diff --git a/src/domain/services/PatchBuilderV2.js b/src/domain/services/PatchBuilderV2.js
index 9ba09cb8..ea6d3b43 100644
--- a/src/domain/services/PatchBuilderV2.js
+++ b/src/domain/services/PatchBuilderV2.js
@@ -99,9 +99,9 @@ export class PatchBuilderV2 {
/**
* Creates a new PatchBuilderV2.
*
- * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default & import('../../ports/TreePort.js').default & import('../../ports/RefPort.js').default, graphName: string, writerId: string, lamport: number, versionVector: import('../crdt/VersionVector.js').VersionVector, getCurrentState: () => import('../services/JoinReducer.js').WarpStateV5 | null, expectedParentSha?: string|null, onCommitSuccess?: ((result: {patch: import('../types/WarpTypesV2.js').PatchV2, sha: string}) => void | Promise)|null, onDeleteWithData?: 'reject'|'cascade'|'warn', codec?: import('../../ports/CodecPort.js').default, logger?: import('../../ports/LoggerPort.js').default }} options
+ * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default & import('../../ports/TreePort.js').default & import('../../ports/RefPort.js').default, graphName: string, writerId: string, lamport: number, versionVector: import('../crdt/VersionVector.js').VersionVector, getCurrentState: () => import('../services/JoinReducer.js').WarpStateV5 | null, expectedParentSha?: string|null, onCommitSuccess?: ((result: {patch: import('../types/WarpTypesV2.js').PatchV2, sha: string}) => void | Promise)|null, onDeleteWithData?: 'reject'|'cascade'|'warn', codec?: import('../../ports/CodecPort.js').default, logger?: import('../../ports/LoggerPort.js').default, blobStorage?: import('../../ports/BlobStoragePort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} options
*/
- constructor({ persistence, graphName, writerId, lamport, versionVector, getCurrentState, expectedParentSha = null, onCommitSuccess = null, onDeleteWithData = 'warn', codec, logger }) {
+ constructor({ persistence, graphName, writerId, lamport, versionVector, getCurrentState, expectedParentSha = null, onCommitSuccess = null, onDeleteWithData = 'warn', codec, logger, blobStorage, patchBlobStorage }) {
/** @type {import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default & import('../../ports/TreePort.js').default & import('../../ports/RefPort.js').default} */
this._persistence = /** @type {import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default & import('../../ports/TreePort.js').default & import('../../ports/RefPort.js').default} */ (persistence);
@@ -157,6 +157,12 @@ export class PatchBuilderV2 {
*/
this._contentBlobs = [];
+ /** @type {import('../../ports/BlobStoragePort.js').default|null} */
+ this._blobStorage = blobStorage || null;
+
+ /** @type {import('../../ports/BlobStoragePort.js').default|null} */
+ this._patchBlobStorage = patchBlobStorage || null;
+
/**
* Observed operands — entities whose current state was consulted to build
* this patch.
@@ -536,7 +542,9 @@ export class PatchBuilderV2 {
// Validate identifiers before writing blob to avoid orphaned blobs
_assertNoReservedBytes(nodeId, 'nodeId');
_assertNoReservedBytes(CONTENT_PROPERTY_KEY, 'key');
- const oid = await this._persistence.writeBlob(content);
+ const oid = this._blobStorage
+ ? await this._blobStorage.store(content, { slug: `${this._graphName}/${nodeId}` })
+ : await this._persistence.writeBlob(content);
this.setProperty(nodeId, CONTENT_PROPERTY_KEY, oid);
this._contentBlobs.push(oid);
return this;
@@ -559,7 +567,9 @@ export class PatchBuilderV2 {
_assertNoReservedBytes(to, 'to');
_assertNoReservedBytes(label, 'label');
_assertNoReservedBytes(CONTENT_PROPERTY_KEY, 'key');
- const oid = await this._persistence.writeBlob(content);
+ const oid = this._blobStorage
+ ? await this._blobStorage.store(content, { slug: `${this._graphName}/${from}/${to}/${label}` })
+ : await this._persistence.writeBlob(content);
this.setEdgeProperty(from, to, label, CONTENT_PROPERTY_KEY, oid);
this._contentBlobs.push(oid);
return this;
@@ -718,9 +728,11 @@ export class PatchBuilderV2 {
writes: [...this._writes].sort(),
});
- // 6. Encode patch as CBOR and write as a Git blob
+ // 6. Encode patch as CBOR and write as a Git blob (or encrypted CAS asset)
const patchCbor = this._codec.encode(patch);
- const patchBlobOid = await this._persistence.writeBlob(patchCbor);
+ const patchBlobOid = this._patchBlobStorage
+ ? await this._patchBlobStorage.store(patchCbor, { slug: `${this._graphName}/${this._writerId}/patch` })
+ : await this._persistence.writeBlob(patchCbor);
// 7. Create tree with the patch blob + any content blobs (deduplicated)
// Format for mktree: "mode type oid\tpath"
@@ -738,6 +750,10 @@ export class PatchBuilderV2 {
lamport,
patchOid: patchBlobOid,
schema,
+ // "encrypted" is a legacy wire name meaning "patch blob stored externally
+ // via patchBlobStorage" (see ADR-0002). The flag tells readers to retrieve
+ // the blob via BlobStoragePort instead of reading it directly from Git.
+ encrypted: !!this._patchBlobStorage,
});
const parents = parentCommit ? [parentCommit] : [];
const newCommitSha = await this._persistence.commitNodeWithTree({
diff --git a/src/domain/services/PatchMessageCodec.js b/src/domain/services/PatchMessageCodec.js
index 4afa8c43..6718deaa 100644
--- a/src/domain/services/PatchMessageCodec.js
+++ b/src/domain/services/PatchMessageCodec.js
@@ -30,7 +30,7 @@ import {
/**
* Encodes a patch commit message.
*
- * @param {{ graph: string, writer: string, lamport: number, patchOid: string, schema?: number }} options - The patch message options
+ * @param {{ graph: string, writer: string, lamport: number, patchOid: string, schema?: number, encrypted?: boolean }} options - The patch message options
* @returns {string} The encoded commit message
* @throws {Error} If any validation fails
*
@@ -42,7 +42,7 @@ import {
* patchOid: 'abc123...' // 40-char hex
* });
*/
-export function encodePatchMessage({ graph, writer, lamport, patchOid, schema = 2 }) {
+export function encodePatchMessage({ graph, writer, lamport, patchOid, schema = 2, encrypted = false }) {
// Validate inputs
validateGraphName(graph);
validateWriterId(writer);
@@ -51,16 +51,21 @@ export function encodePatchMessage({ graph, writer, lamport, patchOid, schema =
validateSchema(schema);
const codec = getCodec();
+ /** @type {Record<string, string>} */
+ const trailers = {
+ [TRAILER_KEYS.kind]: 'patch',
+ [TRAILER_KEYS.graph]: graph,
+ [TRAILER_KEYS.writer]: writer,
+ [TRAILER_KEYS.lamport]: String(lamport),
+ [TRAILER_KEYS.patchOid]: patchOid,
+ [TRAILER_KEYS.schema]: String(schema),
+ };
+ if (encrypted) {
+ trailers[TRAILER_KEYS.encrypted] = 'true';
+ }
return codec.encode({
title: MESSAGE_TITLES.patch,
- trailers: {
- [TRAILER_KEYS.kind]: 'patch',
- [TRAILER_KEYS.graph]: graph,
- [TRAILER_KEYS.writer]: writer,
- [TRAILER_KEYS.lamport]: String(lamport),
- [TRAILER_KEYS.patchOid]: patchOid,
- [TRAILER_KEYS.schema]: String(schema),
- },
+ trailers,
});
}
@@ -72,7 +77,7 @@ export function encodePatchMessage({ graph, writer, lamport, patchOid, schema =
* Decodes a patch commit message.
*
* @param {string} message - The raw commit message
- * @returns {{ kind: 'patch', graph: string, writer: string, lamport: number, patchOid: string, schema: number }} The decoded patch message
+ * @returns {{ kind: 'patch', graph: string, writer: string, lamport: number, patchOid: string, schema: number, encrypted: boolean }} The decoded patch message
* @throws {Error} If the message is not a valid patch message
*
* @example
@@ -93,6 +98,8 @@ export function decodePatchMessage(message) {
validateOid(patchOid, 'patchOid');
const schema = parsePositiveIntTrailer(trailers, 'schema', 'patch');
+ const encrypted = trailers[TRAILER_KEYS.encrypted] === 'true';
+
return {
kind: 'patch',
graph,
@@ -100,5 +107,6 @@ export function decodePatchMessage(message) {
lamport,
patchOid,
schema,
+ encrypted,
};
}
diff --git a/src/domain/services/PropertyIndexReader.js b/src/domain/services/PropertyIndexReader.js
index cc89c976..e95bfc46 100644
--- a/src/domain/services/PropertyIndexReader.js
+++ b/src/domain/services/PropertyIndexReader.js
@@ -85,7 +85,7 @@ export default class PropertyIndexReader {
return null;
}
- const buffer = await /** @type {{ readBlob(oid: string): Promise<Buffer|null> }} */ (this._storage).readBlob(oid);
+ const buffer = await /** @type {{ readBlob(oid: string): Promise<Uint8Array|null> }} */ (this._storage).readBlob(oid);
if (buffer === null || buffer === undefined) {
throw new Error(`PropertyIndexReader: missing blob for OID '${oid}' (${path})`);
}
diff --git a/src/domain/services/StreamingBitmapIndexBuilder.js b/src/domain/services/StreamingBitmapIndexBuilder.js
index 07322a55..6deca61d 100644
--- a/src/domain/services/StreamingBitmapIndexBuilder.js
+++ b/src/domain/services/StreamingBitmapIndexBuilder.js
@@ -8,6 +8,7 @@ import { checkAborted } from '../utils/cancellation.js';
import { getRoaringBitmap32 } from '../utils/roaring.js';
import { canonicalStringify } from '../utils/canonicalStringify.js';
import { SHARD_VERSION } from '../utils/shardVersion.js';
+import { textEncode, base64Encode, base64Decode } from '../utils/bytes.js';
/** @typedef {import('../types/WarpPersistence.js').IndexStorage} IndexStorage */
@@ -185,7 +186,7 @@ export default class StreamingBitmapIndexBuilder {
if (!bitmapShards[type][prefix]) {
bitmapShards[type][prefix] = {};
}
- bitmapShards[type][prefix][sha] = Buffer.from(bitmap.serialize(true)).toString('base64');
+ bitmapShards[type][prefix][sha] = base64Encode(new Uint8Array(bitmap.serialize(true)));
}
return bitmapShards;
}
@@ -216,7 +217,7 @@ export default class StreamingBitmapIndexBuilder {
checksum,
data: shardData,
};
- const buffer = Buffer.from(JSON.stringify(envelope));
+ const buffer = textEncode(JSON.stringify(envelope));
const oid = await this.storage.writeBlob(buffer);
if (!this.flushedChunks.has(path)) {
this.flushedChunks.set(path, []);
@@ -326,7 +327,7 @@ export default class StreamingBitmapIndexBuilder {
checksum: await computeChecksum(map, this._crypto),
data: map,
};
- const buffer = Buffer.from(JSON.stringify(envelope));
+ const buffer = textEncode(JSON.stringify(envelope));
const oid = await this.storage.writeBlob(buffer);
return `100644 blob ${oid}\t${path}`;
})
@@ -418,9 +419,9 @@ export default class StreamingBitmapIndexBuilder {
sorted[key] = frontier.get(key);
}
const envelope = { version: 1, writerCount: frontier.size, frontier: sorted };
- const cborOid = await this.storage.writeBlob(Buffer.from(this._codec.encode(envelope)));
+ const cborOid = await this.storage.writeBlob(this._codec.encode(envelope));
flatEntries.push(`100644 blob ${cborOid}\tfrontier.cbor`);
- const jsonOid = await this.storage.writeBlob(Buffer.from(canonicalStringify(envelope)));
+ const jsonOid = await this.storage.writeBlob(textEncode(canonicalStringify(envelope)));
flatEntries.push(`100644 blob ${jsonOid}\tfrontier.json`);
}
@@ -589,7 +590,7 @@ export default class StreamingBitmapIndexBuilder {
_mergeDeserializedBitmap({ merged, sha, base64Bitmap, oid }) {
let bitmap;
try {
- bitmap = this._RoaringBitmap32.deserialize(Buffer.from(base64Bitmap, 'base64'), true);
+ bitmap = this._RoaringBitmap32.deserialize(base64Decode(base64Bitmap), true);
} catch (err) {
throw new ShardCorruptionError('Failed to deserialize bitmap', {
oid,
@@ -652,7 +653,7 @@ export default class StreamingBitmapIndexBuilder {
/** @type {Record<string, string>} */
const result = {};
for (const [sha, bitmap] of Object.entries(merged)) {
- result[sha] = Buffer.from(bitmap.serialize(true)).toString('base64');
+ result[sha] = base64Encode(new Uint8Array(bitmap.serialize(true)));
}
// Wrap merged result in envelope with version and checksum
@@ -664,7 +665,7 @@ export default class StreamingBitmapIndexBuilder {
let serialized;
try {
- serialized = Buffer.from(JSON.stringify(mergedEnvelope));
+ serialized = textEncode(JSON.stringify(mergedEnvelope));
} catch (err) {
throw new ShardCorruptionError('Failed to serialize merged shard', {
reason: 'serialization_error',
diff --git a/src/domain/services/SyncAuthService.js b/src/domain/services/SyncAuthService.js
index 865469b6..ebbf484c 100644
--- a/src/domain/services/SyncAuthService.js
+++ b/src/domain/services/SyncAuthService.js
@@ -13,6 +13,7 @@ import LRUCache from '../utils/LRUCache.js';
import defaultCrypto from '../utils/defaultCrypto.js';
import nullLogger from '../utils/nullLogger.js';
import { validateWriterId } from '../utils/RefLayout.js';
+import { hexEncode, hexDecode } from '../utils/bytes.js';
const SIG_VERSION = '1';
const SIG_PREFIX = 'warp-v1';
@@ -48,7 +49,7 @@ export function buildCanonicalPayload({ keyId, method, path, timestamp, nonce, c
/**
* Signs an outgoing sync request.
*
- * @param {{ method: string, path: string, contentType: string, body: Buffer|Uint8Array, secret: string, keyId: string }} params
+ * @param {{ method: string, path: string, contentType: string, body: Uint8Array, secret: string, keyId: string }} params
* @param {{ crypto?: import('../../ports/CryptoPort.js').default }} [deps]
* @returns {Promise<Record<string, string>>} Auth headers
*/
@@ -71,7 +72,7 @@ export async function signSyncRequest({ method, path, contentType, body, secret,
});
const hmacBuf = await c.hmac(HMAC_ALGO, secret, canonical);
- const signature = Buffer.from(hmacBuf).toString('hex');
+ const signature = hexEncode(hmacBuf);
return {
'x-warp-sig-version': SIG_VERSION,
@@ -267,7 +268,7 @@ export default class SyncAuthService {
/**
* Verifies the HMAC signature against the canonical payload.
*
- * @param {{ request: { method: string, url: string, headers: Record<string, string>, body?: Buffer|Uint8Array }, secret: string, keyId: string, timestamp: string, nonce: string }} params
+ * @param {{ request: { method: string, url: string, headers: Record<string, string>, body?: Uint8Array }, secret: string, keyId: string, timestamp: string, nonce: string }} params
* @returns {Promise<{ ok: false, reason: string, status: number } | { ok: true }>}
* @private
*/
@@ -290,9 +291,10 @@ export default class SyncAuthService {
const expectedBuf = await this._crypto.hmac(HMAC_ALGO, secret, canonical);
const receivedHex = request.headers['x-warp-signature'];
+ /** @type {Uint8Array} */
let receivedBuf;
try {
- receivedBuf = Buffer.from(receivedHex, 'hex');
+ receivedBuf = hexDecode(receivedHex);
} catch {
return fail('INVALID_SIGNATURE', 401);
}
@@ -304,7 +306,7 @@ export default class SyncAuthService {
let equal;
try {
equal = this._crypto.timingSafeEqual(
- Buffer.from(expectedBuf),
+ expectedBuf,
receivedBuf,
);
} catch {
@@ -321,7 +323,7 @@ export default class SyncAuthService {
/**
* Verifies an incoming sync request.
*
- * @param {{ method: string, url: string, headers: Record<string, string>, body?: Buffer|Uint8Array }} request
+ * @param {{ method: string, url: string, headers: Record<string, string>, body?: Uint8Array }} request
* @returns {Promise<{ ok: true } | { ok: false, reason: string, status: number }>}
*/
async verify(request) {
diff --git a/src/domain/services/SyncController.js b/src/domain/services/SyncController.js
index 789f4034..1072816e 100644
--- a/src/domain/services/SyncController.js
+++ b/src/domain/services/SyncController.js
@@ -48,6 +48,7 @@ import SyncTrustGate from './SyncTrustGate.js';
* @property {import('../../ports/CodecPort.js').default} _codec
* @property {import('../../ports/CryptoPort.js').default} _crypto
* @property {import('../../ports/LoggerPort.js').default|null} _logger
+ * @property {import('../../ports/BlobStoragePort.js').default|null} [_patchBlobStorage]
* @property {number} _patchesSinceCheckpoint
* @property {(op: string, t0: number, opts?: {metrics?: string, error?: Error}) => void} _logTiming
* @property {(options?: Record) => Promise} materialize
@@ -270,7 +271,7 @@ export default class SyncController {
localFrontier,
persistence,
this._host._graphName,
- { codec: this._host._codec, logger: this._host._logger || undefined }
+ { codec: this._host._codec, logger: this._host._logger || undefined, patchBlobStorage: this._host._patchBlobStorage || undefined }
);
}
diff --git a/src/domain/services/SyncProtocol.js b/src/domain/services/SyncProtocol.js
index 945563b6..e89af441 100644
--- a/src/domain/services/SyncProtocol.js
+++ b/src/domain/services/SyncProtocol.js
@@ -41,6 +41,8 @@ import nullLogger from '../utils/nullLogger.js';
import { decodePatchMessage, assertOpsCompatible, SCHEMA_V3 } from './WarpMessageCodec.js';
import { join, cloneStateV5, isKnownRawOp } from './JoinReducer.js';
import SchemaUnsupportedError from '../errors/SchemaUnsupportedError.js';
+import EncryptionError from '../errors/EncryptionError.js';
+import PersistenceError from '../errors/PersistenceError.js';
import { cloneFrontier, updateFrontier } from './Frontier.js';
import { vvDeserialize } from '../crdt/VersionVector.js';
@@ -125,7 +127,7 @@ function objectToFrontier(obj) {
* @param {import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default} persistence - Git persistence layer
* (uses CommitPort.showNode() + BlobPort.readBlob() methods)
* @param {string} sha - The 40-character commit SHA to load the patch from
- * @param {{ codec?: import('../../ports/CodecPort.js').default }} [options]
+ * @param {{ codec?: import('../../ports/CodecPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} [options]
* @returns {Promise<DecodedPatch>} The decoded and normalized patch object containing:
* - `ops`: Array of patch operations
* - `context`: VersionVector (Map) of causal dependencies
@@ -135,16 +137,35 @@ function objectToFrontier(obj) {
* @throws {Error} If the commit message cannot be decoded (malformed, wrong schema)
* @throws {Error} If the patch blob cannot be read (blob not found, I/O error)
* @throws {Error} If the patch blob cannot be CBOR-decoded (corrupted data)
+ * @throws {EncryptionError} If the patch is encrypted but no patchBlobStorage is provided
* @private
*/
-async function loadPatchFromCommit(persistence, sha, { codec: codecOpt } = /** @type {{ codec?: import('../../ports/CodecPort.js').default }} */ ({})) {
+async function loadPatchFromCommit(persistence, sha, { codec: codecOpt, patchBlobStorage } = /** @type {{ codec?: import('../../ports/CodecPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} */ ({})) {
const codec = codecOpt || defaultCodec;
// Read commit message to extract patch OID
const message = await persistence.showNode(sha);
const decoded = decodePatchMessage(message);
- // Read and decode the patch blob
- const patchBuffer = await persistence.readBlob(decoded.patchOid);
+ // Read the patch blob (encrypted or plain)
+ /** @type {Uint8Array} */
+ let patchBuffer;
+ if (decoded.encrypted) {
+ if (!patchBlobStorage) {
+ throw new EncryptionError(
+ 'This graph contains encrypted patches; provide patchBlobStorage with an encryption key',
+ );
+ }
+ patchBuffer = await patchBlobStorage.retrieve(decoded.patchOid);
+ } else {
+ patchBuffer = await persistence.readBlob(decoded.patchOid);
+ }
+ if (!patchBuffer) {
+ throw new PersistenceError(
+ `Patch blob not found: ${decoded.patchOid}`,
+ PersistenceError.E_MISSING_OBJECT,
+ { context: { oid: decoded.patchOid } },
+ );
+ }
const patch = /** @type {DecodedPatch} */ (codec.decode(patchBuffer));
// Normalize the patch (convert context from object to Map)
@@ -172,7 +193,7 @@ async function loadPatchFromCommit(persistence, sha, { codec: codecOpt } = /** @
* @param {string|null} fromSha - Start SHA (exclusive). Pass null to load ALL patches
* for this writer from the beginning of their chain.
* @param {string} toSha - End SHA (inclusive). This is typically the writer's current tip.
- * @param {{ codec?: import('../../ports/CodecPort.js').default }} [options]
+ * @param {{ codec?: import('../../ports/CodecPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} [options]
* @returns {Promise<Array<{ patch: DecodedPatch, sha: string }>>} Array of patch objects in
* chronological order (oldest first). Each entry contains:
* - `patch`: The decoded patch object
@@ -191,7 +212,7 @@ async function loadPatchFromCommit(persistence, sha, { codec: codecOpt } = /** @
* // Load ALL patches for a new writer
* const patches = await loadPatchRange(persistence, 'events', 'new-writer', null, tipSha);
*/
-export async function loadPatchRange(persistence, graphName, writerId, fromSha, toSha, { codec } = /** @type {{ codec?: import('../../ports/CodecPort.js').default }} */ ({})) {
+export async function loadPatchRange(persistence, graphName, writerId, fromSha, toSha, { codec, patchBlobStorage } = /** @type {{ codec?: import('../../ports/CodecPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} */ ({})) {
const patches = [];
let cur = toSha;
@@ -200,7 +221,7 @@ export async function loadPatchRange(persistence, graphName, writerId, fromSha,
const commitInfo = await persistence.getNodeInfo(cur);
// Load patch from commit
- const patch = await loadPatchFromCommit(persistence, cur, { codec });
+ const patch = await loadPatchFromCommit(persistence, cur, { codec, patchBlobStorage });
patches.unshift({ patch, sha: cur }); // Prepend for chronological order
// Move to parent (first parent in linear chain)
@@ -394,7 +415,7 @@ export function createSyncRequest(frontier) {
* @param {import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default} persistence - Git persistence
* layer for loading patches (uses CommitPort + BlobPort methods)
* @param {string} graphName - Graph name for error messages and logging
- * @param {{ codec?: import('../../ports/CodecPort.js').default, logger?: import('../../ports/LoggerPort.js').default }} [options]
+ * @param {{ codec?: import('../../ports/CodecPort.js').default, logger?: import('../../ports/LoggerPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} [options]
* @returns {Promise} Response containing local frontier and patches.
* Patches are ordered chronologically within each writer.
* @throws {Error} If patch loading fails for reasons other than divergence
@@ -408,7 +429,7 @@ export function createSyncRequest(frontier) {
* res.json(response);
* });
*/
-export async function processSyncRequest(request, localFrontier, persistence, graphName, { codec, logger } = /** @type {{ codec?: import('../../ports/CodecPort.js').default, logger?: import('../../ports/LoggerPort.js').default }} */ ({})) {
+export async function processSyncRequest(request, localFrontier, persistence, graphName, { codec, logger, patchBlobStorage } = /** @type {{ codec?: import('../../ports/CodecPort.js').default, logger?: import('../../ports/LoggerPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} */ ({})) {
const log = logger || nullLogger;
const remoteFrontier = objectToFrontier(request.frontier);
@@ -452,7 +473,7 @@ export async function processSyncRequest(request, localFrontier, persistence, gr
writerId,
range.from,
range.to,
- { codec }
+ { codec, patchBlobStorage }
);
for (const { patch, sha } of writerPatches) {
diff --git a/src/domain/services/WarpServeService.js b/src/domain/services/WarpServeService.js
new file mode 100644
index 00000000..217eb2c9
--- /dev/null
+++ b/src/domain/services/WarpServeService.js
@@ -0,0 +1,647 @@
+/**
+ * WarpServeService — domain service that bridges WarpGraph instances
+ * to browser clients over a WebSocketServerPort.
+ *
+ * Responsibilities:
+ * - Accepts WebSocket connections and manages client lifecycle
+ * - Sends materialized state on `open` requests
+ * - Forwards graph diffs to subscribed clients in real-time
+ * - Applies mutations (addNode, removeNode, addEdge, etc.) from clients
+ * - Supports time-travel (seek) and node inspection
+ *
+ * @module domain/services/WarpServeService
+ */
+
+import { orsetElements } from '../crdt/ORSet.js';
+import { lwwValue } from '../crdt/LWW.js';
+import { decodePropKey, isEdgePropKey, decodeEdgeKey } from './KeyCodec.js';
+
const PROTOCOL_VERSION = 1;

/**
 * Expected argument signatures for each mutation op that WebSocket clients
 * may invoke. Each entry lists the exact count and types of required
 * arguments; `'*'` marks a wildcard slot (any JSON value, size-limited).
 * @type {Record<string, ReadonlyArray<string>>}
 */
const MUTATE_OP_SIGNATURES = {
  addNode: ['string'],
  removeNode: ['string'],
  addEdge: ['string', 'string', 'string'],
  removeEdge: ['string', 'string', 'string'],
  setProperty: ['string', 'string', '*'],
  setEdgeProperty: ['string', 'string', 'string', 'string', '*'],
  // Binary content (Uint8Array) cannot survive JSON serialisation — these
  // require string input over the WebSocket JSON protocol. A future binary
  // protocol could lift this limitation.
  attachContent: ['string', 'string'],
  attachEdgeContent: ['string', 'string', 'string', 'string'],
};

/**
 * Allowlist of PatchBuilderV2 methods that WebSocket clients may invoke.
 * Derived from the signature table so the two can never disagree.
 * Prevents arbitrary method calls via untrusted `op` strings.
 * @type {Set<string>}
 */
const ALLOWED_MUTATE_OPS = new Set(Object.keys(MUTATE_OP_SIGNATURES));

/** Maximum serialized size for wildcard property values (64 KiB). */
const MAX_WILDCARD_SIZE = 65_536;

/**
 * Validates a single wildcard-typed argument against size limits.
 *
 * Strings are measured by code-unit length; objects/arrays by the length
 * of their JSON serialization. Other primitives always pass.
 *
 * @param {string} op - Mutation op name (for the error message)
 * @param {number} i - Argument index (for the error message)
 * @param {unknown} arg - The wildcard argument to check
 * @returns {string|null} Error message if too large, null if acceptable
 */
function validateWildcardArg(op, i, arg) {
  if (typeof arg === 'string') {
    return arg.length > MAX_WILDCARD_SIZE
      ? `${op}: arg[${i}] exceeds 64 KiB string limit`
      : null;
  }
  if (arg !== null && typeof arg === 'object') {
    // arg came from JSON.parse, so it is acyclic and stringify cannot throw.
    if (JSON.stringify(arg).length > MAX_WILDCARD_SIZE) {
      return `${op}: arg[${i}] exceeds 64 KiB serialized limit`;
    }
  }
  return null;
}

/**
 * Validates that args match the expected signature for an op.
 *
 * @param {string} op - Mutation op name
 * @param {unknown[]} args - Arguments supplied by the client
 * @returns {string|null} Error message if invalid, null if valid
 */
function validateMutateArgs(op, args) {
  const sig = MUTATE_OP_SIGNATURES[op];
  if (sig === undefined) {
    return `Unknown op: ${op}`;
  }
  if (!Array.isArray(args)) {
    return `${op}: args must be an array`;
  }
  if (args.length !== sig.length) {
    return `${op}: expected ${sig.length} args, got ${args.length}`;
  }
  for (const [i, expected] of sig.entries()) {
    const actual = args[i];
    if (expected === '*') {
      const sizeError = validateWildcardArg(op, i, actual);
      if (sizeError !== null) { return sizeError; }
    } else if (typeof actual !== expected) {
      return `${op}: arg[${i}] must be ${expected}, got ${typeof actual}`;
    }
  }
  return null;
}
+
+/**
+ * @typedef {import('../../ports/WebSocketServerPort.js').WsConnection} WsConnection
+ * @typedef {import('../../ports/WebSocketServerPort.js').WsServerHandle} WsServerHandle
+ */
+
+/**
+ * @typedef {Object} ClientSession
+ * @property {WsConnection} conn
+ * @property {Set} openGraphs - Graph names this client has opened
+ */
+
+/**
+ * Minimal duck-typed shape of a WarpGraph instance as consumed by
+ * WarpServeService. Uses the `import()` type directly so tsc can
+ * structurally match WarpGraph without re-declaring its overloaded
+ * signatures.
+ *
+ * @typedef {import('../WarpGraph.js').default} GraphHandle
+ */
+
+/**
+ * Envelope shape for all protocol messages.
+ * @typedef {Object} Envelope
+ * @property {number} v - Protocol version
+ * @property {string} type - Message type
+ * @property {string} [id] - Request correlation ID
+ * @property {unknown} payload - Message-specific data
+ */
+
/**
 * Serializes materialized state into a plain object suitable for JSON.
 *
 * @param {string} graphName - Graph name echoed into the wire object
 * @param {import('./JoinReducer.js').WarpStateV5} state - Materialized CRDT state
 * @returns {{ graph: string, nodes: Array<{ id: string, props: Record<string, unknown> }>, edges: Array<{ from: string, to: string, label: string }>, frontier: Record<string, number> }}
 */
function serializeState(graphName, state) {
  // Index props by node id up front so node assembly below is O(nodes + props)
  // rather than a nested scan.
  /** @type {Map<string, Record<string, unknown>>} */
  const propsByNode = new Map();
  for (const [key, reg] of state.prop) {
    // Edge properties are intentionally omitted in the MVP wire format.
    // Edges are serialized as {from, to, label} only. A future protocol
    // version should include edge props alongside node props.
    // TODO: serialize edge properties when protocol supports them
    if (isEdgePropKey(key)) { continue; }
    const { nodeId, propKey } = decodePropKey(key);
    let bucket = propsByNode.get(nodeId);
    if (bucket === undefined) {
      bucket = {};
      propsByNode.set(nodeId, bucket);
    }
    bucket[propKey] = lwwValue(reg);
  }

  const nodes = [...orsetElements(state.nodeAlive)].map((id) => ({
    id,
    props: propsByNode.get(id) || {},
  }));

  const edges = [...orsetElements(state.edgeAlive)].map((edgeKey) => {
    const { from, to, label } = decodeEdgeKey(edgeKey);
    return { from, to, label };
  });

  // observedFrontier is a Map of writer → timestamp; absent on fresh state.
  /** @type {Record<string, number>} */
  const frontier = state.observedFrontier
    ? Object.fromEntries(state.observedFrontier)
    : {};

  return { graph: graphName, nodes, edges, frontier };
}
+
/**
 * Builds a protocol envelope as a JSON string.
 *
 * @param {string} type - Message type
 * @param {unknown} payload - Message-specific data
 * @param {string} [id] - Request correlation ID (omitted from the wire when undefined)
 * @returns {string} JSON-encoded envelope
 */
function envelope(type, payload, id) {
  // Key order matters for byte-stable output: v, type, payload, then id.
  /** @type {Envelope} */
  const msg = id === undefined
    ? { v: PROTOCOL_VERSION, type, payload }
    : { v: PROTOCOL_VERSION, type, payload, id };
  return JSON.stringify(msg);
}

/**
 * Builds an error envelope with a machine-readable code and human message.
 *
 * @param {string} code - Stable error code (e.g. 'E_INVALID_PAYLOAD')
 * @param {string} message - Human-readable description
 * @param {string} [id] - Request correlation ID
 * @returns {string} JSON-encoded error envelope
 */
function errorEnvelope(code, message, id) {
  const payload = { code, message };
  return envelope('error', payload, id);
}
+
/**
 * Validates payload graph name and resolves the graph object.
 * Sends an error envelope to the client and returns null on failure.
 *
 * @param {ClientSession} session - Client to report errors to
 * @param {Envelope} msg - Incoming message (payload.graph is read)
 * @param {{ graphs: Map<string, GraphHandle>, requireOpen?: boolean }} opts
 * @returns {{ graphName: string, graph: GraphHandle }|null} Resolved graph, or null after an error was sent
 */
function resolveGraph(session, msg, { graphs, requireOpen = true }) {
  const graphName = /** @type {Record<string, unknown>} */ (msg.payload)?.graph;

  // Helper: report an error to the client and signal failure to the caller.
  const fail = (code, text) => {
    session.conn.send(errorEnvelope(code, text, msg.id));
    return null;
  };

  if (typeof graphName !== 'string' || graphName === '') {
    return fail('E_INVALID_PAYLOAD', `${msg.type}: graph must be a non-empty string`);
  }
  if (requireOpen && !session.openGraphs.has(graphName)) {
    return fail('E_NOT_OPENED', `Graph not opened: ${graphName}`);
  }
  const graph = graphs.get(graphName);
  if (graph === undefined) {
    return fail('E_UNKNOWN_GRAPH', `Unknown graph: ${graphName}`);
  }
  return { graphName, graph };
}
+
export default class WarpServeService {
  /**
   * Maximum accepted WebSocket message size, counted in UTF-16 code units
   * (~1 MiB for ASCII-heavy JSON). Shared by the pre-parse correlation-id
   * guard in `_onConnection` and the main size guard in `_onMessage` so the
   * two limits cannot drift apart.
   * @type {number}
   */
  static _MAX_MESSAGE_BYTES = 1_048_576;

  /**
   * @param {{ wsPort: import('../../ports/WebSocketServerPort.js').default, graphs: GraphHandle[] }} options
   * @throws {Error} If wsPort lacks createServer or graphs is empty
   */
  constructor({ wsPort, graphs }) {
    if (!wsPort || typeof wsPort.createServer !== 'function') {
      throw new Error('wsPort must be a WebSocketServerPort');
    }
    if (!Array.isArray(graphs) || graphs.length === 0) {
      throw new Error('At least one graph is required');
    }

    /** @type {import('../../ports/WebSocketServerPort.js').default} */
    this._wsPort = wsPort;

    /** @type {Map<string, GraphHandle>} keyed by graph name */
    this._graphs = new Map();
    for (const g of graphs) {
      this._graphs.set(g.graphName, g);
    }

    /** @type {Set<ClientSession>} all live client sessions */
    this._clients = new Set();

    /** @type {Map<string, { unsubscribe: () => void }>} graph name → diff subscription */
    this._subscriptions = new Map();

    /** @type {WsServerHandle|null} null while not listening */
    this._server = null;
  }

  /**
   * Start listening for WebSocket connections.
   *
   * @param {number} port
   * @param {string} [host]
   * @returns {Promise<{ port: number, host: string }>} Actual bound address
   * @throws {Error} If already listening, or if bind fails
   */
  async listen(port, host) {
    if (this._server) {
      throw new Error('Server is already listening');
    }

    const server = this._wsPort.createServer((conn) => this._onConnection(conn));

    // Subscribe to each graph for live diff push.
    // Subscriptions are created before bind so diffs aren't missed between
    // bind and subscribe — but we must clean up if bind fails.
    /** @type {Map<string, { unsubscribe: () => void }>} */
    const subs = new Map();
    for (const [graphName, graph] of this._graphs) {
      const sub = graph.subscribe({
        onChange: (/** @type {unknown} */ diff) => this._broadcastDiff(graphName, diff),
      });
      subs.set(graphName, sub);
    }

    try {
      const result = await server.listen(port, host);
      // Bind succeeded — commit state mutations
      this._server = server;
      this._subscriptions = subs;
      return result;
    } catch (err) {
      // Bind failed — clean up subscriptions to prevent leaked broadcast handlers
      for (const [, sub] of subs) {
        sub.unsubscribe();
      }
      throw err;
    }
  }

  /**
   * Shut down the server and clean up subscriptions and client connections.
   *
   * @returns {Promise<void>}
   */
  async close() {
    for (const [, sub] of this._subscriptions) {
      sub.unsubscribe();
    }
    this._subscriptions.clear();

    for (const client of this._clients) {
      try {
        client.conn.close();
      } catch {
        // Best-effort — connection may already be dead.
      }
    }
    this._clients.clear();

    if (this._server) {
      // Null the handle before awaiting so a throwing close() cannot leave
      // the service permanently stuck in the "already listening" state.
      const server = this._server;
      this._server = null;
      await server.close();
    }
  }

  /**
   * Handle a new WebSocket connection: register the session, send the
   * hello envelope, and wire up message/close handlers.
   *
   * @param {WsConnection} conn
   * @private
   */
  _onConnection(conn) {
    /** @type {ClientSession} */
    const session = {
      conn,
      openGraphs: new Set(),
    };
    this._clients.add(session);

    // Send hello
    conn.send(envelope('hello', {
      protocol: PROTOCOL_VERSION,
      graphs: [...this._graphs.keys()],
    }));

    conn.onMessage((raw) => {
      // Extract correlation ID before the async call so the catch handler
      // can correlate the error without re-parsing the raw message.
      // Oversized messages are NOT parsed here: doing so would defeat the
      // E_MESSAGE_TOO_LARGE guard in _onMessage and re-open the OOM window
      // that guard exists to close.
      /** @type {string|undefined} */
      let id;
      if (raw.length <= WarpServeService._MAX_MESSAGE_BYTES) {
        try { id = JSON.parse(raw).id; } catch { /* unparseable — no id */ }
      }

      this._onMessage(session, raw).catch(() => {
        // Errors are caught and sent as error envelopes inside _onMessage handlers.
        // This catch prevents unhandled rejection for truly unexpected failures.
        // Send a generic message to avoid leaking internal details (file paths,
        // stack traces, etc.) to untrusted WebSocket clients.
        try {
          session.conn.send(errorEnvelope(
            'E_INTERNAL',
            'Internal error',
            id,
          ));
        } catch {
          // Connection died mid-flight — onClose cleanup will evict it.
        }
      });
    });
    conn.onClose(() => this._clients.delete(session));
  }

  /**
   * Handle an incoming message from a client: enforce size limit, parse,
   * validate envelope shape and protocol version, then dispatch by type.
   *
   * @param {ClientSession} session
   * @param {string} raw
   * @private
   */
  async _onMessage(session, raw) {
    // Approximate check: String.length counts UTF-16 code units, not bytes.
    // For ASCII-heavy JSON this is close enough; multi-byte characters could
    // make the actual byte count higher than the code-unit count.
    if (raw.length > WarpServeService._MAX_MESSAGE_BYTES) {
      session.conn.send(errorEnvelope('E_MESSAGE_TOO_LARGE', 'Message exceeds 1 MiB limit'));
      return;
    }

    /** @type {Envelope} */
    let msg;
    try {
      msg = JSON.parse(raw);
    } catch {
      session.conn.send(errorEnvelope('E_INVALID_MESSAGE', 'Invalid JSON'));
      return;
    }

    if (!msg || typeof msg.type !== 'string') {
      session.conn.send(errorEnvelope('E_INVALID_MESSAGE', 'Missing type field'));
      return;
    }

    if (msg.v !== PROTOCOL_VERSION) {
      session.conn.send(errorEnvelope(
        'E_UNSUPPORTED_VERSION',
        `Unsupported protocol version: ${msg.v}. Expected: ${PROTOCOL_VERSION}`,
        msg.id,
      ));
      return;
    }

    switch (msg.type) {
      case 'open':
        await this._handleOpen(session, msg);
        break;
      case 'mutate':
        await this._handleMutate(session, msg);
        break;
      case 'inspect':
        await this._handleInspect(session, msg);
        break;
      case 'seek':
        await this._handleSeek(session, msg);
        break;
      default:
        session.conn.send(errorEnvelope(
          'E_UNKNOWN_TYPE',
          `Unknown message type: ${msg.type}`,
          msg.id,
        ));
    }
  }

  /**
   * Handle 'open' — client subscribes to a graph and receives its full state.
   *
   * `materialize()` is called without `receipts: true`, so the return is
   * always a plain `WarpStateV5` (not a `MaterializeResult` with receipts).
   *
   * @param {ClientSession} session
   * @param {Envelope} msg
   * @private
   */
  async _handleOpen(session, msg) {
    const resolved = resolveGraph(session, msg, { graphs: this._graphs, requireOpen: false });
    if (!resolved) { return; }
    const { graphName, graph } = resolved;

    let state;
    try {
      state = await graph.materialize();
    } catch (err) {
      session.conn.send(errorEnvelope(
        'E_MATERIALIZE_FAILED',
        err instanceof Error ? err.message : 'Materialization failed',
        msg.id,
      ));
      return;
    }

    // Only mark the graph open once materialization succeeded, so a failed
    // open does not unlock mutate/inspect/seek for this graph.
    session.openGraphs.add(graphName);
    const serialized = serializeState(graphName, state);
    session.conn.send(envelope('state', serialized, msg.id));
  }

  /**
   * Handle 'mutate' — client sends a batch of graph mutations.
   *
   * @param {ClientSession} session
   * @param {Envelope} msg
   * @private
   */
  async _handleMutate(session, msg) {
    const { payload } = msg;
    const ops = /** @type {Array<{ op: string, args: unknown[] }>|undefined} */ (
      /** @type {Record<string, unknown>} */ (payload)?.ops
    );

    if (!Array.isArray(ops)) {
      session.conn.send(errorEnvelope('E_INVALID_PAYLOAD', 'mutate: ops must be an array', msg.id));
      return;
    }

    const resolved = resolveGraph(session, msg, { graphs: this._graphs });
    if (!resolved) { return; }

    await this._applyMutateOps(session, msg, { graph: resolved.graph, ops });
  }

  /**
   * Validate and apply mutation ops for _handleMutate.
   *
   * All ops are validated before any patch is created, so an invalid batch
   * leaves the graph untouched (no partial application).
   *
   * @param {ClientSession} session
   * @param {Envelope} msg
   * @param {{ graph: GraphHandle, ops: Array<{ op: string, args: unknown[] }> }} ctx
   * @private
   */
  async _applyMutateOps(session, msg, { graph, ops }) {
    // Pre-validate ALL ops before creating a patch
    for (const { op, args } of ops) {
      if (!ALLOWED_MUTATE_OPS.has(op)) {
        session.conn.send(errorEnvelope('E_INVALID_OP', `Unknown mutation op: ${op}`, msg.id));
        return;
      }
      const argError = validateMutateArgs(op, args);
      if (argError) {
        session.conn.send(errorEnvelope('E_INVALID_ARGS', argError, msg.id));
        return;
      }
    }

    try {
      const patch = await graph.createPatch();
      for (const { op, args } of ops) {
        // op is allowlisted above, so this dynamic dispatch cannot reach
        // arbitrary methods on the patch builder.
        await /** @type {Record<string, (...a: unknown[]) => Promise<unknown>>} */ (/** @type {unknown} */ (patch))[op](...args);
      }
      const sha = await patch.commit();
      session.conn.send(envelope('ack', { sha }, msg.id));
    } catch (err) {
      session.conn.send(errorEnvelope(
        'E_MUTATE_FAILED',
        err instanceof Error ? err.message : 'Mutation failed',
        msg.id,
      ));
    }
  }

  /**
   * Handle 'inspect' — client requests a single node's properties.
   *
   * @param {ClientSession} session
   * @param {Envelope} msg
   * @private
   */
  async _handleInspect(session, msg) {
    const { payload } = msg;
    const nodeId = /** @type {string} */ (/** @type {Record<string, unknown>} */ (payload)?.nodeId);

    if (typeof nodeId !== 'string' || nodeId.length === 0) {
      session.conn.send(errorEnvelope('E_INVALID_PAYLOAD', 'inspect: nodeId must be a non-empty string', msg.id));
      return;
    }

    const resolved = resolveGraph(session, msg, { graphs: this._graphs });
    if (!resolved) { return; }
    const { graphName, graph } = resolved;

    try {
      const props = await graph.getNodeProps(nodeId);
      session.conn.send(envelope('inspect', { graph: graphName, nodeId, props }, msg.id));
    } catch (err) {
      session.conn.send(errorEnvelope(
        'E_INSPECT_FAILED',
        err instanceof Error ? err.message : 'Inspect failed',
        msg.id,
      ));
    }
  }

  /**
   * Validates a seek ceiling value. Returns an error message or null.
   * Infinity is intentionally accepted (treated as "materialize at head");
   * finite non-integers and negatives are rejected.
   *
   * @param {unknown} ceiling
   * @returns {string|null}
   * @private
   */
  _validateCeiling(ceiling) {
    if (typeof ceiling !== 'number' || ceiling < 0 || Number.isNaN(ceiling)) {
      return 'seek: ceiling must be a non-negative number';
    }
    if (Number.isFinite(ceiling) && !Number.isInteger(ceiling)) {
      return 'seek: ceiling must be an integer';
    }
    return null;
  }

  /**
   * Handle 'seek' — client requests time-travel materialization.
   *
   * @param {ClientSession} session
   * @param {Envelope} msg
   * @private
   */
  async _handleSeek(session, msg) {
    const { payload } = msg;
    const ceiling = /** @type {number} */ (/** @type {Record<string, unknown>} */ (payload)?.ceiling);

    const ceilingError = this._validateCeiling(ceiling);
    if (ceilingError) {
      session.conn.send(errorEnvelope('E_INVALID_PAYLOAD', ceilingError, msg.id));
      return;
    }

    const resolved = resolveGraph(session, msg, { graphs: this._graphs });
    if (!resolved) { return; }
    const { graphName, graph } = resolved;

    try {
      // Infinity means "no ceiling" — materialize at head.
      const opts = Number.isFinite(ceiling) ? { ceiling } : {};
      const state = await graph.materialize(opts);
      const serialized = serializeState(graphName, state);
      session.conn.send(envelope('state', serialized, msg.id));
    } catch (err) {
      session.conn.send(errorEnvelope(
        'E_SEEK_FAILED',
        err instanceof Error ? err.message : 'Seek failed',
        msg.id,
      ));
    }
  }

  /**
   * Broadcast a diff to all clients subscribed to the given graph.
   *
   * @param {string} graphName
   * @param {unknown} diff
   * @private
   */
  _broadcastDiff(graphName, diff) {
    const msg = envelope('diff', { graph: graphName, diff });
    /** @type {ClientSession[]} */
    const dead = [];
    for (const client of this._clients) {
      if (client.openGraphs.has(graphName)) {
        try {
          client.conn.send(msg);
        } catch {
          // Dead connection — evict after iteration. No logger is
          // available at this layer; the `onClose` handler also evicts,
          // but `send()` can throw before `close` fires on a reset
          // connection. We must not delete from the Set mid-iteration.
          dead.push(client);
        }
      }
    }
    for (const client of dead) {
      this._clients.delete(client);
    }
  }
}
diff --git a/src/domain/services/WarpStateIndexBuilder.js b/src/domain/services/WarpStateIndexBuilder.js
index 3b8cd08d..394032f4 100644
--- a/src/domain/services/WarpStateIndexBuilder.js
+++ b/src/domain/services/WarpStateIndexBuilder.js
@@ -88,7 +88,7 @@ export default class WarpStateIndexBuilder {
/**
* Serializes the index to a tree structure of buffers.
*
- * @returns {Promise>} Map of path → serialized content
+ * @returns {Promise>} Map of path → serialized content
*/
async serialize() {
return await this._builder.serialize();
@@ -109,7 +109,7 @@ export default class WarpStateIndexBuilder {
*
* @param {import('./JoinReducer.js').WarpStateV5} state - The materialized state
* @param {{ crypto?: import('../../ports/CryptoPort.js').default }} [options] - Configuration
- * @returns {Promise<{tree: Record, stats: {nodes: number, edges: number}}>} Serialized index and stats
+ * @returns {Promise<{tree: Record, stats: {nodes: number, edges: number}}>} Serialized index and stats
*
* @example
* import { buildWarpStateIndex } from './WarpStateIndexBuilder.js';
diff --git a/src/domain/services/WormholeService.js b/src/domain/services/WormholeService.js
index 186a162a..bbfc0c61 100644
--- a/src/domain/services/WormholeService.js
+++ b/src/domain/services/WormholeService.js
@@ -23,6 +23,8 @@
import defaultCodec from '../utils/defaultCodec.js';
import ProvenancePayload from './ProvenancePayload.js';
import WormholeError from '../errors/WormholeError.js';
+import EncryptionError from '../errors/EncryptionError.js';
+import PersistenceError from '../errors/PersistenceError.js';
import { detectMessageKind, decodePatchMessage } from './WarpMessageCodec.js';
/**
@@ -61,12 +63,13 @@ async function verifyShaExists(persistence, sha, paramName) {
/**
* Processes a single commit in the wormhole chain.
- * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default, sha: string, graphName: string, expectedWriter: string|null, codec?: import('../../ports/CodecPort.js').default }} opts - Options
+ * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default, sha: string, graphName: string, expectedWriter: string|null, codec?: import('../../ports/CodecPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} opts - Options
* @returns {Promise<{patch: import('../types/WarpTypesV2.js').PatchV2, sha: string, writerId: string, parentSha: string|null}>}
* @throws {WormholeError} On validation errors
+ * @throws {EncryptionError} If the patch is encrypted but no patchBlobStorage is provided
* @private
*/
-async function processCommit({ persistence, sha, graphName, expectedWriter, codec: codecOpt }) {
+async function processCommit({ persistence, sha, graphName, expectedWriter, codec: codecOpt, patchBlobStorage }) {
const codec = codecOpt || defaultCodec;
const nodeInfo = await persistence.getNodeInfo(sha);
const { message, parents } = nodeInfo;
@@ -95,7 +98,25 @@ async function processCommit({ persistence, sha, graphName, expectedWriter, code
});
}
- const patchBuffer = await persistence.readBlob(patchMeta.patchOid);
+ /** @type {Uint8Array} */
+ let patchBuffer;
+ if (patchMeta.encrypted) {
+ if (!patchBlobStorage) {
+ throw new EncryptionError(
+ 'This graph contains encrypted patches; provide patchBlobStorage with an encryption key',
+ );
+ }
+ patchBuffer = await patchBlobStorage.retrieve(patchMeta.patchOid);
+ } else {
+ patchBuffer = await persistence.readBlob(patchMeta.patchOid);
+ }
+ if (!patchBuffer) {
+ throw new PersistenceError(
+ `Patch blob not found: ${patchMeta.patchOid}`,
+ PersistenceError.E_MISSING_OBJECT,
+ { context: { oid: patchMeta.patchOid } },
+ );
+ }
const patch = /** @type {import('../types/WarpTypesV2.js').PatchV2} */ (codec.decode(patchBuffer));
return {
@@ -130,20 +151,21 @@ async function processCommit({ persistence, sha, graphName, expectedWriter, code
* must be an ancestor of `toSha` in the writer's patch chain. Both endpoints
* are inclusive in the wormhole.
*
- * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default, graphName: string, fromSha: string, toSha: string, codec?: import('../../ports/CodecPort.js').default }} options - Wormhole creation options
+ * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default, graphName: string, fromSha: string, toSha: string, codec?: import('../../ports/CodecPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} options - Wormhole creation options
* @returns {Promise} The created wormhole
* @throws {WormholeError} If fromSha or toSha doesn't exist (E_WORMHOLE_SHA_NOT_FOUND)
* @throws {WormholeError} If fromSha is not an ancestor of toSha (E_WORMHOLE_INVALID_RANGE)
* @throws {WormholeError} If commits span multiple writers (E_WORMHOLE_MULTI_WRITER)
* @throws {WormholeError} If a commit is not a patch commit (E_WORMHOLE_NOT_PATCH)
+ * @throws {EncryptionError} If patches are encrypted but no patchBlobStorage is provided
*/
-export async function createWormhole({ persistence, graphName, fromSha, toSha, codec }) {
+export async function createWormhole({ persistence, graphName, fromSha, toSha, codec, patchBlobStorage }) {
validateSha(fromSha, 'fromSha');
validateSha(toSha, 'toSha');
await verifyShaExists(persistence, fromSha, 'fromSha');
await verifyShaExists(persistence, toSha, 'toSha');
- const patches = await collectPatchRange({ persistence, graphName, fromSha, toSha, codec });
+ const patches = await collectPatchRange({ persistence, graphName, fromSha, toSha, codec, patchBlobStorage });
// Reverse to get oldest-first order (as required by ProvenancePayload)
patches.reverse();
@@ -161,18 +183,18 @@ export async function createWormhole({ persistence, graphName, fromSha, toSha, c
* Walks the parent chain from toSha towards fromSha, collecting and
* validating each commit along the way.
*
- * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default, graphName: string, fromSha: string, toSha: string, codec?: import('../../ports/CodecPort.js').default }} options
+ * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default, graphName: string, fromSha: string, toSha: string, codec?: import('../../ports/CodecPort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} options
* @returns {Promise>} Patches in newest-first order
* @throws {WormholeError} If fromSha is not an ancestor of toSha or range is empty
* @private
*/
-async function collectPatchRange({ persistence, graphName, fromSha, toSha, codec }) {
+async function collectPatchRange({ persistence, graphName, fromSha, toSha, codec, patchBlobStorage }) {
const patches = [];
let currentSha = toSha;
let writerId = null;
while (currentSha) {
- const result = await processCommit({ persistence, sha: currentSha, graphName, expectedWriter: writerId, codec });
+ const result = await processCommit({ persistence, sha: currentSha, graphName, expectedWriter: writerId, codec, patchBlobStorage });
writerId = result.writerId;
patches.push({ patch: result.patch, sha: result.sha, writerId: result.writerId });
diff --git a/src/domain/trust/TrustCanonical.js b/src/domain/trust/TrustCanonical.js
index 10433f6d..b5072c43 100644
--- a/src/domain/trust/TrustCanonical.js
+++ b/src/domain/trust/TrustCanonical.js
@@ -8,35 +8,39 @@
* @see docs/specs/TRUST_V1_CRYPTO.md
*/
-import { createHash } from 'node:crypto';
import { recordIdPayload, signaturePayload } from './canonical.js';
+import defaultCrypto from '../utils/defaultCrypto.js';
+import { textEncode } from '../utils/bytes.js';
/**
* Computes the record ID (SHA-256 hex digest) for a trust record.
*
* @param {Record} record - Full trust record
- * @returns {string} 64-character lowercase hex string
+ * @param {{ crypto?: import('../../ports/CryptoPort.js').default }} [deps] - Optional dependency injection
+ * @returns {Promise} 64-character lowercase hex string
*/
-export function computeRecordId(record) {
- return createHash('sha256').update(recordIdPayload(record)).digest('hex');
+export async function computeRecordId(record, { crypto } = {}) {
+ const c = crypto || defaultCrypto;
+ return await c.hash('sha256', recordIdPayload(record));
}
/**
- * Computes the signature payload as a Buffer (UTF-8 bytes).
+ * Computes the signature payload as UTF-8 bytes.
*
* @param {Record} record - Full trust record (signature will be stripped)
- * @returns {Buffer} UTF-8 encoded bytes of the domain-separated canonical string
+ * @returns {Uint8Array} UTF-8 encoded bytes of the domain-separated canonical string
*/
export function computeSignaturePayload(record) {
- return Buffer.from(signaturePayload(record), 'utf8');
+ return textEncode(signaturePayload(record));
}
/**
* Verifies that a record's recordId matches its content.
*
* @param {Record} record - Trust record with `recordId` field
- * @returns {boolean} true if recordId matches computed value
+ * @param {{ crypto?: import('../../ports/CryptoPort.js').default }} [deps] - Optional dependency injection
+ * @returns {Promise} true if recordId matches computed value
*/
-export function verifyRecordId(record) {
- return record.recordId === computeRecordId(record);
+export async function verifyRecordId(record, { crypto } = {}) {
+ return record.recordId === await computeRecordId(record, { crypto });
}
diff --git a/src/domain/trust/TrustRecordService.js b/src/domain/trust/TrustRecordService.js
index cb62a23f..7bde1316 100644
--- a/src/domain/trust/TrustRecordService.js
+++ b/src/domain/trust/TrustRecordService.js
@@ -71,7 +71,7 @@ export class TrustRecordService {
}
// 2. RecordId integrity
- if (!verifyRecordId(record)) {
+ if (!await verifyRecordId(record)) {
throw new TrustError(
'Trust record recordId does not match content',
{ code: 'E_TRUST_RECORD_ID_MISMATCH' },
@@ -175,9 +175,9 @@ export class TrustRecordService {
* - First record has prev=null
*
* @param {Array>} records - Records in chain order (oldest first)
- * @returns {{valid: boolean, errors: Array<{index: number, error: string}>}}
+ * @returns {Promise<{valid: boolean, errors: Array<{index: number, error: string}>}>}
*/
- verifyChain(records) {
+ async verifyChain(records) {
/** @type {Array<{index: number, error: string}>} */
const errors = [];
const seenIds = new Set();
@@ -193,7 +193,7 @@ export class TrustRecordService {
}
// RecordId integrity
- if (!verifyRecordId(record)) {
+ if (!await verifyRecordId(record)) {
errors.push({ index: i, error: 'RecordId does not match content' });
}
@@ -343,8 +343,7 @@ export class TrustRecordService {
async _persistRecord(ref, record, parentSha) {
// Encode record as CBOR blob
const encoded = this._codec.encode(record);
- // Buffer.from() ensures Uint8Array from codec is accepted by writeBlob
- const blobOid = await this._persistence.writeBlob(Buffer.from(encoded));
+ const blobOid = await this._persistence.writeBlob(encoded);
// Create tree with single entry (mktree format)
const treeOid = await this._persistence.writeTree([`100644 blob ${blobOid}\trecord.cbor`]);
diff --git a/src/domain/utils/bytes.js b/src/domain/utils/bytes.js
new file mode 100644
index 00000000..eb2411be
--- /dev/null
+++ b/src/domain/utils/bytes.js
@@ -0,0 +1,202 @@
+/**
+ * Pure byte-manipulation utilities for the domain layer.
+ *
+ * These functions replace Node.js Buffer methods with portable
+ * Uint8Array-based equivalents that work identically on Node,
+ * Bun, Deno, and browsers.
+ *
+ * @module domain/utils/bytes
+ */
+
+const _encoder = new TextEncoder();
+const _decoder = new TextDecoder();
+
+/** @type {readonly string[]} */
+const HEX_TABLE = Object.freeze(
+ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0'))
+);
+
+/**
+ * Encodes a Uint8Array to a lowercase hex string.
+ *
+ * @param {Uint8Array} bytes
+ * @returns {string} Lowercase hex string
+ */
+export function hexEncode(bytes) {
+ let hex = '';
+ for (let i = 0; i < bytes.length; i++) {
+ hex += HEX_TABLE[bytes[i]];
+ }
+ return hex;
+}
+
+/**
+ * Returns the numeric value of a hex character code, or -1 if invalid.
+ *
+ * @param {number} cc - Character code
+ * @returns {number} 0–15 or -1
+ */
+function hexCharValue(cc) {
+ // 0-9: 0x30–0x39
+ if (cc >= 0x30 && cc <= 0x39) { return cc - 0x30; }
+ // A-F: 0x41–0x46
+ if (cc >= 0x41 && cc <= 0x46) { return cc - 0x41 + 10; }
+ // a-f: 0x61–0x66
+ if (cc >= 0x61 && cc <= 0x66) { return cc - 0x61 + 10; }
+ return -1;
+}
+
+/**
+ * Decodes a hex string to a Uint8Array.
+ *
+ * @param {string} hex - Even-length hex string
+ * @returns {Uint8Array}
+ */
+export function hexDecode(hex) {
+ if (hex.length % 2 !== 0) {
+ throw new RangeError(`Invalid hex string (odd length ${hex.length}): ${hex.length > 20 ? `${hex.slice(0, 20)}…` : hex}`);
+ }
+ const len = hex.length >>> 1;
+ const bytes = new Uint8Array(len);
+ for (let i = 0; i < len; i++) {
+ const hi = hexCharValue(hex.charCodeAt(i * 2));
+ const lo = hexCharValue(hex.charCodeAt(i * 2 + 1));
+ if (hi === -1 || lo === -1) {
+ throw new RangeError(`Invalid hex string (length ${hex.length}): ${hex.length > 20 ? `${hex.slice(0, 20)}…` : hex}`);
+ }
+ bytes[i] = (hi << 4) | lo;
+ }
+ return bytes;
+}
+
+const B64_CHARS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
+
+const B64_LOOKUP = new Uint8Array(128);
+for (let i = 0; i < B64_CHARS.length; i++) {
+ B64_LOOKUP[B64_CHARS.charCodeAt(i)] = i;
+}
+
+/**
+ * Encodes a Uint8Array to a base64 string.
+ *
+ * Uses a direct table-based implementation that avoids intermediate binary
+ * strings, preventing memory spikes on large buffers.
+ *
+ * @param {Uint8Array} bytes
+ * @returns {string} Base64-encoded string
+ */
+export function base64Encode(bytes) {
+ let result = '';
+ const len = bytes.length;
+ const remainder = len % 3;
+ const mainLen = len - remainder;
+
+ for (let i = 0; i < mainLen; i += 3) {
+ const n = (bytes[i] << 16) | (bytes[i + 1] << 8) | bytes[i + 2];
+ result += B64_CHARS[(n >>> 18) & 0x3f]
+ + B64_CHARS[(n >>> 12) & 0x3f]
+ + B64_CHARS[(n >>> 6) & 0x3f]
+ + B64_CHARS[n & 0x3f];
+ }
+
+ if (remainder === 1) {
+ const n = bytes[mainLen];
+ result += `${B64_CHARS[(n >>> 2) & 0x3f]}${B64_CHARS[(n << 4) & 0x3f]}==`;
+ } else if (remainder === 2) {
+ const n = (bytes[mainLen] << 8) | bytes[mainLen + 1];
+ result += `${B64_CHARS[(n >>> 10) & 0x3f]}${B64_CHARS[(n >>> 4) & 0x3f]}${B64_CHARS[(n << 2) & 0x3f]}=`;
+ }
+
+ return result;
+}
+
+/**
+ * Validates a base64 string's character set and length.
+ *
+ * @param {string} b64 - Base64-encoded string to validate
+ * @throws {RangeError} If the string contains invalid characters or has an
+ * impossible length (length % 4 === 1 can never represent whole bytes).
+ */
+function validateBase64(b64) {
+ if (!/^[A-Za-z0-9+/]*={0,2}$/.test(b64)) {
+ throw new RangeError(`Invalid base64 string: ${b64.length > 20 ? `${b64.slice(0, 20)}…` : b64}`);
+ }
+ // Length % 4 === 1 is always invalid (a single base64 char encodes only 6 bits,
+ // which cannot form a complete byte). Accept remainders 0, 2, and 3; a padded string's length is always a multiple of 4.
+ if (b64.length % 4 === 1) {
+ throw new RangeError(`Invalid base64 string (bad length ${b64.length}): ${b64.length > 20 ? `${b64.slice(0, 20)}…` : b64}`);
+ }
+}
+
+/**
+ * Decodes a base64 string to a Uint8Array.
+ *
+ * Uses a direct table-based implementation that avoids intermediate binary
+ * strings, preventing memory spikes on large buffers.
+ *
+ * @param {string} b64 - Base64-encoded string
+ * @returns {Uint8Array}
+ */
+export function base64Decode(b64) {
+ validateBase64(b64);
+ let len = b64.length;
+ if (b64[len - 1] === '=') { len--; }
+ if (b64[len - 1] === '=') { len--; }
+
+ const outLen = (len * 3) >>> 2;
+ const bytes = new Uint8Array(outLen);
+ let j = 0;
+
+ for (let i = 0; i < len; i += 4) {
+ const a = B64_LOOKUP[b64.charCodeAt(i)];
+ const b = B64_LOOKUP[b64.charCodeAt(i + 1)];
+ const c = i + 2 < len ? B64_LOOKUP[b64.charCodeAt(i + 2)] : 0;
+ const d = i + 3 < len ? B64_LOOKUP[b64.charCodeAt(i + 3)] : 0;
+
+ bytes[j++] = (a << 2) | (b >>> 4);
+ if (j < outLen) { bytes[j++] = ((b << 4) | (c >>> 2)) & 0xff; }
+ if (j < outLen) { bytes[j++] = ((c << 6) | d) & 0xff; }
+ }
+
+ return bytes;
+}
+
+/**
+ * Concatenates multiple Uint8Arrays into a single Uint8Array.
+ *
+ * @param {...Uint8Array} arrays
+ * @returns {Uint8Array}
+ */
+export function concatBytes(...arrays) {
+ let totalLength = 0;
+ for (let i = 0; i < arrays.length; i++) {
+ totalLength += arrays[i].length;
+ }
+ const result = new Uint8Array(totalLength);
+ let offset = 0;
+ for (let i = 0; i < arrays.length; i++) {
+ result.set(arrays[i], offset);
+ offset += arrays[i].length;
+ }
+ return result;
+}
+
+/**
+ * Encodes a string to UTF-8 bytes.
+ *
+ * @param {string} str
+ * @returns {Uint8Array}
+ */
+export function textEncode(str) {
+ return _encoder.encode(str);
+}
+
+/**
+ * Decodes UTF-8 bytes to a string.
+ *
+ * @param {Uint8Array} bytes
+ * @returns {string}
+ */
+export function textDecode(bytes) {
+ return _decoder.decode(bytes);
+}
diff --git a/src/domain/utils/defaultCrypto.js b/src/domain/utils/defaultCrypto.js
index d199cc5c..f0487ae5 100644
--- a/src/domain/utils/defaultCrypto.js
+++ b/src/domain/utils/defaultCrypto.js
@@ -6,30 +6,52 @@
* the infrastructure layer. This follows the same pattern as
* defaultCodec.js and defaultClock.js.
*
- * Since git-warp requires Git (and therefore Node 22+, Deno, or Bun),
- * node:crypto is always available.
+ * In Node/Bun/Deno, node:crypto loads normally. When the import
+ * fails (e.g., Vite stubs `node:crypto` in browser bundles),
+ * callers must inject a CryptoPort explicitly.
*
* @module domain/utils/defaultCrypto
*/
-import {
- createHash,
- createHmac,
- timingSafeEqual as nodeTimingSafeEqual,
-} from 'node:crypto';
+/** @type {Function|null} */
+let _createHash = null;
+/** @type {Function|null} */
+let _createHmac = null;
+/** @type {Function|null} */
+let _timingSafeEqual = null;
+
+try {
+ const nodeCrypto = await import('node:crypto');
+ _createHash = nodeCrypto.createHash;
+ _createHmac = nodeCrypto.createHmac;
+ _timingSafeEqual = nodeCrypto.timingSafeEqual;
+} catch {
+ // Import failed (bundler stub, unsupported runtime, etc.) —
+ // caller must inject a CryptoPort explicitly.
+}
/** @type {import('../../ports/CryptoPort.js').default} */
const defaultCrypto = {
// eslint-disable-next-line @typescript-eslint/require-await -- async matches CryptoPort contract
async hash(algorithm, data) {
- return createHash(algorithm).update(data).digest('hex');
+ if (!_createHash) {
+ throw new Error('No crypto available. Inject a CryptoPort explicitly.');
+ }
+ return _createHash(algorithm).update(data).digest('hex');
},
// eslint-disable-next-line @typescript-eslint/require-await -- async matches CryptoPort contract
async hmac(algorithm, key, data) {
- return createHmac(algorithm, key).update(data).digest();
+ if (!_createHmac) {
+ throw new Error('No crypto available. Inject a CryptoPort explicitly.');
+ }
+ const result = _createHmac(algorithm, key).update(data).digest();
+ return new Uint8Array(result);
},
timingSafeEqual(a, b) {
- return nodeTimingSafeEqual(a, b);
+ if (!_timingSafeEqual) {
+ throw new Error('No crypto available. Inject a CryptoPort explicitly.');
+ }
+ return _timingSafeEqual(a, b);
},
};
diff --git a/src/domain/utils/seekCacheKey.js b/src/domain/utils/seekCacheKey.js
index a4c03385..201664b2 100644
--- a/src/domain/utils/seekCacheKey.js
+++ b/src/domain/utils/seekCacheKey.js
@@ -10,7 +10,7 @@
* @module domain/utils/seekCacheKey
*/
-import { createHash } from 'node:crypto';
+import defaultCrypto from './defaultCrypto.js';
const KEY_VERSION = 'v1';
@@ -18,15 +18,18 @@ const KEY_VERSION = 'v1';
* Builds a deterministic, collision-resistant cache key from a ceiling tick
* and writer frontier snapshot.
*
+ * This function is intentionally async — WebCrypto's `digest()` is async-only,
+ * and `defaultCrypto.hash()` uses it. Both call sites are already async.
+ *
* @param {number} ceiling - Lamport ceiling tick
 * @param {Map<string, string>} frontier - Map of writerId → tip SHA
- * @returns {string} Cache key, e.g. `v1:t42-a1b2c3d4...` (32+ hex chars in hash)
+ * @returns {Promise<string>} Cache key, e.g. `v1:t42-a1b2c3d4...` (32+ hex chars in hash)
*/
-export function buildSeekCacheKey(ceiling, frontier) {
+export async function buildSeekCacheKey(ceiling, frontier) {
const sorted = [...frontier.entries()].sort((a, b) =>
a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0
);
const payload = sorted.map(([w, sha]) => `${w}:${sha}`).join('\n');
- const hash = createHash('sha256').update(payload).digest('hex');
+ const hash = await defaultCrypto.hash('sha256', payload);
return `${KEY_VERSION}:t${ceiling}-${hash}`;
}
diff --git a/src/domain/warp/Writer.js b/src/domain/warp/Writer.js
index 7f9f9b19..be2e8e2b 100644
--- a/src/domain/warp/Writer.js
+++ b/src/domain/warp/Writer.js
@@ -36,9 +36,9 @@ export class Writer {
/**
* Creates a new Writer instance.
*
- * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default & import('../../ports/TreePort.js').default & import('../../ports/RefPort.js').default, graphName: string, writerId: string, versionVector: import('../crdt/VersionVector.js').VersionVector, getCurrentState: () => import('../services/JoinReducer.js').WarpStateV5 | null, onCommitSuccess?: (result: {patch: import('../types/WarpTypesV2.js').PatchV2, sha: string}) => void | Promise, onDeleteWithData?: 'reject'|'cascade'|'warn', codec?: import('../../ports/CodecPort.js').default, logger?: import('../../ports/LoggerPort.js').default }} options
+ * @param {{ persistence: import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default & import('../../ports/TreePort.js').default & import('../../ports/RefPort.js').default, graphName: string, writerId: string, versionVector: import('../crdt/VersionVector.js').VersionVector, getCurrentState: () => import('../services/JoinReducer.js').WarpStateV5 | null, onCommitSuccess?: (result: {patch: import('../types/WarpTypesV2.js').PatchV2, sha: string}) => void | Promise, onDeleteWithData?: 'reject'|'cascade'|'warn', codec?: import('../../ports/CodecPort.js').default, logger?: import('../../ports/LoggerPort.js').default, blobStorage?: import('../../ports/BlobStoragePort.js').default, patchBlobStorage?: import('../../ports/BlobStoragePort.js').default }} options
*/
- constructor({ persistence, graphName, writerId, versionVector, getCurrentState, onCommitSuccess, onDeleteWithData = 'warn', codec, logger }) {
+ constructor({ persistence, graphName, writerId, versionVector, getCurrentState, onCommitSuccess, onDeleteWithData = 'warn', codec, logger, blobStorage, patchBlobStorage }) {
validateWriterId(writerId);
/** @type {import('../../ports/CommitPort.js').default & import('../../ports/BlobPort.js').default & import('../../ports/TreePort.js').default & import('../../ports/RefPort.js').default} Wider than Writer's own calls; satisfies PatchBuilderV2 constructor. */
@@ -68,6 +68,12 @@ export class Writer {
/** @type {import('../../ports/LoggerPort.js').default} */
this._logger = logger || nullLogger;
+ /** @type {import('../../ports/BlobStoragePort.js').default|null} */
+ this._blobStorage = blobStorage || null;
+
+ /** @type {import('../../ports/BlobStoragePort.js').default|null} */
+ this._patchBlobStorage = patchBlobStorage || null;
+
/** @type {boolean} */
this._commitInProgress = false;
}
@@ -148,6 +154,8 @@ export class Writer {
onDeleteWithData: this._onDeleteWithData,
codec: this._codec,
logger: this._logger,
+ blobStorage: this._blobStorage || undefined,
+ patchBlobStorage: this._patchBlobStorage || undefined,
});
// Return PatchSession wrapping the builder
diff --git a/src/domain/warp/_internal.js b/src/domain/warp/_internal.js
index 6a7d65cd..6fe2c4e4 100644
--- a/src/domain/warp/_internal.js
+++ b/src/domain/warp/_internal.js
@@ -11,6 +11,13 @@
export { default as QueryError } from '../errors/QueryError.js';
export { default as ForkError } from '../errors/ForkError.js';
+/**
+ * Extended WarpGraph type that includes mixin methods wired at runtime.
+ * Use this as the `@this` type in method files that call other mixin methods.
+ *
+ * @typedef {import('../WarpGraph.js').default & { _readPatchBlob(patchMeta: { patchOid: string, encrypted: boolean }): Promise }} WarpGraphWithMixins
+ */
+
// ── Shared constants ────────────────────────────────────────────────────────
export const DEFAULT_ADJACENCY_CACHE_SIZE = 3;
export const E_NO_STATE_MSG = 'No materialized state. Call materialize() before querying, or use autoMaterialize: true (the default). See https://github.com/git-stunts/git-warp#materialization';
diff --git a/src/domain/warp/checkpoint.methods.js b/src/domain/warp/checkpoint.methods.js
index 7cc59ac1..92040a63 100644
--- a/src/domain/warp/checkpoint.methods.js
+++ b/src/domain/warp/checkpoint.methods.js
@@ -258,7 +258,7 @@ export async function _validateMigrationBoundary() {
* typically writes a new tip, so a schema:2+ tip implies the writer has
* been migrated.
*
- * @this {import('../WarpGraph.js').default}
+ * @this {import('./_internal.js').WarpGraphWithMixins}
* @returns {Promise} True if any writer tip is schema:1 (or omits `schema`, treated as legacy v1)
* @private
*/
@@ -279,7 +279,7 @@ export async function _hasSchema1Patches() {
if (kind === 'patch') {
const patchMeta = decodePatchMessage(nodeInfo.message);
- const patchBuffer = await this._persistence.readBlob(patchMeta.patchOid);
+ const patchBuffer = await this._readPatchBlob(patchMeta);
const patch = /** @type {{schema?: number}} */ (this._codec.decode(patchBuffer));
// If any patch has schema:1, we have v1 history
diff --git a/src/domain/warp/materializeAdvanced.methods.js b/src/domain/warp/materializeAdvanced.methods.js
index 74049660..3de3b631 100644
--- a/src/domain/warp/materializeAdvanced.methods.js
+++ b/src/domain/warp/materializeAdvanced.methods.js
@@ -281,9 +281,13 @@ export async function _materializeWithCeiling(ceiling, collectReceipts, t0) {
// Persistent cache check — skip when collectReceipts is requested
let cacheKey;
if (this._seekCache && !collectReceipts) {
- cacheKey = buildSeekCacheKey(ceiling, frontier);
try {
- const cached = await this._seekCache.get(cacheKey);
+ cacheKey = await buildSeekCacheKey(ceiling, frontier);
+ } catch {
+ // crypto unavailable (e.g., browser) — treat as cache miss
+ }
+ try {
+ const cached = cacheKey ? await this._seekCache.get(cacheKey) : undefined;
if (cached) {
try {
const state = deserializeFullStateV5(cached.buffer, { codec: this._codec });
@@ -299,7 +303,9 @@ export async function _materializeWithCeiling(ceiling, collectReceipts, t0) {
return freezePublicState(state);
} catch {
// Corrupted payload — self-heal by removing the bad entry
- try { await this._seekCache.delete(cacheKey); } catch { /* best-effort */ }
+ if (cacheKey) {
+ try { await this._seekCache.delete(cacheKey); } catch { /* best-effort */ }
+ }
}
}
} catch {
@@ -347,12 +353,16 @@ export async function _materializeWithCeiling(ceiling, collectReceipts, t0) {
// Store to persistent cache (fire-and-forget — failure is non-fatal)
if (this._seekCache && !collectReceipts && allPatches.length > 0) {
- if (!cacheKey) {
- cacheKey = buildSeekCacheKey(ceiling, frontier);
+ try {
+ if (!cacheKey) {
+ cacheKey = await buildSeekCacheKey(ceiling, frontier);
+ }
+ const buf = serializeFullStateV5(state, { codec: this._codec });
+ this._persistSeekCacheEntry(cacheKey, buf, state)
+ .catch(() => {});
+ } catch {
+ // crypto unavailable — skip cache write
}
- const buf = serializeFullStateV5(state, { codec: this._codec });
- this._persistSeekCacheEntry(cacheKey, buf, state)
- .catch(() => {});
}
// Skip auto-checkpoint and GC — this is an exploratory read
@@ -430,7 +440,7 @@ export async function _restoreIndexFromCache(indexTreeOid) {
* builds the target frontier from current writer tips, and applies
* incremental patches since the checkpoint.
*
- * @this {import('../WarpGraph.js').default}
+ * @this {import('./_internal.js').WarpGraphWithMixins}
* @param {string} checkpointSha - The checkpoint commit SHA
* @returns {Promise} The materialized graph state at the checkpoint
* @throws {Error} If checkpoint SHA is invalid or not found
@@ -472,7 +482,7 @@ export async function materializeAt(checkpointSha) {
}
const patchMeta = decodePatchMessage(message);
- const patchBuffer = await this._persistence.readBlob(patchMeta.patchOid);
+ const patchBuffer = await this._readPatchBlob(patchMeta);
const patch = /** @type {import('../types/WarpTypesV2.js').PatchV2} */ (this._codec.decode(patchBuffer));
patches.push({ patch, sha: currentSha });
diff --git a/src/domain/warp/patch.methods.js b/src/domain/warp/patch.methods.js
index 5ab95e5c..db66bfb4 100644
--- a/src/domain/warp/patch.methods.js
+++ b/src/domain/warp/patch.methods.js
@@ -17,6 +17,8 @@ import { buildWriterRef, buildWritersPrefix, parseWriterIdFromRef } from '../uti
import { decodePatchMessage, detectMessageKind } from '../services/WarpMessageCodec.js';
import { Writer } from './Writer.js';
import { generateWriterId, resolveWriterId } from '../utils/WriterId.js';
+import EncryptionError from '../errors/EncryptionError.js';
+import PersistenceError from '../errors/PersistenceError.js';
/** @typedef {import('../types/WarpPersistence.js').CorePersistence} CorePersistence */
@@ -48,6 +50,8 @@ export async function createPatch() {
onCommitSuccess: (/** @type {{patch?: import('../types/WarpTypesV2.js').PatchV2, sha?: string}} */ opts) => this._onPatchCommitted(this._writerId, opts),
codec: this._codec,
logger: this._logger || undefined,
+ blobStorage: this._blobStorage || undefined,
+ patchBlobStorage: this._patchBlobStorage || undefined,
});
}
@@ -171,7 +175,7 @@ export async function _nextLamport() {
* Walks commits from the tip SHA back to the first patch commit,
* collecting all patches along the way.
*
- * @this {import('../WarpGraph.js').default}
+ * @this {import('./_internal.js').WarpGraphWithMixins}
* @param {string} writerId - The writer ID to load patches for
* @param {string|null} [stopAtSha=null] - Stop walking when reaching this SHA (exclusive)
* @returns {Promise>} Array of patches
@@ -202,8 +206,8 @@ export async function _loadWriterPatches(writerId, stopAtSha = null) {
// Decode the patch message to get patchOid
const patchMeta = decodePatchMessage(message);
- // Read the patch blob
- const patchBuffer = await this._persistence.readBlob(patchMeta.patchOid);
+ // Read the patch blob (encrypted or plain)
+ const patchBuffer = await this._readPatchBlob(patchMeta);
const decoded = /** @type {import('../types/WarpTypesV2.js').PatchV2} */ (this._codec.decode(patchBuffer));
patches.push({ patch: decoded, sha: currentSha });
@@ -326,6 +330,8 @@ export async function writer(writerId) {
onCommitSuccess: /** @type {(result: {patch: import('../types/WarpTypesV2.js').PatchV2, sha: string}) => void} */ ((/** @type {{patch?: import('../types/WarpTypesV2.js').PatchV2, sha?: string}} */ opts) => this._onPatchCommitted(resolvedWriterId, opts)),
codec: this._codec,
logger: this._logger || undefined,
+ blobStorage: this._blobStorage || undefined,
+ patchBlobStorage: this._patchBlobStorage || undefined,
});
}
@@ -383,6 +389,8 @@ export async function createWriter(opts = {}) {
onCommitSuccess: /** @type {(result: {patch: import('../types/WarpTypesV2.js').PatchV2, sha: string}) => void} */ ((/** @type {{patch?: import('../types/WarpTypesV2.js').PatchV2, sha?: string}} */ commitOpts) => this._onPatchCommitted(freshWriterId, commitOpts)),
codec: this._codec,
logger: this._logger || undefined,
+ blobStorage: this._blobStorage || undefined,
+ patchBlobStorage: this._patchBlobStorage || undefined,
});
}
@@ -414,6 +422,34 @@ export async function _ensureFreshState() {
}
}
+/**
+ * Reads a patch blob, using patchBlobStorage for encrypted patches
+ * and falling back to persistence.readBlob() for plain patches.
+ *
+ * @this {import('./_internal.js').WarpGraphWithMixins}
+ * @param {{ patchOid: string, encrypted: boolean }} patchMeta
+ * @returns {Promise}
+ */
+export async function _readPatchBlob(patchMeta) {
+ if (patchMeta.encrypted) {
+ if (!this._patchBlobStorage) {
+ throw new EncryptionError(
+ 'This graph contains encrypted patches; provide patchBlobStorage with an encryption key',
+ );
+ }
+ return await this._patchBlobStorage.retrieve(patchMeta.patchOid);
+ }
+ const blob = await this._persistence.readBlob(patchMeta.patchOid);
+ if (!blob) {
+ throw new PersistenceError(
+ `Patch blob not found: ${patchMeta.patchOid}`,
+ PersistenceError.E_MISSING_OBJECT,
+ { context: { oid: patchMeta.patchOid } },
+ );
+ }
+ return blob;
+}
+
/**
* Discovers all writers that have written to this graph.
*
diff --git a/src/domain/warp/provenance.methods.js b/src/domain/warp/provenance.methods.js
index 9caab655..170f16a9 100644
--- a/src/domain/warp/provenance.methods.js
+++ b/src/domain/warp/provenance.methods.js
@@ -219,7 +219,7 @@ export async function loadPatchBySha(sha) {
/**
* Loads a single patch by its SHA.
*
- * @this {import('../WarpGraph.js').default}
+ * @this {import('./_internal.js').WarpGraphWithMixins}
* @param {string} sha - The patch commit SHA
* @returns {Promise} The decoded patch object
* @throws {Error} If the commit is not a patch or loading fails
@@ -233,7 +233,7 @@ export async function _loadPatchBySha(sha) {
}
const patchMeta = decodePatchMessage(nodeInfo.message);
- const patchBuffer = await this._persistence.readBlob(patchMeta.patchOid);
+ const patchBuffer = await this._readPatchBlob(patchMeta);
return /** @type {import('../types/WarpTypesV2.js').PatchV2} */ (this._codec.decode(patchBuffer));
}
diff --git a/src/domain/warp/query.methods.js b/src/domain/warp/query.methods.js
index 77dea119..20e6812c 100644
--- a/src/domain/warp/query.methods.js
+++ b/src/domain/warp/query.methods.js
@@ -377,6 +377,9 @@ export async function getContent(nodeId) {
if (!oid) {
return null;
}
+ if (this._blobStorage) {
+ return await this._blobStorage.retrieve(oid);
+ }
return await this._persistence.readBlob(oid);
}
@@ -420,5 +423,8 @@ export async function getEdgeContent(from, to, label) {
if (!oid) {
return null;
}
+ if (this._blobStorage) {
+ return await this._blobStorage.retrieve(oid);
+ }
return await this._persistence.readBlob(oid);
}
diff --git a/src/globals.d.ts b/src/globals.d.ts
index 931ec235..0fcf0f5a 100644
--- a/src/globals.d.ts
+++ b/src/globals.d.ts
@@ -2,9 +2,10 @@
* Minimal ambient declarations for Deno and Bun runtime globals.
*
* These cover ONLY the APIs actually used in this codebase:
- * - Deno.serve() (DenoHttpAdapter.js)
- * - Deno.env.get() (bin/cli/infrastructure.js)
- * - Bun.serve() (BunHttpAdapter.js)
+ * - Deno.serve() (DenoHttpAdapter.js, DenoWsAdapter.js)
+ * - Deno.upgradeWebSocket() (DenoWsAdapter.js)
+ * - Deno.env.get() (bin/cli/infrastructure.js)
+ * - Bun.serve() (BunHttpAdapter.js, BunWsAdapter.js)
*
* Do NOT install @types/deno or @types/bun — this file is intentionally
* narrow to avoid pulling in thousands of unrelated declarations.
@@ -28,7 +29,7 @@ interface DenoServer {
interface DenoServeOptions {
port?: number;
hostname?: string;
- onListen?: () => void;
+ onListen?: (addr?: { port: number; hostname: string }) => void;
}
interface DenoEnv {
@@ -41,22 +42,44 @@ declare namespace Deno {
options: DenoServeOptions,
 handler: (request: Request) => Promise<Response> | Response,
): DenoServer;
+ function upgradeWebSocket(request: Request): { socket: WebSocket; response: Response };
}
/* ------------------------------------------------------------------ */
/* Bun */
/* ------------------------------------------------------------------ */
+interface BunServerWebSocket<T = BunWsData> {
+ send(data: string | ArrayBuffer | Uint8Array): void;
+ close(code?: number, reason?: string): void;
+ data: T;
+ readyState: number;
+}
+
+interface BunWsData {
+ messageHandler: ((message: string) => void) | null;
+ closeHandler: ((code?: number, reason?: string) => void) | null;
+ messageBuffer: string[];
+}
+
+interface BunWebSocketHandlers {
+ open?(ws: BunServerWebSocket): void;
+ message?(ws: BunServerWebSocket, message: string | ArrayBuffer): void;
+ close?(ws: BunServerWebSocket, code: number, reason: string): void;
+}
+
interface BunServer {
 stop(closeActiveConnections?: boolean): Promise<void>;
hostname: string;
port: number;
+ upgrade<T = BunWsData>(req: Request, options?: { data?: T }): boolean;
}
interface BunServeOptions {
port?: number;
hostname?: string;
- fetch: (request: Request) => Promise | Response;
+ fetch: (request: Request, server: BunServer) => Promise<Response> | Response | undefined;
+ websocket?: BunWebSocketHandlers