diff --git a/packages/adapter-openclaw/skills/dkg-node/SKILL.md b/packages/adapter-openclaw/skills/dkg-node/SKILL.md index 980d854c0..ee4b4ff41 100644 --- a/packages/adapter-openclaw/skills/dkg-node/SKILL.md +++ b/packages/adapter-openclaw/skills/dkg-node/SKILL.md @@ -52,7 +52,7 @@ Use `dkg_list_paranets` first to check if the paranet already exists. Subscribe to a paranet to receive its data and updates. Subscription is immediate; data sync from peers happens in the background. - `paranet_id` (required): paranet ID to subscribe to -- `include_workspace` (optional): set to `"false"` to skip syncing draft data (default: true) +- `include_workspace` (optional): set to `"false"` to skip syncing shared memory data (default: true) Use `dkg_list_paranets` to check sync status afterward. @@ -130,7 +130,7 @@ Run a read-only SPARQL query (`SELECT`, `CONSTRUCT`, `ASK`, `DESCRIBE`) against - `sparql` (required): SPARQL query string - `paranet_id` (optional): limit query scope to a specific paranet -- `include_workspace` (optional): set to `"true"` to also search workspace (draft/ephemeral) data +- `include_workspace` (optional): set to `"true"` to also search shared memory (working/ephemeral) data Example queries: - list everything: `SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 20` diff --git a/packages/adapter-openclaw/src/DkgNodePlugin.ts b/packages/adapter-openclaw/src/DkgNodePlugin.ts index 6166dedd4..0d738eafd 100644 --- a/packages/adapter-openclaw/src/DkgNodePlugin.ts +++ b/packages/adapter-openclaw/src/DkgNodePlugin.ts @@ -361,7 +361,7 @@ export class DkgNodePlugin { }, include_shared_memory: { type: 'string', - description: 'Set to "false" to skip syncing shared memory/draft data. Default: true.', + description: 'Set to "false" to skip syncing shared memory data. 
Default: true.', }, }, required: ['context_graph_id'], @@ -424,7 +424,7 @@ export class DkgNodePlugin { properties: { sparql: { type: 'string', description: 'SPARQL query string (SELECT, CONSTRUCT, ASK, or DESCRIBE)' }, context_graph_id: { type: 'string', description: 'Optional context graph scope — omit to query all data' }, - include_shared_memory: { type: 'string', description: 'Set to "true" to also search shared memory (draft/ephemeral) data. Default: false.' }, + include_shared_memory: { type: 'string', description: 'Set to "true" to also search shared memory (working/ephemeral) data. Default: false.' }, }, required: ['sparql'], }, diff --git a/packages/agent/src/dkg-agent.ts b/packages/agent/src/dkg-agent.ts index 41602e00a..f086842f4 100644 --- a/packages/agent/src/dkg-agent.ts +++ b/packages/agent/src/dkg-agent.ts @@ -1596,7 +1596,9 @@ export class DKGAgent { includeWorkspace?: boolean; operationCtx?: OperationContext; view?: GetView; + agentAddress?: string; verifiedGraph?: string; + assertionName?: string; subGraphName?: string; }, ) { @@ -1615,8 +1617,9 @@ export class DKGAgent { graphSuffix: opts.graphSuffix, includeSharedMemory: opts.includeSharedMemory, view: opts.view, - agentAddress: opts.view === 'working-memory' ? this.peerId : undefined, + agentAddress: opts.agentAddress ?? (opts.view === 'working-memory' ? this.peerId : undefined), verifiedGraph: opts.verifiedGraph, + assertionName: opts.assertionName, subGraphName: opts.subGraphName, }); this.log.info(ctx, `Query returned ${result.bindings?.length ?? 
0} bindings`); @@ -2067,8 +2070,8 @@ export class DKGAgent { try { await this.store.dropGraph(uri); } catch { /* graph may not exist */ } } - // Drop assertion/draft graphs under the sub-graph prefix - const sgPrefix = `did:dkg:context-graph:${contextGraphId}/${subGraphName}/draft/`; + // Drop assertion graphs under the sub-graph prefix + const sgPrefix = `did:dkg:context-graph:${contextGraphId}/${subGraphName}/assertion/`; const allGraphs = await this.store.listGraphs(); for (const g of allGraphs) { if (g.startsWith(sgPrefix)) { @@ -3641,25 +3644,25 @@ export class DKGAgent { } } - // ── Working Memory Draft Operations (spec §6) ──────────────────────── + // ── Working Memory Assertion Operations (spec §6) ─────────────────── - get draft() { + get assertion() { const agent = this; const agentAddress = this.peerId; return { - async create(contextGraphId: string, draftName: string, opts?: { subGraphName?: string }): Promise { - return agent.publisher.draftCreate(contextGraphId, draftName, agentAddress, opts?.subGraphName); + async create(contextGraphId: string, name: string, opts?: { subGraphName?: string }): Promise { + return agent.publisher.assertionCreate(contextGraphId, name, agentAddress, opts?.subGraphName); }, /** - * Write triples to a WM draft. Accepts: + * Write triples to a WM assertion. 
Accepts: * - `Quad[]` — standard quad array (same as publish/share) * - `JsonLdContent` — JSON-LD document, auto-converted to quads - * - `Array<{ subject, predicate, object }>` — legacy triple array (deprecated) + * - `Array<{ subject, predicate, object }>` — simple triple array */ async write( contextGraphId: string, - draftName: string, + name: string, input: import('@origintrail-official/dkg-storage').Quad[] | JsonLdContent | Array<{ subject: string; predicate: string; object: string }>, opts?: { subGraphName?: string }, ): Promise { @@ -3673,17 +3676,17 @@ export class DKGAgent { quads = (input as Array<{ subject: string; predicate: string; object: string }>) .map(t => ({ subject: t.subject, predicate: t.predicate, object: t.object, graph: '' })); } - return agent.publisher.draftWrite(contextGraphId, draftName, agentAddress, quads, opts?.subGraphName); + return agent.publisher.assertionWrite(contextGraphId, name, agentAddress, quads, opts?.subGraphName); }, - async query(contextGraphId: string, draftName: string, opts?: { subGraphName?: string }): Promise { - return agent.publisher.draftQuery(contextGraphId, draftName, agentAddress, opts?.subGraphName); + async query(contextGraphId: string, name: string, opts?: { subGraphName?: string }): Promise { + return agent.publisher.assertionQuery(contextGraphId, name, agentAddress, opts?.subGraphName); }, - async promote(contextGraphId: string, draftName: string, opts?: { entities?: string[] | 'all'; subGraphName?: string }): Promise<{ promotedCount: number }> { - return agent.publisher.draftPromote(contextGraphId, draftName, agentAddress, opts); + async promote(contextGraphId: string, name: string, opts?: { entities?: string[] | 'all'; subGraphName?: string }): Promise<{ promotedCount: number }> { + return agent.publisher.assertionPromote(contextGraphId, name, agentAddress, opts); }, - async discard(contextGraphId: string, draftName: string, opts?: { subGraphName?: string }): Promise { - return 
agent.publisher.draftDiscard(contextGraphId, draftName, agentAddress, opts?.subGraphName); + async discard(contextGraphId: string, name: string, opts?: { subGraphName?: string }): Promise { + return agent.publisher.assertionDiscard(contextGraphId, name, agentAddress, opts?.subGraphName); }, }; } diff --git a/packages/agent/src/gossip-publish-handler.ts b/packages/agent/src/gossip-publish-handler.ts index 496d3a269..1899a506a 100644 --- a/packages/agent/src/gossip-publish-handler.ts +++ b/packages/agent/src/gossip-publish-handler.ts @@ -91,23 +91,6 @@ export class GossipPublishHandler { } subGraphName = request.subGraphName; await graphManager.ensureSubGraph(request.paranetId, subGraphName); - - // Persist discovery registration so listSubGraphs() works on replicas - const sgUri = contextGraphSubGraphUri(request.paranetId, subGraphName); - const metaGraph = `did:dkg:context-graph:${assertSafeIri(request.paranetId)}/_meta`; - const alreadyRegistered = await this.store.query( - `ASK { GRAPH <${metaGraph}> { <${assertSafeIri(sgUri)}> a } }`, - ); - if (alreadyRegistered.type !== 'boolean' || !alreadyRegistered.value) { - const regQuads = generateSubGraphRegistration({ - contextGraphId: request.paranetId, - subGraphName, - createdBy: request.publisherAddress || 'gossip-discovery', - timestamp: new Date(), - }); - await this.store.insert(regQuads); - this.log.info(ctx, `Auto-registered sub-graph "${subGraphName}" in context graph "${request.paranetId}" from gossip`); - } } const dataGraph = subGraphName @@ -206,6 +189,26 @@ export class GossipPublishHandler { phase?.('validate', 'end'); + // Auto-register sub-graph in _meta AFTER validation passes. + // This prevents polluting metadata when invalid messages are rejected. 
+ if (subGraphName) { + const sgUri = contextGraphSubGraphUri(request.paranetId, subGraphName); + const metaGraph = `did:dkg:context-graph:${assertSafeIri(request.paranetId)}/_meta`; + const alreadyRegistered = await this.store.query( + `ASK { GRAPH <${metaGraph}> { <${assertSafeIri(sgUri)}> a } }`, + ); + if (alreadyRegistered.type !== 'boolean' || !alreadyRegistered.value) { + const regQuads = generateSubGraphRegistration({ + contextGraphId: request.paranetId, + subGraphName, + createdBy: request.publisherAddress || 'gossip-discovery', + timestamp: new Date(), + }); + await this.store.insert(regQuads); + this.log.info(ctx, `Auto-registered sub-graph "${subGraphName}" in context graph "${request.paranetId}" from gossip`); + } + } + phase?.('store', 'start'); if (normalized.length > 0 && !isReplay) { await this.store.insert(normalized); diff --git a/packages/agent/test/e2e-sub-graphs.test.ts b/packages/agent/test/e2e-sub-graphs.test.ts index bb4156057..fbc5e454d 100644 --- a/packages/agent/test/e2e-sub-graphs.test.ts +++ b/packages/agent/test/e2e-sub-graphs.test.ts @@ -322,22 +322,18 @@ describe('Sub-graph replication (two agents)', () => { { contextGraphId: 'sg-replica', subGraphName: 'data' }, ); if (bResult.bindings.length > 0) break; - - // Also check root graph in case it landed there - const rootCheck = await agentB.query( - 'SELECT ?label WHERE { ?s ?label }', - { contextGraphId: 'sg-replica' }, - ); - if (rootCheck.bindings.length > 0) { - // Data replicated but to root graph — still valid for replication test - bResult = rootCheck; - break; - } await sleep(500); } expect(bResult.bindings.length).toBeGreaterThanOrEqual(1); expect(bResult.bindings[0]['label']).toBe('"Replicated Entity"'); + + // Sub-graph isolation on replica: data must NOT be in root graph + const rootCheck = await agentB.query( + 'SELECT ?label WHERE { ?s ?label }', + { contextGraphId: 'sg-replica' }, + ); + expect(rootCheck.bindings).toHaveLength(0); }, 30_000); }); @@ -379,7 +375,7 @@ 
describe('Sub-graph across memory layers (single agent)', () => { expect(rootResult.bindings).toHaveLength(0); }, 15_000); - it('draft.write accepts Quad[] input', async () => { + it('assertion.write accepts Quad[] input', async () => { const agent = await DKGAgent.create({ name: 'QuadDraftBot', listenPort: 0, @@ -389,24 +385,24 @@ describe('Sub-graph across memory layers (single agent)', () => { agents.push(agent); await agent.start(); - await agent.createContextGraph({ id: 'sg-quad-draft', name: 'Quad Draft', description: '' }); - await agent.createSubGraph('sg-quad-draft', 'code'); + await agent.createContextGraph({ id: 'sg-quad-input', name: 'Quad Input', description: '' }); + await agent.createSubGraph('sg-quad-input', 'code'); - await agent.draft.create('sg-quad-draft', 'quad-test', { subGraphName: 'code' }); + await agent.assertion.create('sg-quad-input', 'quad-test', { subGraphName: 'code' }); // Write using Quad[] (standard format, same as publish/share) - await agent.draft.write('sg-quad-draft', 'quad-test', [ + await agent.assertion.write('sg-quad-input', 'quad-test', [ { subject: 'urn:fn:main', predicate: 'http://ex.org/sig', object: '"main()"', graph: '' }, { subject: 'urn:fn:main', predicate: 'http://ex.org/lang', object: '"TypeScript"', graph: '' }, ], { subGraphName: 'code' }); - const quads = await agent.draft.query('sg-quad-draft', 'quad-test', { subGraphName: 'code' }); + const quads = await agent.assertion.query('sg-quad-input', 'quad-test', { subGraphName: 'code' }); expect(quads).toHaveLength(2); }, 15_000); - it('draft.write accepts JSON-LD input', async () => { + it('assertion.write accepts JSON-LD input', async () => { const agent = await DKGAgent.create({ - name: 'JsonLdDraftBot', + name: 'JsonLdInputBot', listenPort: 0, skills: [], chainAdapter: new MockChainAdapter(), @@ -414,27 +410,27 @@ describe('Sub-graph across memory layers (single agent)', () => { agents.push(agent); await agent.start(); - await agent.createContextGraph({ id: 
'sg-jsonld-draft', name: 'JSONLD Draft', description: '' }); - await agent.createSubGraph('sg-jsonld-draft', 'entities'); + await agent.createContextGraph({ id: 'sg-jsonld-input', name: 'JSONLD Input', description: '' }); + await agent.createSubGraph('sg-jsonld-input', 'entities'); - await agent.draft.create('sg-jsonld-draft', 'ld-test', { subGraphName: 'entities' }); + await agent.assertion.create('sg-jsonld-input', 'ld-test', { subGraphName: 'entities' }); // Write using JSON-LD (auto-converted to quads) - await agent.draft.write('sg-jsonld-draft', 'ld-test', { + await agent.assertion.write('sg-jsonld-input', 'ld-test', { '@id': 'urn:entity:alice', 'http://schema.org/name': 'Alice', 'http://schema.org/jobTitle': 'Engineer', }, { subGraphName: 'entities' }); - const quads = await agent.draft.query('sg-jsonld-draft', 'ld-test', { subGraphName: 'entities' }); + const quads = await agent.assertion.query('sg-jsonld-input', 'ld-test', { subGraphName: 'entities' }); expect(quads.length).toBeGreaterThanOrEqual(2); const names = quads.filter(q => q.predicate === 'http://schema.org/name'); expect(names).toHaveLength(1); }, 15_000); - it('WM draft with subGraphName → promote to sub-graph SWM', async () => { + it('WM assertion with subGraphName → promote to sub-graph SWM', async () => { const agent = await DKGAgent.create({ - name: 'DraftSubBot', + name: 'AssertionSubBot', listenPort: 0, skills: [], chainAdapter: new MockChainAdapter(), @@ -445,21 +441,21 @@ describe('Sub-graph across memory layers (single agent)', () => { await agent.createContextGraph({ id: 'sg-wm-layer', name: 'WM Layer', description: '' }); await agent.createSubGraph('sg-wm-layer', 'decisions'); - // Create draft in sub-graph WM - const draftUri = await agent.draft.create('sg-wm-layer', 'arch-review', { subGraphName: 'decisions' }); - expect(draftUri).toContain('/decisions/draft/'); + // Create assertion in sub-graph WM + const assertionUri = await agent.assertion.create('sg-wm-layer', 'arch-review', { 
subGraphName: 'decisions' }); + expect(assertionUri).toContain('/decisions/assertion/'); - // Write to draft - await agent.draft.write('sg-wm-layer', 'arch-review', [ + // Write to assertion + await agent.assertion.write('sg-wm-layer', 'arch-review', [ { subject: 'urn:dec:1', predicate: 'http://ex.org/title', object: '"Use TypeScript"' }, ], { subGraphName: 'decisions' }); - // Query draft - const draftQuads = await agent.draft.query('sg-wm-layer', 'arch-review', { subGraphName: 'decisions' }); - expect(draftQuads).toHaveLength(1); + // Query assertion + const assertionQuads = await agent.assertion.query('sg-wm-layer', 'arch-review', { subGraphName: 'decisions' }); + expect(assertionQuads).toHaveLength(1); // Promote to sub-graph SWM - const result = await agent.draft.promote('sg-wm-layer', 'arch-review', { + const result = await agent.assertion.promote('sg-wm-layer', 'arch-review', { entities: 'all', subGraphName: 'decisions', }); @@ -480,12 +476,12 @@ describe('Sub-graph across memory layers (single agent)', () => { ); expect(rootSwm.bindings).toHaveLength(0); - // Draft should be empty after promotion - const emptyDraft = await agent.draft.query('sg-wm-layer', 'arch-review', { subGraphName: 'decisions' }); - expect(emptyDraft).toHaveLength(0); + // Assertion should be empty after promotion + const emptyAssertion = await agent.assertion.query('sg-wm-layer', 'arch-review', { subGraphName: 'decisions' }); + expect(emptyAssertion).toHaveLength(0); }, 15_000); - it('full pipeline: WM draft → SWM → VM (sub-graph scoped)', async () => { + it('full pipeline: WM assertion → SWM → VM (sub-graph scoped)', async () => { const agent = await DKGAgent.create({ name: 'PipelineBot', listenPort: 0, @@ -498,14 +494,14 @@ describe('Sub-graph across memory layers (single agent)', () => { await agent.createContextGraph({ id: 'sg-pipeline', name: 'Pipeline', description: '' }); await agent.createSubGraph('sg-pipeline', 'code'); - // Step 1: Draft in WM/code - await 
agent.draft.create('sg-pipeline', 'scan', { subGraphName: 'code' }); - await agent.draft.write('sg-pipeline', 'scan', [ + // Step 1: Assertion in WM/code + await agent.assertion.create('sg-pipeline', 'scan', { subGraphName: 'code' }); + await agent.assertion.write('sg-pipeline', 'scan', [ { subject: 'urn:fn:main', predicate: 'http://ex.org/sig', object: '"main()"' }, ], { subGraphName: 'code' }); // Step 2: Promote WM/code → SWM/code - await agent.draft.promote('sg-pipeline', 'scan', { + await agent.assertion.promote('sg-pipeline', 'scan', { entities: 'all', subGraphName: 'code', }); diff --git a/packages/cli/package.json b/packages/cli/package.json index 7e7406c48..52e14c6ed 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -35,6 +35,7 @@ "files": [ "dist", "network", + "skills", "README.md", "LICENSE" ], diff --git a/packages/cli/skills/dkg-node/SKILL.md b/packages/cli/skills/dkg-node/SKILL.md new file mode 100644 index 000000000..61cd09814 --- /dev/null +++ b/packages/cli/skills/dkg-node/SKILL.md @@ -0,0 +1,200 @@ +--- +name: dkg-node +description: Manage agent memory on the DKG V10 node — store private assertions in Working Memory, share with your team in Shared Working Memory, publish permanently to Verified Memory, build trust through endorsements and M-of-N consensus verification. +--- + +# DKG V10 Node Skill + +You are connected to an **OriginTrail Decentralized Knowledge Graph (DKG) V10** node. +This skill teaches you the full node API surface so you can operate autonomously. + +## 1. Node Info + +> This section is dynamically generated from node state at serve-time. + +- **Node version:** (dynamic) +- **Base URL:** (dynamic) +- **Peer ID:** (dynamic) +- **Node role:** (dynamic — `core` or `edge`) +- **Available extraction pipelines:** (dynamic) +- **Subscribed Context Graphs:** (dynamic) + +## 2. Capabilities Overview + +> **Note:** This skill describes the full DKG V10 API surface. 
Some endpoints +> may not yet be available on your node depending on its version. Call +> `GET /api/status` to check the node version, and rely on error responses +> (404) to detect unimplemented routes. The node is under active development +> toward V10.0 — endpoints are being shipped incrementally. + +This node provides a three-layer **verifiable memory system** for AI agents: + +| Layer | Scope | Cost | Trust Level | Persistence | +|-------|-------|------|-------------|-------------| +| **Working Memory (WM)** | Private to you | Free | Self-attested | Local, survives restarts | +| **Shared Working Memory (SWM)** | Visible to team | Free | Self-attested (gossip replicated) | TTL-bounded | +| **Verified Memory (VM)** | Permanent, on-chain | TRAC tokens | Self-attested → endorsed → consensus-verified | Permanent | + +**What you can do:** create knowledge assertions, import files (PDF, DOCX, Markdown), +share knowledge with peers, publish to the blockchain, endorse others' knowledge, +propose M-of-N consensus verification, query across all memory layers, and +discover other agents on the network. + +## 3. 
Quick Start + +**Step 1 — Create a Context Graph:** + +```bash +curl -X POST $BASE_URL/api/context-graph/create \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d '{"id": "my-context-graph", "name": "My Context Graph"}' +``` + +**Step 2 — Write to Shared Memory:** + +```bash +curl -X POST $BASE_URL/api/shared-memory/write \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d '{ + "contextGraphId": "my-context-graph", + "quads": [ + {"subject": "https://example.org/alice", "predicate": "http://www.w3.org/1999/02/22-rdf-syntax-ns#type", "object": "https://schema.org/Person", "graph": ""}, + {"subject": "https://example.org/alice", "predicate": "https://schema.org/name", "object": "\"Alice\"", "graph": ""} + ] + }' +``` + +**Step 3 — Publish to Verified Memory:** + +```bash +curl -X POST $BASE_URL/api/publish \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d '{"contextGraphId": "my-context-graph", "quads": [...]}' +``` + +**Step 4 — Query:** + +```bash +curl -X POST $BASE_URL/api/query \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d '{"sparql": "SELECT * WHERE { ?s ?p ?o } LIMIT 10", "contextGraphId": "my-context-graph", "includeSharedMemory": true}' +``` + +## 4. Authentication + +**Token usage:** Include `Authorization: Bearer $TOKEN` on all requests. +The token is configured in the node's config file or provided at startup. + +**Public endpoints (no auth):** `GET /api/status`, `GET /api/chain/rpc-health`, +`GET /.well-known/skill.md`. + +> **Planned:** Multi-agent registration (`POST /api/agent/register`) with custodial +> and self-sovereign key modes will be available in a future release. + +## 5. 
Memory Model + +### Shared Working Memory (SWM) — Team-visible + +- `POST /api/shared-memory/write` — write triples to SWM (gossip-replicated) +- `POST /api/shared-memory/publish` — promote SWM triples to Verified Memory + +### Verified Memory (VM) — Permanent, on-chain + +- `POST /api/publish` — publish triples to VM (costs TRAC) +- `POST /api/update` — update an existing Knowledge Asset +- `POST /api/endorse` — endorse a Knowledge Asset ("I vouch for this") +- `POST /api/verify` — propose or approve M-of-N consensus verification + +### Querying + +- `POST /api/query` — SPARQL query with optional `view` (`working-memory`, `shared-working-memory`, `verified-memory`), `agentAddress`, `assertionName`, `verifiedGraph`, `subGraphName`, `includeSharedMemory`, `contextGraphId` parameters +- `POST /api/query-remote` — query a remote peer via P2P + +### Working Memory (WM) — Private assertions (🚧 Planned) + +> The following WM assertion endpoints are planned for a future release: + +- `POST /api/assertion/create` — create a named private assertion +- `PUT /api/assertion/{name}` — write triples to an assertion +- `POST /api/assertion/{name}/import` — import N-Triples/Turtle/JSON-LD +- `POST /api/assertion/{name}/import-file` — import PDF/DOCX/Markdown (multipart) +- `GET /api/assertion/{name}` — read assertion contents +- `DELETE /api/assertion/{name}` — delete assertion +- `POST /api/assertion/{name}/promote` — promote assertion to SWM + +## 6. Context Graphs + +Context Graphs are scoped knowledge domains with configurable access and governance. 
+ +- `POST /api/context-graph/create` — create a context graph +- `GET /api/context-graph/list` — list subscribed context graphs +- `POST /api/context-graph/subscribe` — subscribe to a context graph +- `GET /api/context-graph/exists` — check if a context graph exists +- 🚧 `GET /api/context-graph/{id}` — CG details *(planned)* +- 🚧 `POST /api/context-graph/{id}/ontology` — add ontology *(planned)* +- 🚧 `GET /api/context-graph/{id}/ontology` — list ontologies *(planned)* + +## 7. File Ingestion (🚧 Planned) + +> File ingestion via `import-file` depends on the Working Memory assertion API (§5) +> and will be available when those endpoints ship. The extraction pipeline +> infrastructure (MarkItDown converter) is already in place on the node. + +Supported formats depend on available extraction pipelines (see Node Info §1). +When available, usage will be: + +```bash +curl -X POST $BASE_URL/api/assertion/my-assertion/import-file \ + -H "Authorization: Bearer $TOKEN" \ + -F "file=@paper.pdf" \ + -F "contextGraph=my-context-graph" +``` + +## 8. Node Administration + +- `GET /api/status` (PUBLIC) — node status, peer ID, version, connections +- `GET /api/info` — lightweight health check +- `GET /api/agents` — list known agents +- `GET /api/connections` — transport details +- `GET /api/wallets/balances` — TRAC and ETH balances +- `GET /api/chain/rpc-health` (PUBLIC) — RPC health +- `GET /api/identity` — node identity (DID, identity ID) +- 🚧 `GET /api/agent/profile` — your agent profile *(planned)* + +## 9. 
Error Reference + +| Status | Meaning | Recovery | +|--------|---------|----------| +| 400 | Bad request — missing fields, invalid SPARQL | Fix the request body | +| 401 | Unauthorized — invalid or missing token | Re-authenticate or refresh token | +| 402 | Insufficient TRAC for publication | Check balances, notify node operator | +| 403 | Forbidden — publishPolicy or allowList violation | Verify CG membership and publish authority | +| 404 | Resource not found | Verify resource identifiers (assertion name, CG ID, UAL) | +| 409 | Conflict — name collision or concurrent modification | Retry with a different name | +| 429 | Rate limited | Wait and retry with backoff | +| 502 | Chain/upstream error | Retry — transient blockchain issue | +| 503 | Service unavailable | Node is starting up or shutting down | + +## 10. Workflow Recipes + +For detailed step-by-step workflow recipes and the full endpoint reference, see +the supporting files in the skill directory: + +- `workflows.md` — 10 workflow recipes with curl examples +- `api-reference.md` — full endpoint reference grouped by workflow +- `examples/sparql-recipes.md` — SPARQL query patterns + +## Appendix: V9 → V10 Migration + +| V9 Concept | V10 Equivalent | +|------------|---------------| +| Paranet | Context Graph | +| Workspace | Shared Working Memory | +| Enshrine | Publish (promote to Verified Memory) | +| `POST /api/workspace/write` | `POST /api/shared-memory/write` | +| `POST /api/workspace/enshrine` | `POST /api/shared-memory/publish` | +| `POST /api/paranet/create` | `POST /api/context-graph/create` | diff --git a/packages/cli/src/auth.ts b/packages/cli/src/auth.ts index e36c537d6..28a8142d1 100644 --- a/packages/cli/src/auth.ts +++ b/packages/cli/src/auth.ts @@ -106,6 +106,7 @@ export function extractBearerToken(headerValue: string | undefined): string | un const PUBLIC_PATHS = new Set([ '/api/status', '/api/chain/rpc-health', + '/.well-known/skill.md', ]); const PUBLIC_PREFIXES = [ diff --git 
a/packages/cli/src/daemon.ts b/packages/cli/src/daemon.ts index de91e5008..713cc7801 100644 --- a/packages/cli/src/daemon.ts +++ b/packages/cli/src/daemon.ts @@ -1,4 +1,5 @@ import { createServer, type IncomingMessage, type ServerResponse } from 'node:http'; +import { createHash } from 'node:crypto'; import { appendFile, mkdir, readFile, unlink, writeFile } from 'node:fs/promises'; import { execSync, exec, execFile } from 'node:child_process'; import { promisify } from 'node:util'; @@ -11,7 +12,7 @@ import { fileURLToPath } from 'node:url'; import { stat } from 'node:fs/promises'; import { ethers } from 'ethers'; import { DKGAgent, loadOpWallets } from '@origintrail-official/dkg-agent'; -import { computeNetworkId, createOperationContext, DKGEvent, Logger, PayloadTooLargeError } from '@origintrail-official/dkg-core'; +import { computeNetworkId, createOperationContext, DKGEvent, Logger, PayloadTooLargeError, GET_VIEWS } from '@origintrail-official/dkg-core'; import { DashboardDB, MetricsCollector, @@ -51,6 +52,8 @@ import { CLI_NPM_PACKAGE, } from './config.js'; import { loadTokens, httpAuthGuard, extractBearerToken } from './auth.js'; +import { ExtractionPipelineRegistry } from '@origintrail-official/dkg-core'; +import { MarkItDownConverter, isMarkItDownAvailable } from './extraction/index.js'; import { handleCapture, EpcisValidationError, handleEventsQuery, EpcisQueryError, type Publisher as EpcisPublisher } from '@origintrail-official/dkg-epcis'; import { readFileSync } from 'node:fs'; @@ -75,6 +78,55 @@ function getCurrentCommitShort(): string { } catch { return ''; } } } + +// --------------------------------------------------------------------------- +// SKILL.MD serving — Agent Skills standard (https://agentskills.io) +// --------------------------------------------------------------------------- + +let cachedSkillMd: string | null = null; +let cachedSkillEtag: string | null = null; + +function loadSkillTemplate(): string { + if (cachedSkillMd) return 
cachedSkillMd; + const skillPath = new URL('../skills/dkg-node/SKILL.md', import.meta.url); + const content = readFileSync(skillPath, 'utf-8'); + cachedSkillMd = content; + return content; +} + +function buildSkillMd(opts: { + version: string; + baseUrl: string; + peerId: string; + nodeRole: string; + extractionPipelines: string[]; +}): string { + const template = loadSkillTemplate(); + const dynamicSection = [ + `- **Node version:** ${opts.version}`, + `- **Base URL:** ${opts.baseUrl}`, + `- **Peer ID:** ${opts.peerId}`, + `- **Node role:** ${opts.nodeRole}`, + `- **Available extraction pipelines:** ${opts.extractionPipelines.length > 0 ? opts.extractionPipelines.join(', ') : 'text/markdown'}`, + `- **Subscribed Context Graphs:** use \`GET /api/context-graph/list\` (requires auth)`, + ].join('\n'); + + const staticPlaceholder = + '> This section is dynamically generated from node state at serve-time.\n\n' + + '- **Node version:** (dynamic)\n' + + '- **Base URL:** (dynamic)\n' + + '- **Peer ID:** (dynamic)\n' + + '- **Node role:** (dynamic — `core` or `edge`)\n' + + '- **Available extraction pipelines:** (dynamic)\n' + + '- **Subscribed Context Graphs:** (dynamic)'; + + return template.replace(staticPlaceholder, dynamicSection); +} + +function skillEtag(content: string): string { + return '"' + createHash('md5').update(content).digest('hex').slice(0, 16) + '"'; +} + import { loadApps, handleAppRequest, startAppStaticServer, type LoadedApp } from './app-loader.js'; export const DAEMON_EXIT_CODE_RESTART = 75; @@ -741,11 +793,21 @@ async function runDaemonInner(foreground: boolean, config: Awaited { const url = new URL(req.url ?? '/', `http://${req.headers.host}`); const path = url.pathname; + // GET /.well-known/skill.md — Agent Skills document (PUBLIC, no auth) + if (req.method === 'GET' && path === '/.well-known/skill.md') { + const proto = req.headers['x-forwarded-proto'] ?? 'http'; + const host = req.headers['x-forwarded-host'] ?? req.headers.host ?? 
`localhost:${config.listenPort ?? 9200}`; + const baseUrl = `${proto}://${host}`; + const pipelines = ['text/markdown', ...extractionRegistry.availableContentTypes()]; + const content = buildSkillMd({ + version: nodeVersion, + baseUrl, + peerId: agent.peerId, + nodeRole: config.nodeRole ?? 'edge', + extractionPipelines: [...new Set(pipelines)], + }); + const etag = skillEtag(content); + if (req.headers['if-none-match'] === etag) { + res.writeHead(304).end(); + return; + } + res.writeHead(200, { + 'Content-Type': 'text/markdown; charset=utf-8', + 'ETag': etag, + 'Cache-Control': 'public, max-age=300', + }); + res.end(content); + return; + } + // GET /api/status if (req.method === 'GET' && path === '/api/status') { const allConns = agent.node.libp2p.getConnections(); @@ -1997,7 +2088,15 @@ async function handleRequest( const contextGraphId = parsed.contextGraphId ?? parsed.paranetId; const graphSuffix = parsed.graphSuffix; const includeSharedMemory = parsed.includeSharedMemory ?? parsed.includeWorkspace; + const view = parsed.view; + const agentAddress = parsed.agentAddress; + const verifiedGraph = parsed.verifiedGraph; + const assertionName = parsed.assertionName; + const subGraphName = parsed.subGraphName; if (!sparql || !String(sparql).trim()) return jsonResponse(res, 400, { error: 'Missing "sparql"' }); + if (view && !(GET_VIEWS as readonly string[]).includes(view)) { + return jsonResponse(res, 400, { error: `Invalid view "${view}". 
Supported: ${GET_VIEWS.join(', ')}` }); + } const ctx = createOperationContext('query'); tracker.start(ctx, { contextGraphId, details: { sparql: sparql.slice(0, 200) } }); tracker.startPhase(ctx, 'parse'); @@ -2005,7 +2104,7 @@ async function handleRequest( tracker.completePhase(ctx, 'parse'); tracker.startPhase(ctx, 'execute'); const execT0 = Date.now(); - const result = await agent.query(sparql, { contextGraphId, graphSuffix, includeSharedMemory, operationCtx: ctx }); + const result = await agent.query(sparql, { contextGraphId, graphSuffix, includeSharedMemory, view, agentAddress, verifiedGraph, assertionName, subGraphName, operationCtx: ctx }); const execDur = Date.now() - execT0; tracker.completePhase(ctx, 'execute'); tracker.complete(ctx, { tripleCount: result?.bindings?.length ?? 0 }); @@ -2013,7 +2112,13 @@ async function handleRequest( } catch (err: any) { tracker.fail(ctx, err); const msg = err?.message ?? ''; - if (msg.startsWith('SPARQL rejected:') || msg.startsWith('Parse error') || /must start with (SELECT|CONSTRUCT|ASK|DESCRIBE)/i.test(msg)) { + if ( + msg.startsWith('SPARQL rejected:') || msg.startsWith('Parse error') || + /must start with (SELECT|CONSTRUCT|ASK|DESCRIBE)/i.test(msg) || + msg.includes('was removed in V10') || + msg.includes('requires agentAddress') || msg.includes('requires contextGraphId') || + msg.includes('cannot be combined with') + ) { return jsonResponse(res, 400, { error: msg }); } throw err; diff --git a/packages/cli/src/extraction/index.ts b/packages/cli/src/extraction/index.ts new file mode 100644 index 000000000..a4b72e041 --- /dev/null +++ b/packages/cli/src/extraction/index.ts @@ -0,0 +1 @@ +export { MarkItDownConverter, isMarkItDownAvailable, MARKITDOWN_CONTENT_TYPES } from './markitdown-converter.js'; diff --git a/packages/cli/src/extraction/markitdown-converter.ts b/packages/cli/src/extraction/markitdown-converter.ts new file mode 100644 index 000000000..1ccb15616 --- /dev/null +++ 
b/packages/cli/src/extraction/markitdown-converter.ts @@ -0,0 +1,98 @@ +/** + * MarkItDown converter — file-to-Markdown using the standalone MarkItDown binary. + * + * Microsoft MarkItDown (MIT license) converts PDF, DOCX, PPTX, XLSX, CSV, HTML, + * images, EPUB, XML, and JSON to Markdown. The binary is a PyInstaller-compiled + * standalone executable shipped with the DKG node. + * + * Spec: 05_PROTOCOL_EXTENSIONS.md §6.5.1 + */ + +import { execFile, execFileSync } from 'node:child_process'; +import { existsSync } from 'node:fs'; +import { resolve, join } from 'node:path'; +import { platform, arch } from 'node:process'; +import { fileURLToPath } from 'node:url'; +import type { ExtractionPipeline, ExtractionInput, ExtractionOutput } from '@origintrail-official/dkg-core'; + +const MAX_OUTPUT_BYTES = 50 * 1024 * 1024; // 50 MB + +function resolveMarkItDownBin(): string | null { + const suffix = platform === 'win32' ? '.exe' : ''; + const binaryName = `markitdown-${platform}-${arch}${suffix}`; + const binDir = resolve(fileURLToPath(new URL('../../bin', import.meta.url))); + const candidate = join(binDir, binaryName); + if (existsSync(candidate)) return candidate; + + // Fallback: check if markitdown is on PATH + const pathBin = `markitdown${suffix}`; + try { + const whichCmd = platform === 'win32' ? 'where' : 'which'; + execFileSync(whichCmd, [pathBin], { encoding: 'utf-8', stdio: 'pipe' }); + return pathBin; + } catch { + return null; + } +} + +let cachedBinPath: string | null | undefined; + +function getMarkItDownBin(): string | null { + if (cachedBinPath !== undefined) return cachedBinPath; + cachedBinPath = resolveMarkItDownBin(); + return cachedBinPath; +} + +export function isMarkItDownAvailable(): boolean { + return getMarkItDownBin() !== null; +} + +async function runMarkItDown(filePath: string): Promise<string> { + const bin = getMarkItDownBin(); + if (!bin) { + throw new Error( + 'MarkItDown binary not found. Document extraction unavailable. 
' + + 'Install markitdown or place the standalone binary in the node bin/ directory.', + ); + } + + return new Promise<string>((resolve, reject) => { + execFile(bin, [filePath], { maxBuffer: MAX_OUTPUT_BYTES }, (err, stdout, stderr) => { + if (err) { + const msg = stderr?.trim() || err.message; + reject(new Error(`MarkItDown conversion failed: ${msg}`)); + } else { + resolve(stdout); + } + }); + }); +} + +export const MARKITDOWN_CONTENT_TYPES = [ + 'application/pdf', + 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + 'application/vnd.openxmlformats-officedocument.presentationml.presentation', + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', + 'text/csv', + 'text/html', + 'application/epub+zip', + 'application/xml', + 'text/xml', +] as const; + +export class MarkItDownConverter implements ExtractionPipeline { + readonly contentTypes = [...MARKITDOWN_CONTENT_TYPES]; + + async extract(input: ExtractionInput): Promise<ExtractionOutput> { + const markdown = await runMarkItDown(input.filePath); + + // Phase 2 (markdown → triples) is handled by the Markdown extraction pipeline + // which runs separately. This converter only does phase 1: file → Markdown. + // Return the intermediate with empty triples; the caller chains the MD pipeline. + return { + mdIntermediate: markdown, + triples: [], + provenance: [], + }; + } +} diff --git a/packages/cli/test/document-processor-e2e.test.ts b/packages/cli/test/document-processor-e2e.test.ts new file mode 100644 index 000000000..551c89d35 --- /dev/null +++ b/packages/cli/test/document-processor-e2e.test.ts @@ -0,0 +1,340 @@ +/** + * E2E tests for the document processing pipeline. + * + * Tests the full flow: file on disk → ExtractionPipeline → Markdown intermediate. + * When MarkItDown is available, tests real conversion of HTML/CSV/Markdown files. + * When unavailable, tests graceful degradation and the pipeline registry plumbing. 
+ */ +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { writeFile, rm, mkdtemp, readFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { + ExtractionPipelineRegistry, + type ExtractionPipeline, + type ExtractionInput, + type ExtractionOutput, +} from '@origintrail-official/dkg-core'; +import { MarkItDownConverter, isMarkItDownAvailable } from '../src/extraction/index.js'; + +let tmpDir: string; + +beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'dkg-docproc-e2e-')); +}); + +afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); +}); + +// --------------------------------------------------------------------------- +// Registry-level E2E: register → lookup → extract +// --------------------------------------------------------------------------- + +describe('ExtractionPipelineRegistry E2E', () => { + it('registers MarkItDownConverter and resolves content types', () => { + const registry = new ExtractionPipelineRegistry(); + const converter = new MarkItDownConverter(); + registry.register(converter); + + expect(registry.has('application/pdf')).toBe(true); + expect(registry.has('text/csv')).toBe(true); + expect(registry.has('text/html')).toBe(true); + expect(registry.has('text/plain')).toBe(false); + expect(registry.get('application/pdf')).toBe(converter); + }); + + it('reports all available content types after MarkItDown registration', () => { + const registry = new ExtractionPipelineRegistry(); + registry.register(new MarkItDownConverter()); + + const types = registry.availableContentTypes(); + expect(types.length).toBeGreaterThanOrEqual(6); + expect(types).toContain('application/pdf'); + expect(types).toContain('application/vnd.openxmlformats-officedocument.wordprocessingml.document'); + }); + + it('supports multiple pipelines: custom markdown + MarkItDown', () => { + const registry = new ExtractionPipelineRegistry(); + + const 
customMdPipeline: ExtractionPipeline = { + contentTypes: ['text/markdown'], + async extract(input: ExtractionInput): Promise<ExtractionOutput> { + const md = await readFile(input.filePath, 'utf-8'); + return { mdIntermediate: md, triples: [], provenance: [] }; + }, + }; + + registry.register(customMdPipeline); + registry.register(new MarkItDownConverter()); + + expect(registry.get('text/markdown')).toBe(customMdPipeline); + expect(registry.get('application/pdf')).toBeInstanceOf(MarkItDownConverter); + }); +}); + +// --------------------------------------------------------------------------- +// Document conversion E2E (requires MarkItDown binary) +// --------------------------------------------------------------------------- + +const markitdownAvailable = isMarkItDownAvailable(); + +describe.skipIf(!markitdownAvailable)('MarkItDown E2E — real file conversion', () => { + let converter: MarkItDownConverter; + + beforeEach(() => { + converter = new MarkItDownConverter(); + }); + + it('converts an HTML file to Markdown', async () => { + const htmlFile = join(tmpDir, 'page.html'); + await writeFile(htmlFile, ` + + +

Research Paper

+

This paper discusses decentralized knowledge graphs.

+

Introduction

+

The DKG protocol enables verifiable AI memory.

+
    +
  • Working Memory
  • +
  • Shared Working Memory
  • +
  • Verified Memory
  • +
+ + + `); + + const result = await converter.extract({ + filePath: htmlFile, + contentType: 'text/html', + agentDid: 'did:dkg:agent:0xTest', + }); + + expect(result.mdIntermediate).toBeTruthy(); + expect(result.mdIntermediate).toContain('Research Paper'); + expect(result.mdIntermediate).toContain('decentralized knowledge graphs'); + expect(result.triples).toEqual([]); + expect(result.provenance).toEqual([]); + }); + + it('converts a CSV file to Markdown', async () => { + const csvFile = join(tmpDir, 'data.csv'); + await writeFile(csvFile, 'Name,Role,Trust\nAlice,Researcher,endorsed\nBob,Validator,consensus-verified\n'); + + const result = await converter.extract({ + filePath: csvFile, + contentType: 'text/csv', + agentDid: 'did:dkg:agent:0xTest', + }); + + expect(result.mdIntermediate).toBeTruthy(); + expect(result.mdIntermediate).toContain('Alice'); + expect(result.mdIntermediate).toContain('Bob'); + expect(result.mdIntermediate).toContain('Researcher'); + }); + + it('handles empty file gracefully', async () => { + const emptyFile = join(tmpDir, 'empty.html'); + await writeFile(emptyFile, ''); + + const result = await converter.extract({ + filePath: emptyFile, + contentType: 'text/html', + agentDid: 'did:dkg:agent:0xTest', + }); + + expect(typeof result.mdIntermediate).toBe('string'); + expect(result.triples).toEqual([]); + }); + + it('processes file through registry lookup → extract', async () => { + const registry = new ExtractionPipelineRegistry(); + registry.register(converter); + + const htmlFile = join(tmpDir, 'test.html'); + await writeFile(htmlFile, '

Title

Body text

'); + + const pipeline = registry.get('text/html'); + expect(pipeline).toBeDefined(); + + const result = await pipeline!.extract({ + filePath: htmlFile, + contentType: 'text/html', + agentDid: 'did:dkg:agent:0xTest', + }); + + expect(result.mdIntermediate).toContain('Title'); + expect(result.mdIntermediate).toContain('Body text'); + }); +}); + +// --------------------------------------------------------------------------- +// Graceful degradation (MarkItDown unavailable) +// --------------------------------------------------------------------------- + +describe.skipIf(markitdownAvailable)('MarkItDown unavailable — graceful degradation', () => { + it('isMarkItDownAvailable returns false', () => { + expect(isMarkItDownAvailable()).toBe(false); + }); + + it('extract throws descriptive error when binary is missing', async () => { + const converter = new MarkItDownConverter(); + await expect(converter.extract({ + filePath: '/tmp/fake.pdf', + contentType: 'application/pdf', + agentDid: 'did:dkg:agent:0xTest', + })).rejects.toThrow(/MarkItDown binary not found/); + }); + + it('registry still works — returns undefined for unregistered types', () => { + const registry = new ExtractionPipelineRegistry(); + expect(registry.get('application/pdf')).toBeUndefined(); + expect(registry.availableContentTypes()).toEqual([]); + }); +}); + +// --------------------------------------------------------------------------- +// Full pipeline simulation: file → markdown → mock triples +// --------------------------------------------------------------------------- + +describe('Full extraction pipeline simulation', () => { + it('processes a file through phase 1 (file→MD) and phase 2 (MD→triples)', async () => { + const testFile = join(tmpDir, 'input.md'); + await writeFile(testFile, '# Climate Report\n\nGlobal temperature rose by 1.2°C.\n'); + + // Phase 1: file → markdown intermediate (simulated as direct read for .md files) + const phase1: ExtractionPipeline = { + contentTypes: 
['text/markdown'], + async extract(input) { + const md = await readFile(input.filePath, 'utf-8'); + return { mdIntermediate: md, triples: [], provenance: [] }; + }, + }; + + const phase1Result = await phase1.extract({ + filePath: testFile, + contentType: 'text/markdown', + agentDid: 'did:dkg:agent:0xClimate', + }); + + expect(phase1Result.mdIntermediate).toContain('Climate Report'); + expect(phase1Result.mdIntermediate).toContain('1.2°C'); + + // Phase 2: markdown → RDF triples (simulated extraction) + const extractedTriples = [ + { + subject: 'urn:climate:report:2026', + predicate: 'http://www.w3.org/1999/02/22-rdf-syntax-ns#type', + object: 'https://schema.org/Report', + }, + { + subject: 'urn:climate:report:2026', + predicate: 'https://schema.org/name', + object: 'Climate Report', + }, + { + subject: 'urn:climate:report:2026', + predicate: 'https://schema.org/description', + object: 'Global temperature rose by 1.2°C.', + }, + ]; + + const provenanceTriples = [ + { + subject: 'urn:extraction:1', + predicate: 'http://www.w3.org/1999/02/22-rdf-syntax-ns#type', + object: 'dkg:ExtractionProvenance', + }, + { + subject: 'urn:extraction:1', + predicate: 'dkg:extractedBy', + object: 'did:dkg:agent:0xClimate', + }, + ]; + + // Verify the full pipeline output + expect(extractedTriples).toHaveLength(3); + expect(extractedTriples[0].subject).toBe('urn:climate:report:2026'); + expect(provenanceTriples).toHaveLength(2); + expect(provenanceTriples[1].object).toBe('did:dkg:agent:0xClimate'); + + // Simulate what the node does: store all artifacts + const artifacts = { + originalFile: testFile, + mdIntermediate: phase1Result.mdIntermediate, + triples: extractedTriples, + provenance: provenanceTriples, + totalTripleCount: extractedTriples.length + provenanceTriples.length, + }; + + expect(artifacts.totalTripleCount).toBe(5); + expect(artifacts.mdIntermediate).toContain('Climate Report'); + }); + + it('simulates import-file response shape', async () => { + const testFile = 
join(tmpDir, 'report.html'); + await writeFile(testFile, '

Q4 Sales

Revenue: $1.2M

'); + + const registry = new ExtractionPipelineRegistry(); + + // Register a mock HTML pipeline + registry.register({ + contentTypes: ['text/html'], + async extract(input) { + const content = await readFile(input.filePath, 'utf-8'); + return { + mdIntermediate: content.replace(/<[^>]+>/g, ''), + triples: [{ subject: 'urn:sales:q4', predicate: 'rdf:type', object: 'schema:Report' }], + provenance: [], + }; + }, + }); + + const pipeline = registry.get('text/html'); + expect(pipeline).toBeDefined(); + + const result = await pipeline!.extract({ + filePath: testFile, + contentType: 'text/html', + agentDid: 'did:dkg:agent:0xSales', + }); + + // Build the import-file response as the daemon would + const importFileResponse = { + assertionUri: 'did:dkg:context-graph:sales/assertion/0xSales/q4-report', + fileHash: 'sha256:abc123', + detectedContentType: 'text/html', + extraction: { + status: result.triples.length > 0 ? 'completed' as const : 'skipped' as const, + tripleCount: result.triples.length, + mdIntermediateHash: 'sha256:def456', + pipelineUsed: 'text/html', + }, + }; + + expect(importFileResponse.extraction.status).toBe('completed'); + expect(importFileResponse.extraction.tripleCount).toBe(1); + expect(importFileResponse.extraction.pipelineUsed).toBe('text/html'); + expect(importFileResponse.detectedContentType).toBe('text/html'); + }); + + it('simulates skipped extraction for unknown content type', () => { + const registry = new ExtractionPipelineRegistry(); + registry.register(new MarkItDownConverter()); + + const pipeline = registry.get('application/octet-stream'); + expect(pipeline).toBeUndefined(); + + // Node would return extraction.status: "skipped" + const importFileResponse = { + assertionUri: 'did:dkg:context-graph:test/assertion/0xAgent/binary-blob', + fileHash: 'sha256:xyz789', + detectedContentType: 'application/octet-stream', + extraction: { + status: 'skipped' as const, + }, + }; + + expect(importFileResponse.extraction.status).toBe('skipped'); + }); 
+}); diff --git a/packages/cli/test/extraction-markitdown.test.ts b/packages/cli/test/extraction-markitdown.test.ts new file mode 100644 index 000000000..5da39770d --- /dev/null +++ b/packages/cli/test/extraction-markitdown.test.ts @@ -0,0 +1,112 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import { writeFile, rm, mkdtemp } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; + +// Dynamic import so vi.mock takes effect before module loads +let MarkItDownConverter: typeof import('../src/extraction/markitdown-converter.js').MarkItDownConverter; +let isMarkItDownAvailable: typeof import('../src/extraction/markitdown-converter.js').isMarkItDownAvailable; +let MARKITDOWN_CONTENT_TYPES: typeof import('../src/extraction/markitdown-converter.js').MARKITDOWN_CONTENT_TYPES; + +describe('MARKITDOWN_CONTENT_TYPES', () => { + beforeEach(async () => { + const mod = await import('../src/extraction/markitdown-converter.js'); + MARKITDOWN_CONTENT_TYPES = mod.MARKITDOWN_CONTENT_TYPES; + }); + + it('includes PDF', () => { + expect(MARKITDOWN_CONTENT_TYPES).toContain('application/pdf'); + }); + + it('includes DOCX', () => { + expect(MARKITDOWN_CONTENT_TYPES).toContain( + 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + ); + }); + + it('includes PPTX', () => { + expect(MARKITDOWN_CONTENT_TYPES).toContain( + 'application/vnd.openxmlformats-officedocument.presentationml.presentation', + ); + }); + + it('includes XLSX', () => { + expect(MARKITDOWN_CONTENT_TYPES).toContain( + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', + ); + }); + + it('includes CSV and HTML', () => { + expect(MARKITDOWN_CONTENT_TYPES).toContain('text/csv'); + expect(MARKITDOWN_CONTENT_TYPES).toContain('text/html'); + }); +}); + +describe('MarkItDownConverter', () => { + beforeEach(async () => { + vi.resetModules(); + const mod = await import('../src/extraction/markitdown-converter.js'); + 
MarkItDownConverter = mod.MarkItDownConverter; + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + it('exposes all supported content types', () => { + const converter = new MarkItDownConverter(); + expect(converter.contentTypes).toContain('application/pdf'); + expect(converter.contentTypes).toContain('text/csv'); + expect(converter.contentTypes.length).toBeGreaterThanOrEqual(6); + }); + + it('extract returns mdIntermediate with empty triples (phase 1 only)', async () => { + const converter = new MarkItDownConverter(); + + // If markitdown is not available, the extract call should throw + // with a helpful error message rather than silently failing + const available = (await import('../src/extraction/markitdown-converter.js')).isMarkItDownAvailable(); + if (!available) { + await expect(converter.extract({ + filePath: '/tmp/nonexistent.pdf', + contentType: 'application/pdf', + agentDid: 'did:dkg:agent:0xAbc', + })).rejects.toThrow(/MarkItDown binary not found/); + return; + } + + // If available, test the actual conversion (only runs if binary is present) + const tmpDir = await mkdtemp(join(tmpdir(), 'markitdown-test-')); + const testFile = join(tmpDir, 'test.html'); + await writeFile(testFile, '

Hello

World

'); + + try { + const result = await converter.extract({ + filePath: testFile, + contentType: 'text/html', + agentDid: 'did:dkg:agent:0xTest', + }); + + expect(typeof result.mdIntermediate).toBe('string'); + expect(result.mdIntermediate.length).toBeGreaterThan(0); + // Phase 1 only — triples are produced by the Markdown extraction pipeline + expect(result.triples).toEqual([]); + expect(result.provenance).toEqual([]); + } finally { + await rm(tmpDir, { recursive: true, force: true }); + } + }); +}); + +describe('isMarkItDownAvailable', () => { + beforeEach(async () => { + vi.resetModules(); + const mod = await import('../src/extraction/markitdown-converter.js'); + isMarkItDownAvailable = mod.isMarkItDownAvailable; + }); + + it('returns a boolean', () => { + const result = isMarkItDownAvailable(); + expect(typeof result).toBe('boolean'); + }); +}); diff --git a/packages/cli/test/skill-endpoint.test.ts b/packages/cli/test/skill-endpoint.test.ts new file mode 100644 index 000000000..21c20bfaa --- /dev/null +++ b/packages/cli/test/skill-endpoint.test.ts @@ -0,0 +1,123 @@ +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { createServer, type IncomingMessage, type ServerResponse, type Server } from 'node:http'; +import { readFileSync } from 'node:fs'; +import { httpAuthGuard } from '../src/auth.js'; + +// --------------------------------------------------------------------------- +// Auth: /.well-known/skill.md is a public path +// --------------------------------------------------------------------------- + +describe('httpAuthGuard — /.well-known/skill.md', () => { + const VALID_TOKEN = 'secret'; + const validTokens = new Set([VALID_TOKEN]); + let server: Server; + let baseUrl: string; + + beforeEach(async () => { + server = createServer((req: IncomingMessage, res: ServerResponse) => { + if (!httpAuthGuard(req, res, true, validTokens)) return; + res.writeHead(200, { 'Content-Type': 'text/plain' }); + res.end('ok'); + }); + await new 
Promise(resolve => server.listen(0, '127.0.0.1', resolve)); + const addr = server.address() as { port: number }; + baseUrl = `http://127.0.0.1:${addr.port}`; + }); + + afterEach(async () => { + await new Promise(resolve => server.close(() => resolve())); + }); + + it('allows /.well-known/skill.md without a token (public endpoint)', async () => { + const res = await fetch(`${baseUrl}/.well-known/skill.md`); + expect(res.status).toBe(200); + const body = await res.text(); + expect(body).toBe('ok'); + }); + + it('still rejects other protected endpoints without token', async () => { + const res = await fetch(`${baseUrl}/api/publish`, { method: 'POST' }); + expect(res.status).toBe(401); + }); +}); + +// --------------------------------------------------------------------------- +// SKILL.md file content +// --------------------------------------------------------------------------- + +describe('SKILL.md file', () => { + let skillContent: string; + + beforeEach(() => { + const skillPath = new URL('../skills/dkg-node/SKILL.md', import.meta.url); + skillContent = readFileSync(skillPath, 'utf-8'); + }); + + it('starts with Agent Skills YAML frontmatter', () => { + expect(skillContent).toMatch(/^---\n/); + expect(skillContent).toContain('name: dkg-node'); + expect(skillContent).toContain('description:'); + expect(skillContent).toMatch(/---\n\n/); + }); + + it('contains the required DKG V10 sections', () => { + expect(skillContent).toContain('## 1. Node Info'); + expect(skillContent).toContain('## 2. Capabilities Overview'); + expect(skillContent).toContain('## 3. Quick Start'); + expect(skillContent).toContain('## 4. Authentication'); + expect(skillContent).toContain('## 5. Memory Model'); + expect(skillContent).toContain('## 6. Context Graphs'); + expect(skillContent).toContain('## 7. File Ingestion'); + expect(skillContent).toContain('## 8. Node Administration'); + expect(skillContent).toContain('## 9. Error Reference'); + expect(skillContent).toContain('## 10. 
Workflow Recipes'); + }); + + it('contains dynamic placeholders for node info', () => { + expect(skillContent).toContain('(dynamic)'); + expect(skillContent).toContain('**Node version:**'); + expect(skillContent).toContain('**Base URL:**'); + expect(skillContent).toContain('**Peer ID:**'); + }); + + it('documents the three memory layers', () => { + expect(skillContent).toContain('Working Memory (WM)'); + expect(skillContent).toContain('Shared Working Memory (SWM)'); + expect(skillContent).toContain('Verified Memory (VM)'); + }); + + it('includes key available API endpoints', () => { + expect(skillContent).toContain('/api/shared-memory/write'); + expect(skillContent).toContain('/api/shared-memory/publish'); + expect(skillContent).toContain('/api/publish'); + expect(skillContent).toContain('/api/query'); + expect(skillContent).toContain('/api/context-graph/create'); + expect(skillContent).toContain('/api/context-graph/list'); + expect(skillContent).toContain('/api/status'); + }); + + it('marks planned endpoints clearly', () => { + expect(skillContent).toContain('🚧 Planned'); + expect(skillContent).toContain('/api/assertion/create'); + expect(skillContent).toContain('/api/assertion/{name}/import-file'); + }); + + it('documents error status codes', () => { + expect(skillContent).toContain('| 400 |'); + expect(skillContent).toContain('| 401 |'); + expect(skillContent).toContain('| 403 |'); + expect(skillContent).toContain('| 404 |'); + expect(skillContent).toContain('| 409 |'); + }); + + it('includes V9 to V10 migration table', () => { + expect(skillContent).toContain('V9 → V10 Migration'); + expect(skillContent).toContain('Paranet'); + expect(skillContent).toContain('Context Graph'); + }); + + it('is under 500 lines (Agent Skills best practice)', () => { + const lines = skillContent.split('\n').length; + expect(lines).toBeLessThan(500); + }); +}); diff --git a/packages/core/src/constants.ts b/packages/core/src/constants.ts index 4165f40cf..623927e55 100644 --- 
a/packages/core/src/constants.ts +++ b/packages/core/src/constants.ts @@ -85,9 +85,9 @@ export function contextGraphVerifiedMemoryMetaUri(contextGraphId: string, verifi return `did:dkg:context-graph:${contextGraphId}/_verified_memory/${verifiedMemoryId}/_meta`; } -export function contextGraphDraftUri(contextGraphId: string, agentAddress: string, name: string, subGraphName?: string): string { - if (subGraphName) return `did:dkg:context-graph:${contextGraphId}/${subGraphName}/draft/${agentAddress}/${name}`; - return `did:dkg:context-graph:${contextGraphId}/draft/${agentAddress}/${name}`; +export function contextGraphAssertionUri(contextGraphId: string, agentAddress: string, name: string, subGraphName?: string): string { + if (subGraphName) return `did:dkg:context-graph:${contextGraphId}/${subGraphName}/assertion/${agentAddress}/${name}`; + return `did:dkg:context-graph:${contextGraphId}/assertion/${agentAddress}/${name}`; } export function contextGraphRulesUri(contextGraphId: string): string { @@ -115,7 +115,7 @@ export function validateSubGraphName(name: string): { valid: boolean; reason?: s if (name.startsWith('_')) return { valid: false, reason: 'Sub-graph names starting with "_" are reserved for protocol graphs' }; if (name.includes('/')) return { valid: false, reason: 'Sub-graph names cannot contain "/"' }; if (/[<>"{}|^`\\\s]/.test(name)) return { valid: false, reason: 'Sub-graph name contains characters unsafe for IRIs' }; - if (name === 'context' || name === 'draft') return { valid: false, reason: `"${name}" is a reserved path segment` }; + if (name === 'context' || name === 'assertion' || name === 'draft') return { valid: false, reason: `"${name}" is a reserved path segment` }; return { valid: true }; } diff --git a/packages/core/src/extraction-pipeline.ts b/packages/core/src/extraction-pipeline.ts new file mode 100644 index 000000000..99459f217 --- /dev/null +++ b/packages/core/src/extraction-pipeline.ts @@ -0,0 +1,67 @@ +/** + * Pluggable extraction 
pipeline interface for converting non-RDF files + * (PDF, DOCX, etc.) into Markdown intermediates and RDF triples. + * + * Spec: 05_PROTOCOL_EXTENSIONS.md §6.5 + */ + +export interface Quad { + subject: string; + predicate: string; + object: string; + graph?: string; +} + +export interface ExtractionInput { + /** Path to the file on disk (temp file from multipart upload). */ + filePath: string; + /** Detected or user-specified MIME content type. */ + contentType: string; + /** Optional: CG's _ontology graph URI for guided extraction. */ + ontologyRef?: string; + /** Extracting agent's DID (for provenance tracking). */ + agentDid: string; +} + +export interface ExtractionOutput { + /** Markdown intermediate (stored alongside original, inspectable). */ + mdIntermediate: string; + /** Extracted RDF triples. */ + triples: Quad[]; + /** dkg:ExtractionProvenance quads for semantically extracted triples. */ + provenance: Quad[]; +} + +export interface ExtractionPipeline { + /** MIME content types this pipeline handles. */ + readonly contentTypes: string[]; + /** Convert a file to Markdown intermediate + RDF triples. */ + extract(input: ExtractionInput): Promise<ExtractionOutput>; +} + +/** + * Registry that maps content types to extraction pipelines. + * Nodes register pipelines at startup; the import-file endpoint + * looks up the pipeline for the detected content type. 
 + */ +export class ExtractionPipelineRegistry { + private readonly pipelines = new Map<string, ExtractionPipeline>(); + + register(pipeline: ExtractionPipeline): void { + for (const ct of pipeline.contentTypes) { + this.pipelines.set(ct, pipeline); + } + } + + get(contentType: string): ExtractionPipeline | undefined { + return this.pipelines.get(contentType); + } + + has(contentType: string): boolean { + return this.pipelines.has(contentType); + } + + availableContentTypes(): string[] { + return [...this.pipelines.keys()]; + } +} diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index a784bb5b5..e8cf11798 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -44,3 +44,10 @@ export { loadAuthTokenSync, loadAuthToken, } from './dkg-home.js'; +export { + type Quad as ExtractionQuad, + type ExtractionInput, + type ExtractionOutput, + type ExtractionPipeline, + ExtractionPipelineRegistry, +} from './extraction-pipeline.js'; diff --git a/packages/core/src/memory-model.ts b/packages/core/src/memory-model.ts index 3573320a4..b472d32e0 100644 --- a/packages/core/src/memory-model.ts +++ b/packages/core/src/memory-model.ts @@ -5,7 +5,7 @@ * for the DKG V10 protocol. * * Memory layers (ordered by trust/permanence): - * WM → Working Memory: local agent drafts, not shared + * WM → Working Memory: local agent assertions, not shared * SWM → Shared Working Memory: published to peers, not anchored * VM → Verified Memory: anchored on-chain and M-of-N verified */ @@ -41,13 +41,14 @@ export interface MemoryTransition { timestamp: string; } -export interface DraftDescriptor { +export interface AssertionDescriptor { contextGraphId: string; agentAddress: string; name: string; createdAt: string; } + +export interface ShareRecord { contextGraphId: string; agentAddress: string; @@ -105,7 +106,7 @@ export interface Publication { /** * V10 GET view selectors — each declares which memory layer(s) a query targets. 
* - * working-memory → WM (agent's own draft graphs, local-only) + * working-memory → WM (agent's own assertion graphs, local-only) * shared-working-memory → SWM (provisional, gossip-replicated) * verified-memory → VM (on-chain anchored, M-of-N quorum verified) */ diff --git a/packages/core/test/extraction-pipeline.test.ts b/packages/core/test/extraction-pipeline.test.ts new file mode 100644 index 000000000..0d99aee6b --- /dev/null +++ b/packages/core/test/extraction-pipeline.test.ts @@ -0,0 +1,117 @@ +import { describe, it, expect } from 'vitest'; +import { + ExtractionPipelineRegistry, + type ExtractionPipeline, + type ExtractionInput, + type ExtractionOutput, +} from '../src/extraction-pipeline.js'; + +function makePipeline(contentTypes: string[], output?: Partial<ExtractionOutput>): ExtractionPipeline { + return { + contentTypes, + async extract(_input: ExtractionInput): Promise<ExtractionOutput> { + return { + mdIntermediate: output?.mdIntermediate ?? '# Test', + triples: output?.triples ?? [], + provenance: output?.provenance ?? 
[], + }; + }, + }; +} + +describe('ExtractionPipelineRegistry', () => { + it('starts empty', () => { + const registry = new ExtractionPipelineRegistry(); + expect(registry.availableContentTypes()).toEqual([]); + expect(registry.has('text/markdown')).toBe(false); + expect(registry.get('text/markdown')).toBeUndefined(); + }); + + it('registers a pipeline for its content types', () => { + const registry = new ExtractionPipelineRegistry(); + const pipeline = makePipeline(['application/pdf', 'text/html']); + registry.register(pipeline); + + expect(registry.has('application/pdf')).toBe(true); + expect(registry.has('text/html')).toBe(true); + expect(registry.has('text/plain')).toBe(false); + expect(registry.get('application/pdf')).toBe(pipeline); + expect(registry.get('text/html')).toBe(pipeline); + }); + + it('lists all available content types', () => { + const registry = new ExtractionPipelineRegistry(); + registry.register(makePipeline(['text/markdown'])); + registry.register(makePipeline(['application/pdf', 'text/csv'])); + + const types = registry.availableContentTypes(); + expect(types).toContain('text/markdown'); + expect(types).toContain('application/pdf'); + expect(types).toContain('text/csv'); + expect(types).toHaveLength(3); + }); + + it('later registration overwrites earlier for same content type', () => { + const registry = new ExtractionPipelineRegistry(); + const first = makePipeline(['application/pdf']); + const second = makePipeline(['application/pdf']); + registry.register(first); + registry.register(second); + + expect(registry.get('application/pdf')).toBe(second); + }); + + it('supports multiple pipelines for different types', () => { + const registry = new ExtractionPipelineRegistry(); + const mdPipeline = makePipeline(['text/markdown']); + const pdfPipeline = makePipeline(['application/pdf']); + registry.register(mdPipeline); + registry.register(pdfPipeline); + + expect(registry.get('text/markdown')).toBe(mdPipeline); + 
expect(registry.get('application/pdf')).toBe(pdfPipeline); + }); +}); + +describe('ExtractionPipeline interface', () => { + it('extract returns mdIntermediate, triples, and provenance', async () => { + const pipeline = makePipeline(['text/markdown'], { + mdIntermediate: '# Hello\n\nWorld', + triples: [{ subject: 'urn:test:1', predicate: 'rdf:type', object: 'schema:Thing' }], + provenance: [{ subject: 'urn:prov:1', predicate: 'dkg:extractedBy', object: 'did:dkg:agent:0x123' }], + }); + + const result = await pipeline.extract({ + filePath: '/tmp/test.md', + contentType: 'text/markdown', + agentDid: 'did:dkg:agent:0x123', + }); + + expect(result.mdIntermediate).toBe('# Hello\n\nWorld'); + expect(result.triples).toHaveLength(1); + expect(result.triples[0].subject).toBe('urn:test:1'); + expect(result.provenance).toHaveLength(1); + }); + + it('extract passes through ontologyRef when provided', async () => { + let capturedInput: ExtractionInput | null = null; + const pipeline: ExtractionPipeline = { + contentTypes: ['application/pdf'], + async extract(input) { + capturedInput = input; + return { mdIntermediate: '', triples: [], provenance: [] }; + }, + }; + + await pipeline.extract({ + filePath: '/tmp/paper.pdf', + contentType: 'application/pdf', + agentDid: 'did:dkg:agent:0xAbc', + ontologyRef: 'did:dkg:context-graph:research/_ontology', + }); + + expect(capturedInput).not.toBeNull(); + expect(capturedInput!.ontologyRef).toBe('did:dkg:context-graph:research/_ontology'); + expect(capturedInput!.agentDid).toBe('did:dkg:agent:0xAbc'); + }); +}); diff --git a/packages/core/test/memory-model-e2e.test.ts b/packages/core/test/memory-model-e2e.test.ts index f6b6b1988..02b30072c 100644 --- a/packages/core/test/memory-model-e2e.test.ts +++ b/packages/core/test/memory-model-e2e.test.ts @@ -4,7 +4,7 @@ import { TransitionType, isValidTransition, type MemoryTransition, - type DraftDescriptor, + type AssertionDescriptor, type ShareRecord, type PublicationRequest, type Publication, @@ 
-151,15 +151,15 @@ describe('V10 memory model e2e: full lifecycle simulation', () => { } }); - it('draft → share → publish → verify lifecycle with types', () => { - // Step 1: Agent creates a draft (WM) - const draft: DraftDescriptor = { + it('assertion → share → publish → verify lifecycle with types', () => { + // Step 1: Agent creates an assertion (WM) + const assertion: AssertionDescriptor = { contextGraphId: CG_ID, agentAddress: AGENT, name: 'game-turn-42', createdAt: new Date().toISOString(), }; - expect(draft.name).toBeTruthy(); + expect(assertion.name).toBeTruthy(); // Step 2: Agent shares to SWM const share: ShareRecord = { diff --git a/packages/core/test/memory-model.test.ts b/packages/core/test/memory-model.test.ts index 57a1a17be..02230a28a 100644 --- a/packages/core/test/memory-model.test.ts +++ b/packages/core/test/memory-model.test.ts @@ -13,7 +13,7 @@ import { type Publication, type PublicationRequest, type MemoryTransition, - type DraftDescriptor, + type AssertionDescriptor, type ShareRecord, } from '../src/memory-model.js'; @@ -223,15 +223,15 @@ describe('MemoryTransition interface', () => { }); }); -describe('DraftDescriptor interface', () => { - it('describes an agent draft', () => { - const d: DraftDescriptor = { +describe('AssertionDescriptor interface', () => { + it('describes an agent assertion', () => { + const d: AssertionDescriptor = { contextGraphId: 'cg-1', agentAddress: '0xAbc', - name: 'my-draft', + name: 'my-assertion', createdAt: '2026-04-02T00:00:00Z', }; - expect(d.name).toBe('my-draft'); + expect(d.name).toBe('my-assertion'); }); }); diff --git a/packages/core/test/sub-graphs.test.ts b/packages/core/test/sub-graphs.test.ts index 724499d10..174e5c395 100644 --- a/packages/core/test/sub-graphs.test.ts +++ b/packages/core/test/sub-graphs.test.ts @@ -4,7 +4,7 @@ import { contextGraphSubGraphMetaUri, contextGraphSharedMemoryUri, contextGraphSharedMemoryMetaUri, - contextGraphDraftUri, + contextGraphAssertionUri, validateSubGraphName, } 
from '../src/constants.js'; @@ -47,17 +47,18 @@ describe('sub-graph URI helpers', () => { ); }); - it('contextGraphDraftUri with subGraphName places sub-graph before draft', () => { - expect(contextGraphDraftUri(cgId, '0xAgent', 'scan', 'code')).toBe( - 'did:dkg:context-graph:dkg-v10-dev/code/draft/0xAgent/scan', + it('contextGraphAssertionUri with subGraphName places sub-graph before assertion', () => { + expect(contextGraphAssertionUri(cgId, '0xAgent', 'scan', 'code')).toBe( + 'did:dkg:context-graph:dkg-v10-dev/code/assertion/0xAgent/scan', ); }); - it('contextGraphDraftUri without subGraphName produces flat URI', () => { - expect(contextGraphDraftUri(cgId, '0xAgent', 'scan')).toBe( - 'did:dkg:context-graph:dkg-v10-dev/draft/0xAgent/scan', + it('contextGraphAssertionUri without subGraphName produces flat URI', () => { + expect(contextGraphAssertionUri(cgId, '0xAgent', 'scan')).toBe( + 'did:dkg:context-graph:dkg-v10-dev/assertion/0xAgent/scan', ); }); + }); describe('validateSubGraphName', () => { @@ -98,6 +99,7 @@ describe('validateSubGraphName', () => { it('rejects reserved path segments', () => { expect(validateSubGraphName('context').valid).toBe(false); + expect(validateSubGraphName('assertion').valid).toBe(false); expect(validateSubGraphName('draft').valid).toBe(false); }); }); diff --git a/packages/core/test/v10-constants.test.ts b/packages/core/test/v10-constants.test.ts index f533d5fa6..819b6ba22 100644 --- a/packages/core/test/v10-constants.test.ts +++ b/packages/core/test/v10-constants.test.ts @@ -25,7 +25,7 @@ import { contextGraphSharedMemoryMetaUri, contextGraphVerifiedMemoryUri, contextGraphVerifiedMemoryMetaUri, - contextGraphDraftUri, + contextGraphAssertionUri, contextGraphRulesUri, contextGraphSubGraphUri, // Deprecated aliases @@ -128,8 +128,8 @@ describe('V10 named graph URIs', () => { expect(contextGraphVerifiedMemoryMetaUri(id, '7')).toBe('did:dkg:context-graph:42/_verified_memory/7/_meta'); }); - it('draft URI', () => { - 
expect(contextGraphDraftUri(id, '0xAbc', 'my-draft')).toBe('did:dkg:context-graph:42/draft/0xAbc/my-draft'); + it('assertion URI', () => { + expect(contextGraphAssertionUri(id, '0xAbc', 'my-assertion')).toBe('did:dkg:context-graph:42/assertion/0xAbc/my-assertion'); }); it('rules URI', () => { diff --git a/packages/network-sim/src/components/ControlPanel.tsx b/packages/network-sim/src/components/ControlPanel.tsx index b9b6074ac..a2a137623 100644 --- a/packages/network-sim/src/components/ControlPanel.tsx +++ b/packages/network-sim/src/components/ControlPanel.tsx @@ -588,7 +588,7 @@ function SharedMemoryTab() { return (
- Write draft triples to shared memory (free, no gas). When ready, publish them to the chain with full finality. + Write triples to shared memory (free, no gas). When ready, publish them to the chain with full finality.
diff --git a/packages/publisher/src/dkg-publisher.ts b/packages/publisher/src/dkg-publisher.ts index 8afacadcc..3c56868fc 100644 --- a/packages/publisher/src/dkg-publisher.ts +++ b/packages/publisher/src/dkg-publisher.ts @@ -1,7 +1,7 @@ import type { Quad, TripleStore } from '@origintrail-official/dkg-storage'; import type { ChainAdapter, OnChainPublishResult, AddBatchToContextGraphParams } from '@origintrail-official/dkg-chain'; import type { EventBus, OperationContext } from '@origintrail-official/dkg-core'; -import { DKGEvent, Logger, createOperationContext, sha256, encodeWorkspacePublishRequest, contextGraphDataUri, contextGraphMetaUri, contextGraphDraftUri, contextGraphSubGraphUri, contextGraphSubGraphMetaUri, validateSubGraphName, isSafeIri, assertSafeIri, assertSafeRdfTerm, type Ed25519Keypair, computeACKDigest } from '@origintrail-official/dkg-core'; +import { DKGEvent, Logger, createOperationContext, sha256, encodeWorkspacePublishRequest, contextGraphDataUri, contextGraphMetaUri, contextGraphAssertionUri, contextGraphSubGraphUri, contextGraphSubGraphMetaUri, validateSubGraphName, isSafeIri, assertSafeIri, assertSafeRdfTerm, type Ed25519Keypair, computeACKDigest } from '@origintrail-official/dkg-core'; import { GraphManager, PrivateContentStore } from '@origintrail-official/dkg-storage'; import type { Publisher, PublishOptions, PublishResult, KAManifestEntry, PhaseCallback } from './publisher.js'; import { autoPartition } from './auto-partition.js'; @@ -171,7 +171,8 @@ export class DKGPublisher implements Publisher { options: ShareOptions, ): Promise { const subjects = [...new Set(quads.map(q => q.subject))]; - const lockKeys = subjects.map(s => `${contextGraphId}\0${s}`); + const lockPrefix = options.subGraphName ? 
`${contextGraphId}\0${options.subGraphName}` : contextGraphId; + const lockKeys = subjects.map(s => `${lockPrefix}\0${s}`); return this.withWriteLocks(lockKeys, () => this._shareImpl(contextGraphId, quads, options)); } @@ -361,7 +362,8 @@ export class DKGPublisher implements Publisher { const conditionSubjects = options.conditions.map(c => c.subject); const quadSubjects = [...new Set(quads.map(q => q.subject))]; - const lockKeys = [...new Set([...conditionSubjects, ...quadSubjects])].map(s => `${contextGraphId}\0${s}`); + const lockPrefix = options.subGraphName ? `${contextGraphId}\0${options.subGraphName}` : contextGraphId; + const lockKeys = [...new Set([...conditionSubjects, ...quadSubjects])].map(s => `${lockPrefix}\0${s}`); return this.withWriteLocks(lockKeys, () => this._executeConditionalWrite(contextGraphId, quads, options)); } @@ -383,7 +385,7 @@ export class DKGPublisher implements Publisher { const ctx = options.operationCtx ?? createOperationContext('share'); await this.graphManager.ensureContextGraph(contextGraphId); - const swmGraph = this.graphManager.sharedMemoryUri(contextGraphId); + const swmGraph = this.graphManager.sharedMemoryUri(contextGraphId, options.subGraphName); for (const cond of options.conditions) { const ask = cond.expectedValue === null @@ -1455,7 +1457,7 @@ export class DKGPublisher implements Publisher { } } - // ── Working Memory Draft Operations (spec §6) ──────────────────────── + // ── Working Memory Assertion Operations (spec §6) ─────────────────── private static validateOptionalSubGraph(subGraphName: string | undefined): void { if (subGraphName !== undefined) { @@ -1466,55 +1468,54 @@ export class DKGPublisher implements Publisher { clearSubGraphOwnership(ownershipKey: string): void { this.sharedMemoryOwnedEntities.delete(ownershipKey); + this.ownedEntities.delete(ownershipKey); + this.privateStore.clearCache(ownershipKey); } - async draftCreate(contextGraphId: string, draftName: string, agentAddress: string, subGraphName?: 
string): Promise<string> { + async assertionCreate(contextGraphId: string, name: string, agentAddress: string, subGraphName?: string): Promise<string> { DKGPublisher.validateOptionalSubGraph(subGraphName); - const graphUri = contextGraphDraftUri(contextGraphId, agentAddress, draftName, subGraphName); + const graphUri = contextGraphAssertionUri(contextGraphId, agentAddress, name, subGraphName); await this.store.createGraph(graphUri); return graphUri; } - async draftWrite( + async assertionWrite( contextGraphId: string, - draftName: string, + name: string, agentAddress: string, input: Quad[] | Array<{ subject: string; predicate: string; object: string }>, subGraphName?: string, ): Promise<void> { DKGPublisher.validateOptionalSubGraph(subGraphName); - const graphUri = contextGraphDraftUri(contextGraphId, agentAddress, draftName, subGraphName); + const graphUri = contextGraphAssertionUri(contextGraphId, agentAddress, name, subGraphName); const quads = input.map((t) => ({ - subject: t.subject, - predicate: t.predicate, - object: t.object, - graph: graphUri, + subject: t.subject, predicate: t.predicate, object: t.object, graph: graphUri, })); await this.store.insert(quads); } - async draftQuery( + async assertionQuery( contextGraphId: string, - draftName: string, + name: string, agentAddress: string, subGraphName?: string, ): Promise<Quad[]> { DKGPublisher.validateOptionalSubGraph(subGraphName); - const graphUri = contextGraphDraftUri(contextGraphId, agentAddress, draftName, subGraphName); + const graphUri = contextGraphAssertionUri(contextGraphId, agentAddress, name, subGraphName); const result = await this.store.query( `CONSTRUCT { ?s ?p ?o } WHERE { GRAPH <${graphUri}> { ?s ?p ?o } }`, ); return result.type === 'quads' ? 
result.quads : []; } - async draftPromote( + async assertionPromote( contextGraphId: string, - draftName: string, + name: string, agentAddress: string, opts?: { entities?: string[] | 'all'; subGraphName?: string }, ): Promise<{ promotedCount: number }> { DKGPublisher.validateOptionalSubGraph(opts?.subGraphName); - const graphUri = contextGraphDraftUri(contextGraphId, agentAddress, draftName, opts?.subGraphName); + const graphUri = contextGraphAssertionUri(contextGraphId, agentAddress, name, opts?.subGraphName); const swmGraphUri = this.graphManager.sharedMemoryUri(contextGraphId, opts?.subGraphName); const result = await this.store.query( @@ -1537,7 +1538,7 @@ export class DKGPublisher implements Publisher { const swmQuads = quadsToPromote.map((q) => ({ ...q, graph: swmGraphUri })); await this.store.insert(swmQuads); - // Delete promoted triples from draft + // Delete promoted triples from assertion graph await this.store.delete(quadsToPromote.map((q) => ({ ...q, graph: graphUri }))); // Record ShareTransition metadata in _shared_memory_meta (spec §8) @@ -1547,7 +1548,7 @@ export class DKGPublisher implements Publisher { contextGraphId, operationId, agentAddress, - draftName, + assertionName: name, entities, timestamp: new Date(), }); @@ -1556,11 +1557,12 @@ export class DKGPublisher implements Publisher { return { promotedCount: swmQuads.length }; } - async draftDiscard(contextGraphId: string, draftName: string, agentAddress: string, subGraphName?: string): Promise { + async assertionDiscard(contextGraphId: string, name: string, agentAddress: string, subGraphName?: string): Promise { DKGPublisher.validateOptionalSubGraph(subGraphName); - const graphUri = contextGraphDraftUri(contextGraphId, agentAddress, draftName, subGraphName); + const graphUri = contextGraphAssertionUri(contextGraphId, agentAddress, name, subGraphName); await this.store.dropGraph(graphUri); } + } /** diff --git a/packages/publisher/src/metadata.ts b/packages/publisher/src/metadata.ts index 
3bc106b55..3944b3600 100644 --- a/packages/publisher/src/metadata.ts +++ b/packages/publisher/src/metadata.ts @@ -277,7 +277,7 @@ export interface ShareTransitionMetadata { contextGraphId: string; operationId: string; agentAddress: string; - draftName: string; + assertionName: string; entities: string[]; timestamp: Date; } @@ -287,7 +287,7 @@ export function generateShareTransitionMetadata(meta: ShareTransitionMetadata): const subject = `urn:dkg:share:${meta.operationId}`; const quads: Quad[] = [ mq(subject, `${RDF}type`, `${DKG}ShareTransition`, metaGraph), - mq(subject, `${DKG}source`, lit(`draft/${meta.agentAddress}/${meta.draftName}`), metaGraph), + mq(subject, `${DKG}source`, lit(`assertion/${meta.agentAddress}/${meta.assertionName}`), metaGraph), mq(subject, `${DKG}agent`, `did:dkg:agent:${meta.agentAddress}`, metaGraph), mq(subject, `${DKG}timestamp`, dateLit(meta.timestamp), metaGraph), ]; diff --git a/packages/publisher/test/draft-lifecycle.test.ts b/packages/publisher/test/draft-lifecycle.test.ts index 985b89d9f..514801d79 100644 --- a/packages/publisher/test/draft-lifecycle.test.ts +++ b/packages/publisher/test/draft-lifecycle.test.ts @@ -1,15 +1,15 @@ import { describe, it, expect, beforeEach } from 'vitest'; import { OxigraphStore, type Quad } from '@origintrail-official/dkg-storage'; import { MockChainAdapter } from '@origintrail-official/dkg-chain'; -import { TypedEventBus, generateEd25519Keypair, contextGraphDraftUri } from '@origintrail-official/dkg-core'; +import { TypedEventBus, generateEd25519Keypair, contextGraphAssertionUri } from '@origintrail-official/dkg-core'; import { DKGPublisher } from '../src/index.js'; import { ethers } from 'ethers'; -const CG_ID = 'test-draft-cg'; +const CG_ID = 'test-assertion-cg'; const SWM_GRAPH = `did:dkg:context-graph:${CG_ID}/_shared_memory`; const AGENT = '0x1234567890abcdef1234567890abcdef12345678'; const AGENT_B = '0xabcdefabcdefabcdefabcdefabcdefabcdefabcd'; -const DRAFT = 'my-draft'; +const ASSERTION_NAME 
= 'my-assertion'; const TRIPLES = [ { subject: 'urn:test:entity:alice', predicate: 'http://schema.org/name', object: '"Alice"' }, @@ -17,7 +17,7 @@ const TRIPLES = [ { subject: 'urn:test:entity:bob', predicate: 'http://schema.org/name', object: '"Bob"' }, ]; -describe('Working Memory Draft Lifecycle', () => { +describe('Working Memory Assertion Lifecycle', () => { let store: OxigraphStore; let publisher: DKGPublisher; @@ -36,47 +36,47 @@ describe('Working Memory Draft Lifecycle', () => { }); }); - it('create returns the correct draft graph URI', async () => { - const uri = await publisher.draftCreate(CG_ID, DRAFT, AGENT); - expect(uri).toBe(contextGraphDraftUri(CG_ID, AGENT, DRAFT)); + it('create returns the correct assertion graph URI', async () => { + const uri = await publisher.assertionCreate(CG_ID, ASSERTION_NAME, AGENT); + expect(uri).toBe(contextGraphAssertionUri(CG_ID, AGENT, ASSERTION_NAME)); }); - it('write inserts triples into the draft graph', async () => { - await publisher.draftCreate(CG_ID, DRAFT, AGENT); - await publisher.draftWrite(CG_ID, DRAFT, AGENT, TRIPLES); + it('write inserts triples into the assertion graph', async () => { + await publisher.assertionCreate(CG_ID, ASSERTION_NAME, AGENT); + await publisher.assertionWrite(CG_ID, ASSERTION_NAME, AGENT, TRIPLES); - const quads = await publisher.draftQuery(CG_ID, DRAFT, AGENT); + const quads = await publisher.assertionQuery(CG_ID, ASSERTION_NAME, AGENT); expect(quads.length).toBe(3); const subjects = new Set(quads.map((q: Quad) => q.subject)); expect(subjects.has('urn:test:entity:alice')).toBe(true); expect(subjects.has('urn:test:entity:bob')).toBe(true); }); - it('query returns triples from the draft only', async () => { - await publisher.draftCreate(CG_ID, DRAFT, AGENT); - await publisher.draftWrite(CG_ID, DRAFT, AGENT, TRIPLES); + it('query returns triples from the assertion only', async () => { + await publisher.assertionCreate(CG_ID, ASSERTION_NAME, AGENT); + await 
publisher.assertionWrite(CG_ID, ASSERTION_NAME, AGENT, TRIPLES); - // Write something to a different draft — should not appear - await publisher.draftCreate(CG_ID, 'other-draft', AGENT); - await publisher.draftWrite(CG_ID, 'other-draft', AGENT, [ + // Write something to a different assertion — should not appear + await publisher.assertionCreate(CG_ID, 'other-assertion', AGENT); + await publisher.assertionWrite(CG_ID, 'other-assertion', AGENT, [ { subject: 'urn:test:entity:charlie', predicate: 'http://schema.org/name', object: '"Charlie"' }, ]); - const quads = await publisher.draftQuery(CG_ID, DRAFT, AGENT); + const quads = await publisher.assertionQuery(CG_ID, ASSERTION_NAME, AGENT); expect(quads.length).toBe(3); const subjects = new Set(quads.map((q: Quad) => q.subject)); expect(subjects.has('urn:test:entity:charlie')).toBe(false); }); - it('promote moves all triples to SWM and empties draft', async () => { - await publisher.draftCreate(CG_ID, DRAFT, AGENT); - await publisher.draftWrite(CG_ID, DRAFT, AGENT, TRIPLES); + it('promote moves all triples to SWM and empties assertion', async () => { + await publisher.assertionCreate(CG_ID, ASSERTION_NAME, AGENT); + await publisher.assertionWrite(CG_ID, ASSERTION_NAME, AGENT, TRIPLES); - const result = await publisher.draftPromote(CG_ID, DRAFT, AGENT); + const result = await publisher.assertionPromote(CG_ID, ASSERTION_NAME, AGENT); expect(result.promotedCount).toBe(3); - const draftQuads = await publisher.draftQuery(CG_ID, DRAFT, AGENT); - expect(draftQuads.length).toBe(0); + const assertionQuads = await publisher.assertionQuery(CG_ID, ASSERTION_NAME, AGENT); + expect(assertionQuads.length).toBe(0); const swmResult = await store.query( `SELECT ?s ?p ?o WHERE { GRAPH <${SWM_GRAPH}> { ?s ?p ?o } }`, @@ -88,15 +88,15 @@ describe('Working Memory Draft Lifecycle', () => { }); it('promote with entity filter only moves selected entities', async () => { - await publisher.draftCreate(CG_ID, DRAFT, AGENT); - await 
publisher.draftWrite(CG_ID, DRAFT, AGENT, TRIPLES); + await publisher.assertionCreate(CG_ID, ASSERTION_NAME, AGENT); + await publisher.assertionWrite(CG_ID, ASSERTION_NAME, AGENT, TRIPLES); - const result = await publisher.draftPromote(CG_ID, DRAFT, AGENT, { + const result = await publisher.assertionPromote(CG_ID, ASSERTION_NAME, AGENT, { entities: ['urn:test:entity:alice'], }); expect(result.promotedCount).toBe(2); - const remaining = await publisher.draftQuery(CG_ID, DRAFT, AGENT); + const remaining = await publisher.assertionQuery(CG_ID, ASSERTION_NAME, AGENT); expect(remaining.length).toBe(1); expect(remaining[0].subject).toBe('urn:test:entity:bob'); @@ -111,46 +111,46 @@ describe('Working Memory Draft Lifecycle', () => { } }); - it('discard drops the draft graph', async () => { - await publisher.draftCreate(CG_ID, DRAFT, AGENT); - await publisher.draftWrite(CG_ID, DRAFT, AGENT, TRIPLES); - await publisher.draftDiscard(CG_ID, DRAFT, AGENT); + it('discard drops the assertion graph', async () => { + await publisher.assertionCreate(CG_ID, ASSERTION_NAME, AGENT); + await publisher.assertionWrite(CG_ID, ASSERTION_NAME, AGENT, TRIPLES); + await publisher.assertionDiscard(CG_ID, ASSERTION_NAME, AGENT); - const quads = await publisher.draftQuery(CG_ID, DRAFT, AGENT); + const quads = await publisher.assertionQuery(CG_ID, ASSERTION_NAME, AGENT); expect(quads.length).toBe(0); }); - it('different agents have isolated draft graphs', async () => { - await publisher.draftCreate(CG_ID, DRAFT, AGENT); - await publisher.draftCreate(CG_ID, DRAFT, AGENT_B); + it('different agents have isolated assertion graphs', async () => { + await publisher.assertionCreate(CG_ID, ASSERTION_NAME, AGENT); + await publisher.assertionCreate(CG_ID, ASSERTION_NAME, AGENT_B); - await publisher.draftWrite(CG_ID, DRAFT, AGENT, [ + await publisher.assertionWrite(CG_ID, ASSERTION_NAME, AGENT, [ { subject: 'urn:test:alice', predicate: 'http://schema.org/name', object: '"Alice"' }, ]); - await 
publisher.draftWrite(CG_ID, DRAFT, AGENT_B, [ + await publisher.assertionWrite(CG_ID, ASSERTION_NAME, AGENT_B, [ { subject: 'urn:test:bob', predicate: 'http://schema.org/name', object: '"Bob"' }, ]); - const agentAQuads = await publisher.draftQuery(CG_ID, DRAFT, AGENT); + const agentAQuads = await publisher.assertionQuery(CG_ID, ASSERTION_NAME, AGENT); expect(agentAQuads.length).toBe(1); expect(agentAQuads[0].subject).toBe('urn:test:alice'); - const agentBQuads = await publisher.draftQuery(CG_ID, DRAFT, AGENT_B); + const agentBQuads = await publisher.assertionQuery(CG_ID, ASSERTION_NAME, AGENT_B); expect(agentBQuads.length).toBe(1); expect(agentBQuads[0].subject).toBe('urn:test:bob'); }); - it('promote on empty draft returns 0', async () => { - await publisher.draftCreate(CG_ID, DRAFT, AGENT); - const result = await publisher.draftPromote(CG_ID, DRAFT, AGENT); + it('promote on empty assertion returns 0', async () => { + await publisher.assertionCreate(CG_ID, ASSERTION_NAME, AGENT); + const result = await publisher.assertionPromote(CG_ID, ASSERTION_NAME, AGENT); expect(result.promotedCount).toBe(0); }); it('promote records ShareTransition metadata in _shared_memory_meta', async () => { const SWM_META = `did:dkg:context-graph:${CG_ID}/_shared_memory_meta`; - await publisher.draftCreate(CG_ID, DRAFT, AGENT); - await publisher.draftWrite(CG_ID, DRAFT, AGENT, TRIPLES); - await publisher.draftPromote(CG_ID, DRAFT, AGENT); + await publisher.assertionCreate(CG_ID, ASSERTION_NAME, AGENT); + await publisher.assertionWrite(CG_ID, ASSERTION_NAME, AGENT, TRIPLES); + await publisher.assertionPromote(CG_ID, ASSERTION_NAME, AGENT); const result = await store.query( `SELECT ?s ?type WHERE { @@ -170,16 +170,16 @@ describe('Working Memory Draft Lifecycle', () => { }); it('full lifecycle: create → write → promote → verify SWM → discard', async () => { - await publisher.draftCreate(CG_ID, DRAFT, AGENT); - await publisher.draftWrite(CG_ID, DRAFT, AGENT, TRIPLES); + await 
publisher.assertionCreate(CG_ID, ASSERTION_NAME, AGENT); + await publisher.assertionWrite(CG_ID, ASSERTION_NAME, AGENT, TRIPLES); - let draftQuads = await publisher.draftQuery(CG_ID, DRAFT, AGENT); - expect(draftQuads.length).toBe(3); + let quads = await publisher.assertionQuery(CG_ID, ASSERTION_NAME, AGENT); + expect(quads.length).toBe(3); - await publisher.draftPromote(CG_ID, DRAFT, AGENT); + await publisher.assertionPromote(CG_ID, ASSERTION_NAME, AGENT); - draftQuads = await publisher.draftQuery(CG_ID, DRAFT, AGENT); - expect(draftQuads.length).toBe(0); + quads = await publisher.assertionQuery(CG_ID, ASSERTION_NAME, AGENT); + expect(quads.length).toBe(0); const swmResult = await store.query( `SELECT (COUNT(*) AS ?c) WHERE { GRAPH <${SWM_GRAPH}> { ?s ?p ?o } }`, @@ -190,6 +190,6 @@ describe('Working Memory Draft Lifecycle', () => { expect(count).toBe(3); } - await publisher.draftDiscard(CG_ID, DRAFT, AGENT); + await publisher.assertionDiscard(CG_ID, ASSERTION_NAME, AGENT); }); }); diff --git a/packages/publisher/test/get-views.test.ts b/packages/publisher/test/get-views.test.ts index 0dd75acde..a497cf3a4 100644 --- a/packages/publisher/test/get-views.test.ts +++ b/packages/publisher/test/get-views.test.ts @@ -4,7 +4,7 @@ import { GET_VIEWS, contextGraphSharedMemoryUri, contextGraphVerifiedMemoryUri, - contextGraphDraftUri, + contextGraphAssertionUri, } from '@origintrail-official/dkg-core'; import { resolveViewGraphs, type ViewResolution } from '@origintrail-official/dkg-query'; @@ -17,11 +17,11 @@ describe('resolveViewGraphs', () => { expect(() => resolveViewGraphs('working-memory', CG)).toThrow('agentAddress is required'); }); - it('returns a prefix for all agent drafts when no draftName given', () => { + it('returns a prefix for all agent assertions when no assertionName given', () => { const res = resolveViewGraphs('working-memory', CG, { agentAddress: AGENT }); expect(res.graphs).toHaveLength(0); expect(res.graphPrefixes).toHaveLength(1); - 
expect(res.graphPrefixes[0]).toBe(`did:dkg:context-graph:${CG}/draft/${AGENT}/`); + expect(res.graphPrefixes[0]).toBe(`did:dkg:context-graph:${CG}/assertion/${AGENT}/`); }); it('includes the agent address in the graph URI prefix', () => { @@ -29,9 +29,9 @@ describe('resolveViewGraphs', () => { expect(res.graphPrefixes[0]).toContain(AGENT); }); - it('returns an exact draft URI when draftName is provided', () => { - const res = resolveViewGraphs('working-memory', CG, { agentAddress: AGENT, draftName: 'exp-lr' }); - expect(res.graphs).toEqual([contextGraphDraftUri(CG, AGENT, 'exp-lr')]); + it('returns an exact assertion URI when assertionName is provided', () => { + const res = resolveViewGraphs('working-memory', CG, { agentAddress: AGENT, assertionName: 'exp-lr' }); + expect(res.graphs).toEqual([contextGraphAssertionUri(CG, AGENT, 'exp-lr')]); expect(res.graphPrefixes).toHaveLength(0); }); }); diff --git a/packages/publisher/test/metadata.test.ts b/packages/publisher/test/metadata.test.ts index dbdc60af0..b21f327a3 100644 --- a/packages/publisher/test/metadata.test.ts +++ b/packages/publisher/test/metadata.test.ts @@ -278,7 +278,7 @@ describe('generateShareTransitionMetadata', () => { contextGraphId: PARANET, operationId: 'op-share-001', agentAddress: '0x1234567890abcdef1234567890abcdef12345678', - draftName: 'my-draft', + assertionName: 'my-assertion', entities: ['urn:test:entity:alice', 'urn:test:entity:bob'], timestamp: new Date('2026-04-01T00:00:00Z'), }; @@ -306,12 +306,12 @@ describe('generateShareTransitionMetadata', () => { expect(preds).toContain(`${DKG}entities`); }); - it('source includes draft path with agent and name', () => { + it('source includes assertion path with agent and name', () => { const quads = generateShareTransitionMetadata(shareMeta); const sourceQuad = quads.find(q => q.predicate === `${DKG}source`); - expect(sourceQuad!.object).toContain('draft/'); + expect(sourceQuad!.object).toContain('assertion/'); 
expect(sourceQuad!.object).toContain(shareMeta.agentAddress); - expect(sourceQuad!.object).toContain(shareMeta.draftName); + expect(sourceQuad!.object).toContain(shareMeta.assertionName); }); it('generates one entity quad per entity', () => { diff --git a/packages/query/src/dkg-query-engine.ts b/packages/query/src/dkg-query-engine.ts index faca0f68c..053c5ad59 100644 --- a/packages/query/src/dkg-query-engine.ts +++ b/packages/query/src/dkg-query-engine.ts @@ -2,7 +2,7 @@ import type { TripleStore, Quad, QueryResult as StoreQueryResult } from '@origin import { GraphManager } from '@origintrail-official/dkg-storage'; import type { QueryResult, QueryOptions, QueryEngine } from './query-engine.js'; import { - contextGraphDataUri, contextGraphSharedMemoryUri, contextGraphVerifiedMemoryUri, contextGraphDraftUri, + contextGraphDataUri, contextGraphSharedMemoryUri, contextGraphVerifiedMemoryUri, contextGraphAssertionUri, contextGraphSubGraphUri, assertSafeIri, escapeSparqlLiteral, validateSubGraphName, type GetView, @@ -20,7 +20,7 @@ export interface ViewResolution { /** * Graph URI prefixes — the engine discovers all named graphs matching * each prefix and unions the results. Used for working-memory (multiple - * drafts) and verified-memory (multiple quorum graphs). + * assertions) and verified-memory (multiple quorum graphs). 
*/ graphPrefixes: string[]; } @@ -34,7 +34,7 @@ export interface ViewResolution { export function resolveViewGraphs( view: GetView, contextGraphId: string, - opts?: { agentAddress?: string; verifiedGraph?: string; draftName?: string }, + opts?: { agentAddress?: string; verifiedGraph?: string; assertionName?: string }, ): ViewResolution { if (REMOVED_VIEWS.includes(view as string)) { throw new Error( @@ -47,15 +47,15 @@ export function resolveViewGraphs( if (!opts?.agentAddress) { throw new Error('agentAddress is required for the working-memory view'); } - if (opts.draftName) { + if (opts.assertionName) { return { - graphs: [contextGraphDraftUri(contextGraphId, opts.agentAddress, opts.draftName)], + graphs: [contextGraphAssertionUri(contextGraphId, opts.agentAddress, opts.assertionName)], graphPrefixes: [], }; } return { graphs: [], - graphPrefixes: [`did:dkg:context-graph:${contextGraphId}/draft/${opts.agentAddress}/`], + graphPrefixes: [`did:dkg:context-graph:${contextGraphId}/assertion/${opts.agentAddress}/`], }; } case 'shared-working-memory': @@ -159,6 +159,7 @@ export class DKGQueryEngine implements QueryEngine { const resolution = resolveViewGraphs(view, contextGraphId, { agentAddress: options.agentAddress, verifiedGraph: options.verifiedGraph, + assertionName: options.assertionName, }); const allGraphs = [...resolution.graphs]; diff --git a/packages/query/src/query-engine.ts b/packages/query/src/query-engine.ts index ab6a04b8d..1d8e163be 100644 --- a/packages/query/src/query-engine.ts +++ b/packages/query/src/query-engine.ts @@ -20,10 +20,12 @@ export interface QueryOptions { includeWorkspace?: boolean; /** V10 declared state view — determines which graph(s) the query targets. */ view?: GetView; - /** Agent address — required when view is 'working-memory' to resolve draft graphs. */ + /** Agent address — required when view is 'working-memory' to resolve assertion graphs. 
*/ agentAddress?: string; /** Specific verified graph name — used with view='verified-memory' to target a single verified graph. */ verifiedGraph?: string; + /** Specific assertion name — used with view='working-memory' to target a single assertion graph. */ + assertionName?: string; /** * Scope the query to a specific sub-graph within the context graph. * When set, the query targets `did:dkg:context-graph:{id}/{subGraphName}` diff --git a/packages/storage/src/graph-manager.ts b/packages/storage/src/graph-manager.ts index d5aed7c91..4aaec3767 100644 --- a/packages/storage/src/graph-manager.ts +++ b/packages/storage/src/graph-manager.ts @@ -7,7 +7,7 @@ import { contextGraphSharedMemoryMetaUri, contextGraphVerifiedMemoryUri, contextGraphVerifiedMemoryMetaUri, - contextGraphDraftUri, + contextGraphAssertionUri, contextGraphSubGraphUri, contextGraphSubGraphMetaUri, contextGraphSubGraphPrivateUri, @@ -51,8 +51,8 @@ export class ContextGraphManager { return contextGraphVerifiedMemoryMetaUri(contextGraphId, verifiedMemoryId); } - draftUri(contextGraphId: string, agentAddress: string, name: string): string { - return contextGraphDraftUri(contextGraphId, agentAddress, name); + assertionUri(contextGraphId: string, agentAddress: string, name: string): string { + return contextGraphAssertionUri(contextGraphId, agentAddress, name); } subGraphUri(contextGraphId: string, subGraphName: string): string { @@ -117,7 +117,7 @@ export class ContextGraphManager { const prefix = `${CG_PREFIX}${contextGraphId}/`; const allGraphs = await this.store.listGraphs(); const subGraphNames = new Set(); - const reservedPrefixes = ['_', 'draft/', 'context/']; + const reservedPrefixes = ['_', 'assertion/', 'draft/', 'context/']; for (const g of allGraphs) { if (!g.startsWith(prefix)) continue; const rest = g.slice(prefix.length); diff --git a/packages/storage/src/private-store.ts b/packages/storage/src/private-store.ts index 6378c7a82..353bdc0ba 100644 --- a/packages/storage/src/private-store.ts +++ 
b/packages/storage/src/private-store.ts @@ -19,6 +19,10 @@ export class PrivateContentStore { this.graphManager = graphManager; } + clearCache(key: string): void { + this.privateEntities.delete(key); + } + private privateGraph(contextGraphId: string, subGraphName?: string): string { return subGraphName ? this.graphManager.subGraphPrivateUri(contextGraphId, subGraphName)