diff --git a/packages/cli/src/api-client.ts b/packages/cli/src/api-client.ts index eb614a2fd..c08d5bbf6 100644 --- a/packages/cli/src/api-client.ts +++ b/packages/cli/src/api-client.ts @@ -711,6 +711,50 @@ export class ApiClient { return this.post('/api/kafka/endpoint/verify', request); } + // ─── Token administration (slice 06 — root-only) ────────────────────────── + // + // Wrappers around `/api/auth/tokens`. The mint endpoint returns the full + // secret EXACTLY ONCE; the CLI is responsible for surfacing it to the + // operator with a clear "save this — it won't be shown again" notice. + // List + revoke responses never carry full secrets. + + async mintAuthToken(request: { + scope: string | string[]; + name?: string; + }): Promise<{ + token: string; + prefix: string; + scopes: string[] | '*'; + name?: string; + createdAt: string; + }> { + return this.post('/api/auth/tokens', { + scope: request.scope, + ...(request.name !== undefined ? { name: request.name } : {}), + }); + } + + async listAuthTokens(): Promise<{ + tokens: Array<{ + prefix: string; + scopes: string[] | '*'; + name?: string; + createdAt?: string; + // Codex bug 4: surfaces which rows are revocable via DELETE. + // Only `'file'` rows can be revoked through this API; `'config'` + // requires editing dkg.config.yaml; `'agent'` is auto-issued by + // /api/agent/register and cleared on daemon restart. Optional + // for forward-compat with older daemons that omit the field. 
+ source?: 'file' | 'config' | 'agent'; + }>; + }> { + return this.get('/api/auth/tokens'); + } + + async revokeAuthToken(prefix: string): Promise<void> { + return this.delete(`/api/auth/tokens/${encodeURIComponent(prefix)}`); + } + async signJoinRequest(contextGraphId: string): Promise<{ ok: boolean; status?: string; @@ -1054,6 +1098,9 @@ export class ApiClient { const data = await res.json().catch(() => ({ error: res.statusText })); throw ApiClient.httpError(res.status, ApiClient.errorMessageFromBody(data, res.statusText), data); } + // 204 No Content has an empty body — `res.json()` would throw. Return + // an empty object so callers don't need to special-case the type. + if (res.status === 204) return {} as T; return res.json() as Promise<T>; } diff --git a/packages/cli/src/auth.ts b/packages/cli/src/auth.ts index 2927858e1..87dfb445c 100644 --- a/packages/cli/src/auth.ts +++ b/packages/cli/src/auth.ts @@ -3,14 +3,50 @@ * * Uses bearer tokens stored on disk. Tokens are auto-generated on first start. * Any interface that needs auth calls `verifyToken(token)` against the loaded set. + * + * Slice 06 — added scoped tokens via the `token-store` deep module. The + * auth file format is extended to allow per-token scopes (see ADR-0003); + * legacy scope-less lines continue to grant full access. The new + * `loadTokenStore` returns the structured map; `loadTokens` is a thin + * wrapper that returns just the set of full-token strings, so the 13 + * existing call sites keep compiling and behaving identically for legacy + * tokens.
*/ import { randomBytes } from 'node:crypto'; -import { readFile, writeFile, mkdir, chmod } from 'node:fs/promises'; +import { readFile, mkdir, chmod, writeFile } from 'node:fs/promises'; import { join, dirname } from 'node:path'; import { existsSync } from 'node:fs'; import type { IncomingMessage, ServerResponse } from 'node:http'; import { dkgDir } from './config.js'; +import { + parseTokenFile, + serializeTokenStore, + lookupTokenRecord, + setTokenRecord, + tokenPrefix, + type ParsedTokenFile, + type TokenStore, + type TokenRecord, + type Scope, +} from './token-store.js'; + +// Re-export the deep-module types + helpers so call sites only import from +// `auth.ts`. Keeps the seam between the deep parser/serializer module and +// the I/O-bearing auth surface visible in one place. +export type { TokenStore, TokenRecord, Scope, TokenSource } from './token-store.js'; +export { + lookupTokenRecord, + toPublicRecord, + tokenPrefix, + setTokenRecord, + deleteTokenRecord, + addTokenToStore, + removeTokenFromStore, + serializeTokenStore, + parseTokenFile, + type PublicTokenRecord, +} from './token-store.js'; // --------------------------------------------------------------------------- // Types @@ -27,7 +63,12 @@ export interface AuthConfig { // Token file management // --------------------------------------------------------------------------- -function tokenFilePath(): string { +/** + * Resolve the on-disk auth-token path. Goes through `dkgDir()` so test + * harnesses can redirect via `DKG_HOME=/tmp/...` without touching the + * production location. + */ +export function tokenFilePath(): string { return join(dkgDir(), 'auth.token'); } @@ -36,42 +77,90 @@ function generateToken(): string { } /** - * Load tokens from disk + config. Auto-generates a token file if none exists. - * Returns the set of valid tokens. + * Load the structured token store from disk + config. Auto-generates a + * token file (legacy single-line format → scopes = `'*'`) if none exists. 
+ * + * Config-defined tokens (from `dkg.config.yaml`) are inserted as + * scope-less = root tokens — they are typically operator-supplied + * preshared secrets, and slice 06's contract is "legacy = full access". + * + * Reads the file with `parseTokenFile`, which skips malformed lines + * with a warning rather than crashing. The warning sink defaults to + * `console.warn` — the daemon can route these through `Logger` later. */ -export async function loadTokens(authConfig?: AuthConfig): Promise<Set<string>> { - const tokens = new Set<string>(); +export async function loadTokenStore(authConfig?: AuthConfig): Promise<TokenStore> { + const filePath = tokenFilePath(); - // Add any config-defined tokens - if (authConfig?.tokens) { - for (const t of authConfig.tokens) { - if (t.length > 0) tokens.add(t); - } - } + let parsed: ParsedTokenFile = { store: new Map(), preserved: [] }; - // Load or generate the file-based token - const filePath = tokenFilePath(); if (existsSync(filePath)) { try { const raw = await readFile(filePath, 'utf-8'); - for (const line of raw.split('\n')) { - const t = line.trim(); - if (t.length > 0 && !t.startsWith('#')) tokens.add(t); - } + parsed = parseTokenFile(raw, { + onWarning: (msg) => console.warn(`[auth] ${msg}`), + }); } catch { - // Unreadable — generate a fresh one + // Unreadable — fall through to auto-generate, same as the pre-slice-06 + // behavior. Don't lock the operator out of a fresh restart. + } + } + + // Insert config tokens AFTER file tokens so a config token can't + // accidentally clobber a file-only one (the parser already de-dupes + // file lines by prefix). Stamped `source: 'config'` so the operator + // sees them flagged as such in `dkg auth list-tokens` and DELETE + // doesn't fool them into thinking the token is gone (it'd still be + // re-loaded from config on the next restart).
+ if (authConfig?.tokens) { + for (const t of authConfig.tokens) { + if (t.length === 0) continue; + const existing = lookupTokenRecord(t, parsed.store); + if (existing) continue; + const record: TokenRecord = { + prefix: tokenPrefix(t), + fullToken: t, + scopes: '*', + source: 'config', + }; + setTokenRecord(parsed.store, record); } } - if (tokens.size === 0) { + // Auto-generate on first run. Legacy single-line format so a + // downgrade to a pre-slice-06 daemon still reads it correctly. The + // record IS `source: 'file'` because we're about to write it to disk + // — this is the canonical operator-managed root token. + if (parsed.store.size === 0) { const token = generateToken(); - tokens.add(token); + const record: TokenRecord = { + prefix: tokenPrefix(token), + fullToken: token, + scopes: '*', + source: 'file', + }; + setTokenRecord(parsed.store, record); + parsed.preserved.push({ + text: '# DKG node API token — treat this like a password', + index: 0, + }); await mkdir(dirname(filePath), { recursive: true }); - await writeFile(filePath, `# DKG node API token — treat this like a password\n${token}\n`, { mode: 0o600 }); + const out = serializeTokenStore(parsed); + await writeFile(filePath, out, { mode: 0o600 }); await chmod(filePath, 0o600); } - return tokens; + return parsed.store; +} + +/** + * Backward-compat wrapper. Returns the set of full-token strings — the + * pre-slice-06 shape of `loadTokens`. Used by 13 call sites (the daemon + * lifecycle, every route module, the api-client). They continue to work + * unchanged for legacy tokens. 
+ */ +export async function loadTokens(authConfig?: AuthConfig): Promise<Set<string>> { + const store = await loadTokenStore(authConfig); + return new Set([...store.values()].map((r) => r.fullToken)); } // --------------------------------------------------------------------------- @@ -87,6 +176,38 @@ export function verifyToken(token: string | undefined, validTokens: Set<string>) return validTokens.has(token); } +/** + * Verify a bearer token has the requested scope. + * + * - `'*'` (root) grants any NON-EMPTY scope. + * - Explicit scope arrays are exact-match (no globbing). + * - Unknown / unrecognized tokens fail closed (false). + * - Empty / undefined `requiredScope` fails closed (false). The TS + * signature forbids this, but a JS caller (or a `someValue as Scope` + * cast) could still slip through; without the guard, a wildcard + * token would grant a "no scope" check, which is exactly the + * forgotten-argument bug we want loud rather than silent. + * + * Pure function — every input is explicit, no global state. Callers are + * expected to send the appropriate 403 (NOT 401: the token IS valid; the + * scope is wrong). + */ +export function verifyTokenScope( + token: string | undefined, + requiredScope: Scope, + store: TokenStore, +): boolean { + if (!token) return false; + // Fail-closed BEFORE the wildcard shortcut so a forgotten/empty scope + // argument can never accidentally grant access to a root token. See + // review I1: slice 07 will copy this guard verbatim. + if (!requiredScope) return false; + const record = lookupTokenRecord(token, store); + if (!record) return false; + if (record.scopes === '*') return true; + return record.scopes.includes(requiredScope); +} + /** * Extract a bearer token from an HTTP Authorization header value. * Accepts: "Bearer <token>" or just "<token>".
@@ -128,6 +249,11 @@ function isPublicPath(pathname: string): boolean { * * Usage in the server handler: * if (!httpAuthGuard(req, res, authEnabled, validTokens)) return; + * + * Note: scope checks live PER ROUTE (see `daemon/routes/kafka.ts`). The + * guard only enforces "valid token present" — pushing per-route scope + * knowledge into the guard would force it to know every route's required + * scope, which is exactly the smell ADR-0003 calls out. */ export function httpAuthGuard( req: IncomingMessage, diff --git a/packages/cli/src/cli.ts b/packages/cli/src/cli.ts index c73ba46b4..7ec587217 100644 --- a/packages/cli/src/cli.ts +++ b/packages/cli/src/cli.ts @@ -485,7 +485,10 @@ authCmd authCmd .command('rotate') - .description('Generate a new auth token (replaces the file-based token)') + .description( "Replace the entire token file with a single new root token " + "(WARNING: any tokens minted via 'dkg auth mint-token' will be deleted)", ) .action(async () => { const { randomBytes } = await import('node:crypto'); const { writeFile, chmod, mkdir } = await import('node:fs/promises'); @@ -514,6 +517,110 @@ authCmd } }); +// ── Slice 06 ── token-admin subcommands (root-only). Hit the daemon's +// `/api/auth/tokens` so the running token set is updated atomically and +// the file write goes through the daemon's serialization mutex. We +// deliberately do NOT mutate the file directly from the CLI — that would +// race with daemon-side mints and lose records. + +authCmd + .command('mint-token') + .description('Mint a new scoped API token (daemon must be running, root token required)') + .requiredOption('--scope <scopes>', 'Comma-separated scope list (e.g. "kafka:endpoint:read,kafka:endpoint:write")') + .option('--name