diff --git a/packages/cli/src/api-client.ts b/packages/cli/src/api-client.ts index d51715b38..9d72787c1 100644 --- a/packages/cli/src/api-client.ts +++ b/packages/cli/src/api-client.ts @@ -1,5 +1,6 @@ import { readFile } from 'node:fs/promises'; import { basename } from 'node:path'; +import type { SecurityProtocol } from '@origintrail-official/dkg-kafka'; import { readApiPort, readPid, isProcessRunning } from './config.js'; import { loadTokens } from './auth.js'; @@ -556,11 +557,35 @@ export class ApiClient { broker: string; topic: string; messageFormat: string; + // Opportunistic verification fields (slice 04). All optional; when omitted + // the daemon skips the probe and the KA records `verificationStatus: + // "unattempted"`. + securityProtocol?: SecurityProtocol; + sasl?: { + mechanism?: 'plain' | 'scram-sha-256' | 'scram-sha-512'; + username: string; + password: string; + }; + ssl?: { + ca?: string; + cert?: string; + key?: string; + caPath?: string; + certPath?: string; + keyPath?: string; + rejectUnauthorized?: boolean; + }; + /** When true, KA is registered even if the probe fails. Sent as ?force=true. */ + force?: boolean; }): Promise<{ uri: string; contextGraphId: string; + verificationStatus?: 'unattempted' | 'verified' | 'failed'; + verifiedAt?: string; }> { - return this.post('/api/kafka/endpoint', request); + const { force, ...body } = request; + const path = force ? 
'/api/kafka/endpoint?force=true' : '/api/kafka/endpoint'; + return this.post(path, body); } async signJoinRequest(contextGraphId: string): Promise<{ diff --git a/packages/cli/src/cli.ts b/packages/cli/src/cli.ts index 0e55ea5de..7fd9e73fd 100644 --- a/packages/cli/src/cli.ts +++ b/packages/cli/src/cli.ts @@ -7,8 +7,9 @@ import { spawn, execSync } from 'node:child_process'; import { createReadStream } from 'node:fs'; import { fileURLToPath } from 'node:url'; import { join } from 'node:path'; -import { writeFile, unlink } from 'node:fs/promises'; +import { readFile, writeFile, unlink } from 'node:fs/promises'; import { ethers } from 'ethers'; +import type { SecurityProtocol } from '@origintrail-official/dkg-kafka'; import { dkgAuthTokenPath, requestFaucetFunding, toErrorMessage, hasErrorCode } from '@origintrail-official/dkg-core'; import yaml from 'js-yaml'; import { @@ -1717,6 +1718,62 @@ assertionCmd // ─── dkg kafka ────────────────────────────────────────────────────── +/** + * Resolve the SASL password for `dkg kafka endpoint register`, in priority + * order: + * 1. `--password-stdin` (read from stdin; conflicts with `--password`) + * 2. `--password ` + * 3. `DKG_KAFKA_PASSWORD` environment variable + * 4. `undefined` (no password supplied) + * + * `--password ` exposes the secret to shell history and `ps -ef`. The + * stdin / env var paths exist so CI and humans can avoid that. We only ever + * read the FIRST line of stdin and trim trailing newlines — anything beyond + * that is not a password. + * + * Stdin handling: when `--password-stdin` is set, we require a non-TTY stdin + * (a piped value). A TTY-attached stdin would need an interactive prompt with + * suppressed echo, which is intentionally out of scope for this commit; if a + * TTY is detected we fail with a clear pointer to the alternatives. 
+ */ +async function resolveKafkaPassword(opts: { + password?: string; + passwordStdin?: boolean; +}): Promise<string | undefined> { + if (opts.passwordStdin && opts.password) { + throw new Error( + '--password and --password-stdin are mutually exclusive (pick one)', + ); + } + if (opts.passwordStdin) { + if (process.stdin.isTTY) { + // Interactive masked prompt is a separate piece of work; pipe the + // password instead, e.g. `printf %s "$PW" | dkg ... --password-stdin`. + throw new Error( + '--password-stdin requires piped stdin; pipe the password (or use DKG_KAFKA_PASSWORD)', + ); + } + const chunks: Buffer[] = []; + for await (const chunk of process.stdin) { + chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)); + } + const raw = Buffer.concat(chunks).toString('utf8'); + // Take the first line only, trim trailing CR/LF — keeps secrets that + // legitimately contain whitespace intact while rejecting the trailing + // newline that `printf '%s\n' "$PW" | ...` and most shells will append. + const firstLine = raw.split(/\r?\n/, 1)[0] ?? ''; + return firstLine.length > 0 ? firstLine : undefined; + } + if (typeof opts.password === 'string' && opts.password.length > 0) { + return opts.password; + } + const envPw = process.env.DKG_KAFKA_PASSWORD; + if (typeof envPw === 'string' && envPw.length > 0) { + return envPw; + } + return undefined; +} + const kafkaCmd = program .command('kafka') .description('Kafka metadata registration commands'); @@ -1732,20 +1789,141 @@ kafkaEndpointCmd .requiredOption('--broker <broker>', 'Kafka broker host:port') .requiredOption('--topic <topic>', 'Kafka topic name') .option('--format <format>', 'Kafka message format MIME type', 'application/json') + // ── opportunistic verification flags (slice 04) ───────────────────── + // Without `--security-protocol` no probe runs and the KA records + // `verificationStatus: "unattempted"`. With it, the daemon attempts a + // one-shot probe and rejects the registration on failure unless + // `--force` is passed.
+ .option('--security-protocol <protocol>', 'PLAINTEXT | SASL_PLAINTEXT | SASL_SSL | SSL') + .option('--username <username>', 'SASL username (SASL_PLAINTEXT or SASL_SSL)') + .option( + '--password <password>', + 'SASL password (NOT recommended — exposes secret in shell history; prefer --password-stdin or DKG_KAFKA_PASSWORD)', + ) + .option( + '--password-stdin', + 'Read SASL password from stdin (recommended; prevents shell-history exposure)', + ) + .option( + '--sasl-mechanism <mechanism>', + 'SASL mechanism: plain (default), scram-sha-256, scram-sha-512', + 'plain', + ) + .option('--ca-pem-path <path>', 'Filesystem path to a CA PEM bundle (SASL_SSL or SSL)') + .option('--cert-pem-path <path>', 'Filesystem path to an mTLS client cert PEM (SSL)') + .option('--key-pem-path <path>', 'Filesystem path to an mTLS client key PEM (SSL)') + .option('--force', 'Register the KA even if the broker probe fails (verificationStatus="failed")') .action(async (opts: ActionOpts) => { try { + // Validate `--sasl-mechanism` here (rather than via commander's + // `Option.choices()`) so the error message matches the rest of this + // file's style and so the daemon and CLI produce identical wording for + // the same misconfig. The valid set mirrors `KafkaSaslCredentials`. + const VALID_SASL_MECHANISMS = ['plain', 'scram-sha-256', 'scram-sha-512'] as const; + type SaslMechanism = (typeof VALID_SASL_MECHANISMS)[number]; + const saslMechanism = String(opts.saslMechanism ?? 'plain').toLowerCase(); + if (!(VALID_SASL_MECHANISMS as readonly string[]).includes(saslMechanism)) { + throw new Error( + `--sasl-mechanism must be one of ${VALID_SASL_MECHANISMS.join(', ')}`, + ); + } + + // Resolve filesystem PEMs at the CLI layer so the request body carries + // inline PEM strings — the daemon's "filesystem path" mode is a + // separate escape hatch for callers that prefer the daemon to read + // them, but the CLI prefers explicit transport.
+ const ssl: Record<string, string> = {}; + if (opts.caPemPath) ssl.ca = await readFile(String(opts.caPemPath), 'utf8'); + if (opts.certPemPath) ssl.cert = await readFile(String(opts.certPemPath), 'utf8'); + if (opts.keyPemPath) ssl.key = await readFile(String(opts.keyPemPath), 'utf8'); + + // Resolve the SASL password from --password / --password-stdin / + // DKG_KAFKA_PASSWORD before composing the request body so that all + // downstream SASL-credential logic reads from a single resolved value. + const resolvedPassword = await resolveKafkaPassword({ + password: typeof opts.password === 'string' ? opts.password : undefined, + passwordStdin: Boolean(opts.passwordStdin), + }); + + // ── SASL credential validation ──────────────────────────────────── + // The previous shape silently dropped a half-supplied SASL block when + // exactly one of --username / --password was present. That left the + // caller with `verificationStatus: "unattempted"` even though they + // clearly intended to authenticate — a confusing footgun. Fail fast + // instead, with messages that name every input that could have set + // the credential (including --password-stdin / DKG_KAFKA_PASSWORD). + const username = typeof opts.username === 'string' && opts.username.length > 0 + ? opts.username + : undefined; + const securityProtocol = opts.securityProtocol + ?
(String(opts.securityProtocol).toUpperCase() as SecurityProtocol) + : undefined; + const isSaslProtocol = + securityProtocol === 'SASL_PLAINTEXT' || securityProtocol === 'SASL_SSL'; + const isNonSaslProtocol = + securityProtocol === 'PLAINTEXT' || securityProtocol === 'SSL'; + + if ((username && !resolvedPassword) || (!username && resolvedPassword)) { + throw new Error( + '--username and --password (or --password-stdin / DKG_KAFKA_PASSWORD) must be supplied together', + ); + } + if (isSaslProtocol && (!username || !resolvedPassword)) { + throw new Error( + 'SASL_PLAINTEXT/SASL_SSL requires --username and --password (or --password-stdin / DKG_KAFKA_PASSWORD)', + ); + } + if (isNonSaslProtocol && (username || resolvedPassword)) { + throw new Error( + '--username/--password is only valid with SASL_PLAINTEXT or SASL_SSL', + ); + } + const client = await ApiClient.connect(); const result = await client.registerKafkaEndpoint({ contextGraphId: opts.cg, broker: opts.broker, topic: opts.topic, messageFormat: opts.format, + ...(securityProtocol ? { securityProtocol } : {}), + ...(username && resolvedPassword + ? { + sasl: { + mechanism: saslMechanism as SaslMechanism, + username, + password: resolvedPassword, + }, + } + : {}), + ...(Object.keys(ssl).length > 0 ? { ssl } : {}), + ...(opts.force ? { force: true } : {}), }); console.log('Kafka endpoint registered:'); - console.log(` URI: ${result.uri}`); - console.log(` Context graph: ${result.contextGraphId}`); + console.log(` URI: ${result.uri}`); + console.log(` Context graph: ${result.contextGraphId}`); + if (result.verificationStatus) { + console.log(` Verification status: ${result.verificationStatus}`); + } + if (result.verifiedAt) { + console.log(` Verified at: ${result.verifiedAt}`); + } } catch (err) { console.error(toErrorMessage(err)); + // The route's 422 response carries `probeStatus` (e.g. "failed", + // "unreachable") and `probeError` (kafkajs error class name) as + // top-level fields on the response body. 
Render them here so users + // debugging an auth or topic failure see the actual cause instead of + // just the generic "pass force=true" message. + const body = (err as { responseBody?: unknown }).responseBody; + if (body && typeof body === 'object') { + const r = body as Record<string, unknown>; + if (typeof r.probeStatus === 'string') { + console.error(` Probe status: ${r.probeStatus}`); + } + if (typeof r.probeError === 'string') { + console.error(` Probe error: ${r.probeError}`); + } + } process.exit(1); } }); diff --git a/packages/cli/src/daemon/parsers/kafka-request.ts b/packages/cli/src/daemon/parsers/kafka-request.ts new file mode 100644 index 000000000..0cd5480c0 --- /dev/null +++ b/packages/cli/src/daemon/parsers/kafka-request.ts @@ -0,0 +1,242 @@ +import { + type KafkaSaslCredentials, + type KafkaSslMaterial, + type SecurityProtocol, +} from '@origintrail-official/dkg-kafka'; + +export function isNonEmptyString(value: unknown): value is string { + return typeof value === 'string' && value.trim().length > 0; +} + +const VALID_PROTOCOLS: ReadonlySet<SecurityProtocol> = new Set([ + 'PLAINTEXT', + 'SASL_PLAINTEXT', + 'SASL_SSL', + 'SSL', +]); + +const VALID_SASL_MECHANISMS: ReadonlySet<KafkaSaslCredentials['mechanism']> = new Set([ + 'plain', + 'scram-sha-256', + 'scram-sha-512', +]); + +/** + * Thrown by `parseSasl` / `parseSsl` when the caller supplied a `sasl` or + * `ssl` block that is structurally present but malformed (wrong type, unknown + * mechanism, empty username, non-string PEM, ...). The route handler catches + * this class and translates it into HTTP 400. + * + * The `publicMessage` is intentionally a sanitized error string — it names + * the offending field and (where helpful) the valid alternatives, but never + * echoes credential values. Safe to send to the caller in the 400 body.
+ */ +export class KafkaRequestParseError extends Error { + constructor(public readonly publicMessage: string) { + super(publicMessage); + this.name = 'KafkaRequestParseError'; + } +} + +export interface KafkaEndpointRequestBody { + contextGraphId: string; + broker: string; + topic: string; + messageFormat: string; + securityProtocol?: SecurityProtocol; + sasl?: KafkaSaslCredentials; + ssl?: KafkaSslMaterial; +} + +/** + * `dependsOnProbe` — opportunistic verification per ADR 0002. + * + * TL;DR: setting `securityProtocol` is the explicit opt-in to verification; + * its absence means no probe. + * + * The probe runs whenever `securityProtocol` is set AND the caller supplied + * the inputs that protocol logically needs: + * + * - `PLAINTEXT`: always probe (reachability is the most permissive answer). + * - `SSL`: always probe — `buildSsl` accepts mTLS material, a CA-only + * bundle, or no SSL block at all (default trust store), so the probe + * runs in all three shapes. Forcing cert+key here would be inconsistent + * with the kafka-package contract. + * - `SASL_PLAINTEXT` / `SASL_SSL`: probe only when both `sasl.username` and + * `sasl.password` are present — they are the credentials being verified. + * + * When no `securityProtocol` is set the route skips the probe entirely and + * the resulting KA records `verificationStatus: "unattempted"`. + * + * Exported so unit tests can pin the gate's behaviour without standing up + * the full daemon HTTP surface. 
+ */ +export function shouldProbe(body: KafkaEndpointRequestBody): boolean { + if (!body.securityProtocol) return false; + switch (body.securityProtocol) { + case 'PLAINTEXT': + case 'SSL': + return true; + case 'SASL_PLAINTEXT': + case 'SASL_SSL': + return Boolean(body.sasl?.username && body.sasl?.password); + default: + return false; + } +} + +export function parseSecurityProtocol(value: unknown): SecurityProtocol | undefined { + if (typeof value !== 'string') return undefined; + const upper = value.toUpperCase(); + return VALID_PROTOCOLS.has(upper as SecurityProtocol) + ? (upper as SecurityProtocol) + : undefined; +} + +/** + * Cross-field consistency check between `securityProtocol` and the auth + * material. Mirrors the CLI's fail-fast logic so direct HTTP callers also + * fail fast instead of getting silent `unattempted` registrations. + * + * Throws `KafkaRequestParseError` on mismatch; returns void on consistency. + * + * Slice-01 wire compatibility: when no `securityProtocol` is declared we do + * not enforce anything — the route already skips the probe and the KA records + * `verificationStatus: "unattempted"`. + */ +export function validateKafkaAuthConsistency(body: KafkaEndpointRequestBody): void { + const sp = body.securityProtocol; + if (!sp) { + // No protocol declared — slice-01 wire compat allows this for plain, + // unauthenticated requests. But auth/TLS blocks require a protocol + // declaration; sending them without one is ambiguous misconfig. 
+ if (body.sasl !== undefined) { + throw new KafkaRequestParseError( + `"sasl" must not be set without "securityProtocol"`, + ); + } + if (body.ssl !== undefined) { + throw new KafkaRequestParseError( + `"ssl" must not be set without "securityProtocol"`, + ); + } + return; + } + const requiresSasl = sp === 'SASL_PLAINTEXT' || sp === 'SASL_SSL'; + const hasSasl = body.sasl !== undefined; + if (requiresSasl && !hasSasl) { + throw new KafkaRequestParseError( + `"securityProtocol" "${sp}" requires a "sasl" block with username and password`, + ); + } + if (!requiresSasl && hasSasl) { + throw new KafkaRequestParseError( + `"sasl" must not be set when "securityProtocol" is "${sp}"`, + ); + } +} + +/** + * Parse a SASL block from the request body. + * + * Returns `undefined` when the field is genuinely absent (`null` / `undefined` + * / missing). Throws `KafkaRequestParseError` when the field is present but + * malformed — e.g. wrong type, unknown mechanism, empty username/password. + * Empty strings are treated as misconfiguration, not as "no creds": a caller + * that wants no SASL block should omit the field entirely. + * + * Error messages name the offending field and (for unknown mechanisms) the + * valid alternatives; they never echo credential values. 
+ */ +export function parseSasl(value: unknown): KafkaSaslCredentials | undefined { + if (value === null || value === undefined) return undefined; + if (typeof value !== 'object' || Array.isArray(value)) { + throw new KafkaRequestParseError('"sasl" must be an object'); + } + const v = value as Record<string, unknown>; + + if (!isNonEmptyString(v.username)) { + throw new KafkaRequestParseError('"sasl.username" must be a non-empty string'); + } + if (!isNonEmptyString(v.password)) { + throw new KafkaRequestParseError('"sasl.password" must be a non-empty string'); + } + + let mechanism: KafkaSaslCredentials['mechanism'] = 'plain'; + if (v.mechanism !== undefined) { + if (typeof v.mechanism !== 'string') { + throw new KafkaRequestParseError('"sasl.mechanism" must be a string'); + } + const lower = v.mechanism.toLowerCase(); + if (!VALID_SASL_MECHANISMS.has(lower as KafkaSaslCredentials['mechanism'])) { + throw new KafkaRequestParseError( + '"sasl.mechanism" must be one of plain, scram-sha-256, scram-sha-512', + ); + } + mechanism = lower as KafkaSaslCredentials['mechanism']; + } + + return { + mechanism, + username: v.username, + password: v.password, + }; +} + +/** + * Parse an SSL block from the request body. + * + * Returns `undefined` when the field is genuinely absent (`null` / `undefined` + * / missing) OR when the caller passed `ssl: {}` (no recognized field set — + * functionally equivalent to no SSL block). Throws `KafkaRequestParseError` + * when the field is present but malformed — wrong outer type, non-string + * PEM/path, non-boolean `rejectUnauthorized`. + * + * Error messages name the offending field; they never echo PEM contents.
+ */ +export function parseSsl(value: unknown): KafkaSslMaterial | undefined { + if (value === null || value === undefined) return undefined; + if (typeof value !== 'object' || Array.isArray(value)) { + throw new KafkaRequestParseError('"ssl" must be an object'); + } + const v = value as Record<string, unknown>; + + const out: KafkaSslMaterial = {}; + assignStringField(v, 'ca', out, 'caPem', 'ssl.ca'); + assignStringField(v, 'cert', out, 'certPem', 'ssl.cert'); + assignStringField(v, 'key', out, 'keyPem', 'ssl.key'); + assignStringField(v, 'caPath', out, 'caPath', 'ssl.caPath'); + assignStringField(v, 'certPath', out, 'certPath', 'ssl.certPath'); + assignStringField(v, 'keyPath', out, 'keyPath', 'ssl.keyPath'); + + if (v.rejectUnauthorized !== undefined) { + if (typeof v.rejectUnauthorized !== 'boolean') { + throw new KafkaRequestParseError('"ssl.rejectUnauthorized" must be a boolean'); + } + out.rejectUnauthorized = v.rejectUnauthorized; + } + + return Object.keys(out).length > 0 ? out : undefined; +} + +// Common assignment helper: `srcKey` is what the caller sent, `dstKey` is +// the kafkajs-shaped field on `KafkaSslMaterial`. Throws on wrong type or +// empty string; "field genuinely absent" is the only path that leaves `dst` +// untouched. +function assignStringField( + src: Record<string, unknown>, + srcKey: string, + dst: KafkaSslMaterial, + dstKey: keyof KafkaSslMaterial, + publicName: string, +): void { + const raw = src[srcKey]; + if (raw === undefined) return; + if (!isNonEmptyString(raw)) { + throw new KafkaRequestParseError(`"${publicName}" must be a non-empty string`); + } + // The keys we route to are all `string | undefined` on KafkaSslMaterial + // except `rejectUnauthorized` (handled separately above), so the cast is + // safe — the function is only called with string-typed destination keys.
+ (dst as unknown as Record<string, string>)[dstKey as string] = raw; +} diff --git a/packages/cli/src/daemon/routes/kafka.ts b/packages/cli/src/daemon/routes/kafka.ts index 550f6f05e..b7afd8b42 100644 --- a/packages/cli/src/daemon/routes/kafka.ts +++ b/packages/cli/src/daemon/routes/kafka.ts @@ -1,20 +1,32 @@ import { jsonResponse, readBody, validateRequiredContextGraphId } from '../http-utils.js'; +import { + isNonEmptyString, + KafkaRequestParseError, + parseSasl, + parseSecurityProtocol, + parseSsl, + shouldProbe, + validateKafkaAuthConsistency, + type KafkaEndpointRequestBody, +} from '../parsers/kafka-request.js'; import type { RequestContext } from './context.js'; import { + KafkaEndpointProbeFailedError, + probe as kafkaProbe, registerKafkaEndpoint, + toKafkaEndpointProbeOutcome, type KafkaEndpointPublisher, + type KafkaProbeOptions, + type ProbeResult, } from '@origintrail-official/dkg-kafka'; -function isNonEmptyString(value: unknown): value is string { - return typeof value === 'string' && value.trim().length > 0; -} - export async function handleKafkaRoutes(ctx: RequestContext): Promise<void> { const { req, res, agent, path, + url, requestAgentAddress, } = ctx; @@ -27,12 +39,8 @@ export async function handleKafkaRoutes(ctx: RequestContext): Promise<void> { return jsonResponse(res, 400, { error: 'Invalid JSON in request body' }); } - const { - contextGraphId, - broker, - topic, - messageFormat, - } = parsed as Record<string, unknown>; + const raw = parsed as Record<string, unknown>; + const { contextGraphId, broker, topic, messageFormat } = raw; if (!validateRequiredContextGraphId(contextGraphId, res)) { return; } @@ -48,6 +56,48 @@ export async function handleKafkaRoutes(ctx: RequestContext): Promise<void> { return jsonResponse(res, 400, { error: '"messageFormat" must be a non-empty string' }); } + const securityProtocol = parseSecurityProtocol(raw.securityProtocol); + if (raw.securityProtocol !== undefined && !securityProtocol) { + return jsonResponse(res, 400, { + error: '"securityProtocol" must be one of PLAINTEXT,
SASL_PLAINTEXT, SASL_SSL, SSL', + }); + } + // `parseSasl` / `parseSsl` throw `KafkaRequestParseError` on present-but- + // malformed payloads (wrong type, unknown mechanism, non-string PEM, ...). + // `validateKafkaAuthConsistency` throws on protocol/credential mismatch + // (e.g. SASL_SSL with no sasl block, PLAINTEXT with sasl present). Both + // translate to HTTP 400 so the caller learns about the misconfig up front, + // instead of getting a confusing kafkajs auth failure later or — worse — + // a `verificationStatus: "unattempted"` registration that silently + // dropped the broken auth block. Error messages are sanitized by the + // parser; safe to forward verbatim. + let reqBody: KafkaEndpointRequestBody; + try { + const sasl = parseSasl(raw.sasl); + const ssl = parseSsl(raw.ssl); + reqBody = { + contextGraphId: targetContextGraphId, + broker, + topic, + messageFormat, + securityProtocol, + sasl, + ssl, + }; + validateKafkaAuthConsistency(reqBody); + } catch (err) { + if (err instanceof KafkaRequestParseError) { + return jsonResponse(res, 400, { error: err.publicMessage }); + } + throw err; + } + + // `?force=true` overrides a non-verified probe outcome. We honor `1` + // and `true` (case-insensitive) as truthy; any other value is treated + // as false. The flag is only consulted when a probe ran AND failed. + const forceParam = (url.searchParams.get('force') ?? 
'').trim().toLowerCase(); + const force = forceParam === 'true' || forceParam === '1'; + const publisher: KafkaEndpointPublisher = { async publish(cgId, content) { await agent.publish( @@ -57,15 +107,71 @@ export async function handleKafkaRoutes(ctx: RequestContext): Promise { }, }; - const result = await registerKafkaEndpoint({ - contextGraphId: targetContextGraphId, - owner: requestAgentAddress.toLowerCase(), - broker, - topic, - messageFormat, - publisher, - }); + let probeResult: ProbeResult | undefined; + if (shouldProbe(reqBody) && reqBody.securityProtocol) { + const probeOpts: KafkaProbeOptions = { + brokers: [reqBody.broker], + topic: reqBody.topic, + securityProtocol: reqBody.securityProtocol, + sasl: reqBody.sasl, + ssl: reqBody.ssl, + }; + // `probe()` returns network/auth failures as structured results, but + // throws on ill-formed input (e.g. SSL with no cert/key, unreadable PEM + // path). Translate those into a 400 — they are caller errors, not + // unexpected daemon faults. The error message is always a safe, + // credential-free string composed in the kafka package. + try { + probeResult = await kafkaProbe(probeOpts); + } catch (err) { + return jsonResponse(res, 400, { + error: + err instanceof Error + ? `Invalid Kafka probe options: ${err.message}` + : 'Invalid Kafka probe options', + }); + } + } + + try { + const result = await registerKafkaEndpoint({ + contextGraphId: targetContextGraphId, + owner: requestAgentAddress.toLowerCase(), + broker, + topic, + messageFormat, + publisher, + securityProtocol: reqBody.securityProtocol, + probe: probeResult ? toKafkaEndpointProbeOutcome(probeResult) : undefined, + force, + }); - return jsonResponse(res, 200, result); + return jsonResponse(res, 200, result); + } catch (err) { + if (err instanceof KafkaEndpointProbeFailedError) { + // Surface the probe outcome (sans credentials) so the CLI / API client + // can render a meaningful failure. 
The `verificationStatus` reflects + // what would have been written had the caller passed `force=true`. + // The probe error string is part of the typed outcome — already + // classified to a kafkajs class name, never carries credential + // substrings. + // + // `probeStatus` and `probeError` are emitted at the top level so a + // CLI client can render them without having to drill into the + // `probe` sub-object. The nested `probe` block is retained for + // backwards compatibility with any caller that already reads + // `probe.status` / `probe.probedAt`. + return jsonResponse(res, 422, { + error: err.message, + probeStatus: err.outcome.status, + probeError: err.outcome.error, + probe: { + status: err.outcome.status, + probedAt: err.outcome.probedAt, + }, + }); + } + throw err; + } } } diff --git a/packages/cli/test/api-client.test.ts b/packages/cli/test/api-client.test.ts index 7a6cfff20..428fee799 100644 --- a/packages/cli/test/api-client.test.ts +++ b/packages/cli/test/api-client.test.ts @@ -194,13 +194,14 @@ describe('ApiClient', () => { expect(body.name).toBe('incident'); }); - it('registerKafkaEndpoint() posts the endpoint payload', async () => { + it('registerKafkaEndpoint() posts the endpoint payload (no creds → no force)', async () => { const { fetch, calls } = createTrackingFetch({ ok: true, status: 200, body: { uri: 'urn:dkg:kafka-endpoint:0xabc:hash', contextGraphId: 'devnet-test', + verificationStatus: 'unattempted', }, }); globalThis.fetch = fetch; @@ -222,6 +223,77 @@ describe('ApiClient', () => { }); }); + it('registerKafkaEndpoint() carries SASL_PLAINTEXT credentials in the request body', async () => { + const { fetch, calls } = createTrackingFetch({ + ok: true, + status: 200, + body: { uri: 'urn:dkg:kafka-endpoint:0xabc:hash', contextGraphId: 'cg', verificationStatus: 'verified' }, + }); + globalThis.fetch = fetch; + + await client.registerKafkaEndpoint({ + contextGraphId: 'cg', + broker: 'kafka.example.com:9092', + topic: 'orders.created', + 
messageFormat: 'application/json', + securityProtocol: 'SASL_PLAINTEXT', + sasl: { mechanism: 'plain', username: 'alice', password: 'creds-MARKER-123' }, + }); + + const body = JSON.parse(calls[0].opts.body as string); + expect(body.securityProtocol).toBe('SASL_PLAINTEXT'); + expect(body.sasl).toEqual({ mechanism: 'plain', username: 'alice', password: 'creds-MARKER-123' }); + }); + + it('registerKafkaEndpoint() sends ?force=true as a query param when force is set', async () => { + const { fetch, calls } = createTrackingFetch({ + ok: true, + status: 200, + body: { uri: 'urn:dkg:kafka-endpoint:0xabc:hash', contextGraphId: 'cg', verificationStatus: 'failed' }, + }); + globalThis.fetch = fetch; + + await client.registerKafkaEndpoint({ + contextGraphId: 'cg', + broker: 'kafka.example.com:9092', + topic: 'orders.created', + messageFormat: 'application/json', + securityProtocol: 'PLAINTEXT', + force: true, + }); + + expect(calls[0].url).toBe(`http://127.0.0.1:${PORT}/api/kafka/endpoint?force=true`); + const body = JSON.parse(calls[0].opts.body as string); + expect(body.force).toBeUndefined(); + }); + + it('registerKafkaEndpoint() carries inline SSL PEMs in the body under ssl.{ca,cert,key}', async () => { + const { fetch, calls } = createTrackingFetch({ + ok: true, + status: 200, + body: { uri: 'urn:dkg:kafka-endpoint:0xabc:hash', contextGraphId: 'cg', verificationStatus: 'verified' }, + }); + globalThis.fetch = fetch; + + await client.registerKafkaEndpoint({ + contextGraphId: 'cg', + broker: 'kafka.example.com:9092', + topic: 'orders.created', + messageFormat: 'application/json', + securityProtocol: 'SSL', + ssl: { + ca: '-----BEGIN CERTIFICATE-----\nCA\n-----END CERTIFICATE-----', + cert: '-----BEGIN CERTIFICATE-----\nCERT\n-----END CERTIFICATE-----', + key: '-----BEGIN PRIVATE KEY-----\nKEY\n-----END PRIVATE KEY-----', + }, + }); + + const body = JSON.parse(calls[0].opts.body as string); + expect(body.ssl.ca).toContain('CA'); + expect(body.ssl.cert).toContain('CERT'); + 
expect(body.ssl.key).toContain('KEY'); + }); + it('approveCclPolicy() posts approval payload', async () => { const { fetch, calls } = createTrackingFetch({ ok: true, status: 200, body: { policyUri: 'urn:policy', bindingUri: 'urn:binding', approvedAt: 'now' } }); globalThis.fetch = fetch; diff --git a/packages/cli/test/kafka-cli-smoke.test.ts b/packages/cli/test/kafka-cli-smoke.test.ts index f80ec60b2..3c5a85995 100644 --- a/packages/cli/test/kafka-cli-smoke.test.ts +++ b/packages/cli/test/kafka-cli-smoke.test.ts @@ -1,6 +1,6 @@ -import { beforeAll, afterAll, describe, expect, it } from 'vitest'; +import { beforeAll, afterAll, beforeEach, describe, expect, it } from 'vitest'; import { createServer } from 'node:http'; -import { execFile } from 'node:child_process'; +import { execFile, spawn } from 'node:child_process'; import { promisify } from 'node:util'; import { mkdtemp, writeFile, rm } from 'node:fs/promises'; import { existsSync } from 'node:fs'; @@ -12,12 +12,52 @@ const execFileAsync = promisify(execFile); const __dirname = dirname(fileURLToPath(import.meta.url)); const CLI_ENTRY = join(__dirname, '..', 'dist', 'cli.js'); +/** + * Run the CLI with a piped stdin payload. Returns the same shape as the + * `execFileAsync` resolved value, plus an `exitCode`. When `expectFailure` + * is true the helper resolves on non-zero exit instead of throwing — used + * for negative tests where we want to inspect stderr. + */ +function runCliWithStdin( + args: string[], + stdinPayload: string, + env: NodeJS.ProcessEnv, +): Promise<{ stdout: string; stderr: string; exitCode: number }> { + return new Promise((resolve, reject) => { + const child = spawn('node', [CLI_ENTRY, ...args], { env }); + let stdout = ''; + let stderr = ''; + child.stdout.on('data', (d) => { stdout += String(d); }); + child.stderr.on('data', (d) => { stderr += String(d); }); + child.on('error', reject); + child.on('close', (code) => { + resolve({ stdout, stderr, exitCode: code ?? 
0 }); + }); + child.stdin.write(stdinPayload); + child.stdin.end(); + }); +} + +interface CapturedRequest { + url: string; + body: string; + authHeader: string; +} + +interface NextResponse { + status: number; + body: unknown; +} + describe.sequential('kafka CLI smoke', () => { let dkgHome: string; let server: ReturnType; let smokeApiPort: string; - let lastBody = ''; - let lastAuthHeader = ''; + let last: CapturedRequest = { url: '', body: '', authHeader: '' }; + // Optional per-test override of the mock response. Tests that exercise + // error paths (e.g. 422 probe failure) set this in a beforeEach. When + // unset, the handler falls back to the success response below. + let nextResponse: NextResponse | null = null; beforeAll(async () => { dkgHome = await mkdtemp(join(tmpdir(), 'dkg-kafka-cli-')); @@ -31,17 +71,26 @@ describe.sequential('kafka CLI smoke', () => { await writeFile(join(dkgHome, 'auth.token'), 'smoke-token\n'); server = createServer(async (req, res) => { - if (req.method === 'POST' && req.url === '/api/kafka/endpoint') { - lastAuthHeader = String(req.headers.authorization ?? ''); + if (req.method === 'POST' && (req.url ?? '').startsWith('/api/kafka/endpoint')) { + const authHeader = String(req.headers.authorization ?? ''); const chunks: Buffer[] = []; for await (const chunk of req) { chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)); } - lastBody = Buffer.concat(chunks).toString('utf8'); + const body = Buffer.concat(chunks).toString('utf8'); + last = { url: req.url ?? 
'', body, authHeader }; + if (nextResponse) { + const { status, body: respBody } = nextResponse; + nextResponse = null; + res.writeHead(status, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(respBody)); + return; + } res.writeHead(200, { 'Content-Type': 'application/json' }); res.end(JSON.stringify({ uri: 'urn:dkg:kafka-endpoint:0xabc:hash', contextGraphId: 'devnet-test', + verificationStatus: 'unattempted', })); return; } @@ -60,12 +109,17 @@ describe.sequential('kafka CLI smoke', () => { }); }); + beforeEach(() => { + last = { url: '', body: '', authHeader: '' }; + nextResponse = null; + }); + afterAll(async () => { await new Promise<void>((resolve) => server.close(() => resolve())); await rm(dkgHome, { recursive: true, force: true }); }); - it('registers a Kafka endpoint through the CLI', async () => { + it('registers a Kafka endpoint through the CLI (no creds)', async () => { const env = { ...process.env, DKG_HOME: dkgHome, DKG_API_PORT: smokeApiPort }; const result = await execFileAsync('node', [ @@ -84,12 +138,525 @@ describe.sequential('kafka CLI smoke', () => { expect(result.stdout).toContain('Kafka endpoint registered:'); expect(result.stdout).toContain('urn:dkg:kafka-endpoint:0xabc:hash'); expect(result.stdout).toContain('devnet-test'); - expect(lastAuthHeader).toBe('Bearer smoke-token'); - expect(JSON.parse(lastBody)).toEqual({ + expect(last.authHeader).toBe('Bearer smoke-token'); + expect(last.url).toBe('/api/kafka/endpoint'); + expect(JSON.parse(last.body)).toEqual({ contextGraphId: 'devnet-test', broker: 'kafka.example.com:9092', topic: 'orders.created', messageFormat: 'application/json', }); }, 15000); + + it('passes --username/--password into the request body', async () => { + const env = { ...process.env, DKG_HOME: dkgHome, DKG_API_PORT: smokeApiPort }; + + await execFileAsync('node', [ + CLI_ENTRY, + 'kafka', + 'endpoint', + 'register', + '--cg', + 'devnet-test', + '--broker', + 'kafka.example.com:9092', + '--topic', +
'orders.created', + '--security-protocol', + 'SASL_PLAINTEXT', + '--username', + 'alice', + '--password', + 'cli-secret-XYZ', + ], { env }); + + const body = JSON.parse(last.body); + expect(body.securityProtocol).toBe('SASL_PLAINTEXT'); + expect(body.sasl).toEqual({ mechanism: 'plain', username: 'alice', password: 'cli-secret-XYZ' }); + }, 15000); + + it('reads --ca-pem-path and ships the contents in body.ssl.ca', async () => { + const env = { ...process.env, DKG_HOME: dkgHome, DKG_API_PORT: smokeApiPort }; + const caPath = join(dkgHome, 'ca-from-cli.pem'); + await writeFile(caPath, '-----BEGIN CERTIFICATE-----\nCLI-FILE-CA\n-----END CERTIFICATE-----'); + + await execFileAsync('node', [ + CLI_ENTRY, + 'kafka', + 'endpoint', + 'register', + '--cg', + 'devnet-test', + '--broker', + 'kafka.example.com:9093', + '--topic', + 'orders.created', + '--security-protocol', + 'SASL_SSL', + '--username', + 'alice', + '--password', + 'pw', + '--ca-pem-path', + caPath, + ], { env }); + + const body = JSON.parse(last.body); + expect(body.ssl.ca).toContain('CLI-FILE-CA'); + }, 15000); + + it('passes --force as a ?force=true query param', async () => { + const env = { ...process.env, DKG_HOME: dkgHome, DKG_API_PORT: smokeApiPort }; + + await execFileAsync('node', [ + CLI_ENTRY, + 'kafka', + 'endpoint', + 'register', + '--cg', + 'devnet-test', + '--broker', + 'kafka.example.com:9092', + '--topic', + 'orders.created', + '--security-protocol', + 'PLAINTEXT', + '--force', + ], { env }); + + expect(last.url).toBe('/api/kafka/endpoint?force=true'); + const body = JSON.parse(last.body); + expect(body.force).toBeUndefined(); + expect(body.securityProtocol).toBe('PLAINTEXT'); + }, 15000); + + it('honors --sasl-mechanism scram-sha-256 in the request body', async () => { + const env = { ...process.env, DKG_HOME: dkgHome, DKG_API_PORT: smokeApiPort }; + + await execFileAsync('node', [ + CLI_ENTRY, + 'kafka', + 'endpoint', + 'register', + '--cg', + 'devnet-test', + '--broker', + 
'kafka.example.com:9093', + '--topic', + 'orders.created', + '--security-protocol', + 'SASL_SSL', + '--username', + 'alice', + '--password', + 'cli-secret-XYZ', + '--sasl-mechanism', + 'scram-sha-256', + ], { env }); + + const body = JSON.parse(last.body); + expect(body.sasl).toEqual({ + mechanism: 'scram-sha-256', + username: 'alice', + password: 'cli-secret-XYZ', + }); + }, 15000); + + it('rejects an unknown --sasl-mechanism with a non-zero exit and a clear error', async () => { + const env = { ...process.env, DKG_HOME: dkgHome, DKG_API_PORT: smokeApiPort }; + + let exited = false; + let stderr = ''; + try { + await execFileAsync('node', [ + CLI_ENTRY, + 'kafka', + 'endpoint', + 'register', + '--cg', + 'devnet-test', + '--broker', + 'kafka.example.com:9092', + '--topic', + 'orders.created', + '--security-protocol', + 'SASL_PLAINTEXT', + '--username', + 'alice', + '--password', + 'pw', + '--sasl-mechanism', + 'gibberish', + ], { env }); + } catch (err) { + exited = true; + stderr = String((err as { stderr?: string }).stderr ?? ''); + } + + expect(exited).toBe(true); + expect(stderr).toContain('--sasl-mechanism'); + expect(stderr).toContain('plain'); + expect(stderr).toContain('scram-sha-256'); + expect(stderr).toContain('scram-sha-512'); + }, 15000); + + // --- Fix 3: non-argv password input ---------------------------------- + // `--password ` exposes the credential to shell history and `ps -ef`. + // The CLI also accepts `--password-stdin` (recommended) and the + // `DKG_KAFKA_PASSWORD` environment variable as alternatives. 
+ + it('reads password from stdin via --password-stdin and ships it in the SASL block', async () => { + const env = { ...process.env, DKG_HOME: dkgHome, DKG_API_PORT: smokeApiPort }; + + const { exitCode, stderr } = await runCliWithStdin( + [ + 'kafka', + 'endpoint', + 'register', + '--cg', + 'devnet-test', + '--broker', + 'kafka.example.com:9092', + '--topic', + 'orders.created', + '--security-protocol', + 'SASL_PLAINTEXT', + '--username', + 'alice', + '--password-stdin', + ], + // Trailing newline mirrors how shells like `printf '%s\n' ... | dkg` + // would feed the password; the CLI must strip it without dropping the + // password itself. + 'stdin-secret-XYZ\n', + env, + ); + + expect({ exitCode, stderr }).toEqual({ exitCode: 0, stderr: '' }); + const body = JSON.parse(last.body); + expect(body.securityProtocol).toBe('SASL_PLAINTEXT'); + expect(body.sasl).toEqual({ + mechanism: 'plain', + username: 'alice', + password: 'stdin-secret-XYZ', + }); + }, 15000); + + it('reads password from DKG_KAFKA_PASSWORD when --password is not supplied', async () => { + const env = { + ...process.env, + DKG_HOME: dkgHome, + DKG_API_PORT: smokeApiPort, + DKG_KAFKA_PASSWORD: 'env-secret-XYZ', + }; + + await execFileAsync('node', [ + CLI_ENTRY, + 'kafka', + 'endpoint', + 'register', + '--cg', + 'devnet-test', + '--broker', + 'kafka.example.com:9092', + '--topic', + 'orders.created', + '--security-protocol', + 'SASL_PLAINTEXT', + '--username', + 'alice', + ], { env }); + + const body = JSON.parse(last.body); + expect(body.sasl).toEqual({ + mechanism: 'plain', + username: 'alice', + password: 'env-secret-XYZ', + }); + }, 15000); + + it('--password takes precedence over DKG_KAFKA_PASSWORD', async () => { + const env = { + ...process.env, + DKG_HOME: dkgHome, + DKG_API_PORT: smokeApiPort, + DKG_KAFKA_PASSWORD: 'env-loses-XYZ', + }; + + await execFileAsync('node', [ + CLI_ENTRY, + 'kafka', + 'endpoint', + 'register', + '--cg', + 'devnet-test', + '--broker', + 'kafka.example.com:9092', + 
'--topic', + 'orders.created', + '--security-protocol', + 'SASL_PLAINTEXT', + '--username', + 'alice', + '--password', + 'flag-wins-XYZ', + ], { env }); + + const body = JSON.parse(last.body); + expect(body.sasl?.password).toBe('flag-wins-XYZ'); + }, 15000); + + it('rejects --password and --password-stdin together with a non-zero exit', async () => { + const env = { ...process.env, DKG_HOME: dkgHome, DKG_API_PORT: smokeApiPort }; + + const { exitCode, stderr } = await runCliWithStdin( + [ + 'kafka', + 'endpoint', + 'register', + '--cg', + 'devnet-test', + '--broker', + 'kafka.example.com:9092', + '--topic', + 'orders.created', + '--security-protocol', + 'SASL_PLAINTEXT', + '--username', + 'alice', + '--password', + 'argv-pw', + '--password-stdin', + ], + 'stdin-pw\n', + env, + ); + + expect(exitCode).not.toBe(0); + expect(stderr).toContain('--password'); + expect(stderr).toContain('--password-stdin'); + }, 15000); + + // --- Fix 2: fail fast on partial / misplaced SASL credentials ------- + // The previous shape silently dropped a half-supplied SASL block + // (`opts.username && opts.password ? sasl : {}`) which left the resulting + // KA in a confusing `verificationStatus: "unattempted"` state. The CLI + // now refuses these inputs up front. + + it('fails fast when only --username is supplied (no password)', async () => { + const env = { ...process.env, DKG_HOME: dkgHome, DKG_API_PORT: smokeApiPort }; + + let exited = false; + let stderr = ''; + try { + await execFileAsync('node', [ + CLI_ENTRY, + 'kafka', + 'endpoint', + 'register', + '--cg', + 'devnet-test', + '--broker', + 'kafka.example.com:9092', + '--topic', + 'orders.created', + '--security-protocol', + 'SASL_PLAINTEXT', + '--username', + 'alice', + ], { env }); + } catch (err) { + exited = true; + stderr = String((err as { stderr?: string }).stderr ?? 
''); + } + + expect(exited).toBe(true); + expect(stderr).toContain('--username'); + expect(stderr).toContain('--password'); + }, 15000); + + it('fails fast when only --password is supplied (no username)', async () => { + const env = { ...process.env, DKG_HOME: dkgHome, DKG_API_PORT: smokeApiPort }; + + let exited = false; + let stderr = ''; + try { + await execFileAsync('node', [ + CLI_ENTRY, + 'kafka', + 'endpoint', + 'register', + '--cg', + 'devnet-test', + '--broker', + 'kafka.example.com:9092', + '--topic', + 'orders.created', + '--security-protocol', + 'SASL_PLAINTEXT', + '--password', + 'pw', + ], { env }); + } catch (err) { + exited = true; + stderr = String((err as { stderr?: string }).stderr ?? ''); + } + + expect(exited).toBe(true); + expect(stderr).toContain('--username'); + expect(stderr).toContain('--password'); + }, 15000); + + it('fails fast when --security-protocol SASL_PLAINTEXT is set without credentials', async () => { + const env = { ...process.env, DKG_HOME: dkgHome, DKG_API_PORT: smokeApiPort }; + + let exited = false; + let stderr = ''; + try { + await execFileAsync('node', [ + CLI_ENTRY, + 'kafka', + 'endpoint', + 'register', + '--cg', + 'devnet-test', + '--broker', + 'kafka.example.com:9092', + '--topic', + 'orders.created', + '--security-protocol', + 'SASL_PLAINTEXT', + ], { env }); + } catch (err) { + exited = true; + stderr = String((err as { stderr?: string }).stderr ?? 
''); + } + + expect(exited).toBe(true); + expect(stderr).toContain('SASL_PLAINTEXT'); + expect(stderr).toContain('SASL_SSL'); + expect(stderr).toContain('--username'); + expect(stderr).toContain('--password'); + }, 15000); + + it('fails fast when SASL credentials are passed with PLAINTEXT', async () => { + const env = { ...process.env, DKG_HOME: dkgHome, DKG_API_PORT: smokeApiPort }; + + let exited = false; + let stderr = ''; + try { + await execFileAsync('node', [ + CLI_ENTRY, + 'kafka', + 'endpoint', + 'register', + '--cg', + 'devnet-test', + '--broker', + 'kafka.example.com:9092', + '--topic', + 'orders.created', + '--security-protocol', + 'PLAINTEXT', + '--username', + 'alice', + '--password', + 'pw', + ], { env }); + } catch (err) { + exited = true; + stderr = String((err as { stderr?: string }).stderr ?? ''); + } + + expect(exited).toBe(true); + expect(stderr).toContain('SASL_PLAINTEXT'); + expect(stderr).toContain('SASL_SSL'); + }, 15000); + + it('fails fast when --password-stdin sees an empty stream (treated as no password)', async () => { + const env = { ...process.env, DKG_HOME: dkgHome, DKG_API_PORT: smokeApiPort }; + + const { exitCode, stderr } = await runCliWithStdin( + [ + 'kafka', + 'endpoint', + 'register', + '--cg', + 'devnet-test', + '--broker', + 'kafka.example.com:9092', + '--topic', + 'orders.created', + '--security-protocol', + 'SASL_PLAINTEXT', + '--username', + 'alice', + '--password-stdin', + ], + // Empty stdin → resolveKafkaPassword returns undefined → Fix 2's + // partial-credential check fires (username present, password absent). + '', + env, + ); + + expect(exitCode).not.toBe(0); + expect(stderr).toContain('--username'); + expect(stderr).toContain('--password'); + expect(stderr).toContain('--password-stdin'); + }, 15000); + + it('renders probeStatus and probeError on a 422 probe-failure response', async () => { + // The route's 422 carries `probeStatus` (e.g. 
"failed", "unreachable") + // and `probeError` (kafkajs error class) at the top level. Without + // surfacing them the user sees only the generic "pass force=true" + // message and has no idea whether they're debugging credentials, + // network, or a missing topic. The CLI must print both lines on stderr. + nextResponse = { + status: 422, + body: { + error: + 'Kafka endpoint probe failed at 2026-05-04T00:00:00.000Z; pass force=true to register anyway', + probeStatus: 'failed', + probeError: 'KafkaJSSASLAuthenticationError', + probe: { status: 'failed', probedAt: '2026-05-04T00:00:00.000Z' }, + }, + }; + const env = { ...process.env, DKG_HOME: dkgHome, DKG_API_PORT: smokeApiPort }; + let exited = false; + let stderr = ''; + let exitCode = 0; + try { + await execFileAsync('node', [ + CLI_ENTRY, + 'kafka', + 'endpoint', + 'register', + '--cg', + 'devnet-test', + '--broker', + 'kafka.example.com:9092', + '--topic', + 'orders.created', + '--security-protocol', + 'SASL_PLAINTEXT', + '--username', + 'alice', + '--password', + 'pw', + ], { env }); + } catch (err) { + exited = true; + stderr = String((err as { stderr?: string }).stderr ?? ''); + exitCode = Number((err as { code?: number }).code ?? 0); + } + + expect(exited).toBe(true); + expect(exitCode).toBe(1); + // Top-level error message from `toErrorMessage(err)`. + expect(stderr).toContain('pass force=true'); + // Probe details rendered from responseBody. 
+ expect(stderr).toContain('Probe status: failed'); + expect(stderr).toContain('Probe error:'); + expect(stderr).toContain('KafkaJSSASLAuthenticationError'); + }, 15000); }); diff --git a/packages/cli/test/kafka-route-parsers.test.ts b/packages/cli/test/kafka-route-parsers.test.ts new file mode 100644 index 000000000..74c451e9a --- /dev/null +++ b/packages/cli/test/kafka-route-parsers.test.ts @@ -0,0 +1,409 @@ +import { describe, expect, it } from 'vitest'; +import { + KafkaRequestParseError, + parseSasl, + parseSecurityProtocol, + parseSsl, + shouldProbe, + validateKafkaAuthConsistency, + type KafkaEndpointRequestBody, +} from '../src/daemon/parsers/kafka-request.js'; + +// These tests pin the route-level input gate that decides whether the +// opportunistic probe runs. The slice's UX promise: a request with a +// genuinely-absent `sasl` / `ssl` field results in `verificationStatus: +// "unattempted"`, but a present-but-malformed block produces a HTTP 400 so +// the caller is never silently downgraded into an unverified KA. 
+ +describe('parseSecurityProtocol', () => { + it('uppercases and accepts the four supported protocols', () => { + expect(parseSecurityProtocol('plaintext')).toBe('PLAINTEXT'); + expect(parseSecurityProtocol('sasl_plaintext')).toBe('SASL_PLAINTEXT'); + expect(parseSecurityProtocol('SASL_SSL')).toBe('SASL_SSL'); + expect(parseSecurityProtocol('SSL')).toBe('SSL'); + }); + + it('returns undefined for unknown protocols and non-strings', () => { + expect(parseSecurityProtocol('PLAINTEX')).toBeUndefined(); + expect(parseSecurityProtocol('rot13')).toBeUndefined(); + expect(parseSecurityProtocol(0)).toBeUndefined(); + expect(parseSecurityProtocol(undefined)).toBeUndefined(); + }); +}); + +describe('parseSasl', () => { + it('returns undefined when the field is genuinely absent', () => { + expect(parseSasl(undefined)).toBeUndefined(); + expect(parseSasl(null)).toBeUndefined(); + }); + + it('throws on a non-object value', () => { + expect(() => parseSasl('plain')).toThrow(KafkaRequestParseError); + expect(() => parseSasl('plain')).toThrow(/"sasl" must be an object/); + expect(() => parseSasl(42)).toThrow(KafkaRequestParseError); + expect(() => parseSasl([])).toThrow(KafkaRequestParseError); + }); + + it('throws on missing username or password', () => { + expect(() => parseSasl({ password: 'p' })).toThrow(/"sasl.username"/); + expect(() => parseSasl({ username: 'a' })).toThrow(/"sasl.password"/); + }); + + it('throws on empty / whitespace username', () => { + expect(() => parseSasl({ username: '', password: 'p' })).toThrow( + /"sasl.username" must be a non-empty string/, + ); + expect(() => parseSasl({ username: ' ', password: 'p' })).toThrow( + /"sasl.username"/, + ); + }); + + it('throws on empty / whitespace password', () => { + expect(() => parseSasl({ username: 'a', password: '' })).toThrow( + /"sasl.password" must be a non-empty string/, + ); + expect(() => parseSasl({ username: 'a', password: ' ' })).toThrow( + /"sasl.password"/, + ); + }); + + it('throws on an unknown 
mechanism, listing the valid alternatives', () => { + const fn = () => + parseSasl({ mechanism: 'totp', username: 'a', password: 'p' }); + expect(fn).toThrow(KafkaRequestParseError); + expect(fn).toThrow(/plain, scram-sha-256, scram-sha-512/); + }); + + it('throws on a non-string mechanism', () => { + expect(() => + parseSasl({ mechanism: 42, username: 'a', password: 'p' }), + ).toThrow(/"sasl.mechanism" must be a string/); + }); + + it('error messages never echo the credential value', () => { + // Defence in depth: even if the message contained the field name, it + // must never contain the supplied secret. + try { + parseSasl({ username: 'CRED-MARKER-USER', password: '' }); + throw new Error('expected throw'); + } catch (err) { + expect(err).toBeInstanceOf(KafkaRequestParseError); + expect((err as Error).message).not.toContain('CRED-MARKER-USER'); + } + try { + parseSasl({ username: 'a', password: 'CRED-MARKER-PASS' }); + } catch { + // intentionally empty: this branch should not trigger because + // "CRED-MARKER-PASS" is non-empty and therefore valid. 
+ } + }); + + it('defaults mechanism to plain and lowercases user input', () => { + expect(parseSasl({ username: 'a', password: 'p' })).toEqual({ + mechanism: 'plain', + username: 'a', + password: 'p', + }); + expect( + parseSasl({ mechanism: 'SCRAM-SHA-256', username: 'a', password: 'p' }), + ).toEqual({ mechanism: 'scram-sha-256', username: 'a', password: 'p' }); + }); +}); + +describe('parseSsl', () => { + it('returns undefined when the field is genuinely absent', () => { + expect(parseSsl(undefined)).toBeUndefined(); + expect(parseSsl(null)).toBeUndefined(); + }); + + it('throws on a non-object value', () => { + expect(() => parseSsl('PEM')).toThrow(KafkaRequestParseError); + expect(() => parseSsl('PEM')).toThrow(/"ssl" must be an object/); + expect(() => parseSsl([])).toThrow(KafkaRequestParseError); + }); + + it('returns undefined for an empty object (caller intent: no SSL block)', () => { + expect(parseSsl({})).toBeUndefined(); + }); + + it('throws on a non-string `ca`', () => { + expect(() => parseSsl({ ca: 12345 })).toThrow(/"ssl.ca" must be a non-empty string/); + }); + + it('throws on an empty / whitespace `ca`', () => { + expect(() => parseSsl({ ca: '' })).toThrow(/"ssl.ca"/); + expect(() => parseSsl({ ca: ' ' })).toThrow(/"ssl.ca"/); + }); + + it('throws on a non-string `cert`, `key`, `caPath`, `certPath`, or `keyPath`', () => { + expect(() => parseSsl({ cert: 1 })).toThrow(/"ssl.cert"/); + expect(() => parseSsl({ key: false })).toThrow(/"ssl.key"/); + expect(() => parseSsl({ caPath: {} })).toThrow(/"ssl.caPath"/); + expect(() => parseSsl({ certPath: 0 })).toThrow(/"ssl.certPath"/); + expect(() => parseSsl({ keyPath: null })).toThrow(/"ssl.keyPath"/); + }); + + it('omitting a field entirely is fine — only present-but-malformed fields throw', () => { + // A request that only sets `caPath` should pass through cleanly; the + // other PEM/path fields are simply absent. 
+ const out = parseSsl({ caPath: '/etc/ca.pem' }); + expect(out).toEqual({ caPath: '/etc/ca.pem' }); + }); + + it('throws on a non-boolean `rejectUnauthorized`', () => { + expect(() => parseSsl({ rejectUnauthorized: 'true' })).toThrow( + /"ssl.rejectUnauthorized" must be a boolean/, + ); + expect(() => parseSsl({ rejectUnauthorized: 1 })).toThrow( + /"ssl.rejectUnauthorized"/, + ); + }); + + it('passes through non-empty inline PEMs and paths', () => { + const out = parseSsl({ + ca: '-----BEGIN CERTIFICATE-----\nCA\n-----END CERTIFICATE-----', + cert: '-----BEGIN CERTIFICATE-----\nCERT\n-----END CERTIFICATE-----', + key: '-----BEGIN PRIVATE KEY-----\nKEY\n-----END PRIVATE KEY-----', + caPath: '/etc/ca.pem', + certPath: '/etc/cert.pem', + keyPath: '/etc/key.pem', + rejectUnauthorized: false, + }); + expect(out).toEqual({ + caPem: '-----BEGIN CERTIFICATE-----\nCA\n-----END CERTIFICATE-----', + certPem: '-----BEGIN CERTIFICATE-----\nCERT\n-----END CERTIFICATE-----', + keyPem: '-----BEGIN PRIVATE KEY-----\nKEY\n-----END PRIVATE KEY-----', + caPath: '/etc/ca.pem', + certPath: '/etc/cert.pem', + keyPath: '/etc/key.pem', + rejectUnauthorized: false, + }); + }); +}); + +describe('shouldProbe — valid inputs and explicit absences', () => { + // These tests now use parseSasl/parseSsl results that are guaranteed valid + // by the parser (or genuinely absent) — the old "empty creds collapse" + // path no longer exists; empty creds throw. + it('SASL_PLAINTEXT with valid creds → probe', () => { + const sasl = parseSasl({ username: 'a', password: 'p' }); + const body: KafkaEndpointRequestBody = { + contextGraphId: 'cg', + broker: 'b', + topic: 't', + messageFormat: 'application/json', + securityProtocol: 'SASL_PLAINTEXT', + ...(sasl ? 
{ sasl } : {}), + }; + expect(shouldProbe(body)).toBe(true); + }); + + it('SASL_SSL with no sasl field at all → no probe', () => { + const sasl = parseSasl(undefined); + expect(sasl).toBeUndefined(); + const body: KafkaEndpointRequestBody = { + contextGraphId: 'cg', + broker: 'b', + topic: 't', + messageFormat: 'application/json', + securityProtocol: 'SASL_SSL', + ...(sasl ? { sasl } : {}), + }; + expect(shouldProbe(body)).toBe(false); + }); + + it('SSL with no ssl field → probe (default trust store)', () => { + // `buildSsl` (in @origintrail-official/dkg-kafka) accepts SSL with no + // SSL block at all — the kafkajs client falls back to the platform's + // default trust store. The gate must not be stricter than buildSsl. + const ssl = parseSsl(undefined); + expect(ssl).toBeUndefined(); + const body: KafkaEndpointRequestBody = { + contextGraphId: 'cg', + broker: 'b', + topic: 't', + messageFormat: 'application/json', + securityProtocol: 'SSL', + ...(ssl ? { ssl } : {}), + }; + expect(shouldProbe(body)).toBe(true); + }); + + it('SSL with only caPem → probe (CA-only one-way TLS)', () => { + const ssl = parseSsl({ + ca: '-----BEGIN CERTIFICATE-----\nCA\n-----END CERTIFICATE-----', + }); + const body: KafkaEndpointRequestBody = { + contextGraphId: 'cg', + broker: 'b', + topic: 't', + messageFormat: 'application/json', + securityProtocol: 'SSL', + ...(ssl ? { ssl } : {}), + }; + expect(shouldProbe(body)).toBe(true); + }); + + it('SSL with full mTLS material (cert+key) → probe', () => { + const ssl = parseSsl({ + ca: '-----BEGIN CERTIFICATE-----\nCA\n-----END CERTIFICATE-----', + cert: '-----BEGIN CERTIFICATE-----\nCERT\n-----END CERTIFICATE-----', + key: '-----BEGIN PRIVATE KEY-----\nKEY\n-----END PRIVATE KEY-----', + }); + const body: KafkaEndpointRequestBody = { + contextGraphId: 'cg', + broker: 'b', + topic: 't', + messageFormat: 'application/json', + securityProtocol: 'SSL', + ...(ssl ? 
{ ssl } : {}), + }; + expect(shouldProbe(body)).toBe(true); + }); + + it('PLAINTEXT with explicit protocol → probe (no creds needed)', () => { + const body: KafkaEndpointRequestBody = { + contextGraphId: 'cg', + broker: 'b', + topic: 't', + messageFormat: 'application/json', + securityProtocol: 'PLAINTEXT', + }; + expect(shouldProbe(body)).toBe(true); + }); + + it('No securityProtocol at all → no probe', () => { + const body: KafkaEndpointRequestBody = { + contextGraphId: 'cg', + broker: 'b', + topic: 't', + messageFormat: 'application/json', + }; + expect(shouldProbe(body)).toBe(false); + }); +}); + +describe('validateKafkaAuthConsistency', () => { + // Cross-field consistency check between `securityProtocol` and the auth + // material. The route's per-field parsers validate each field in isolation; + // this helper closes the protocol/credential mismatch gap so direct HTTP + // callers cannot smuggle a SASL_SSL request without creds (or PLAINTEXT + // with creds) past the route and silently land on `verificationStatus: + // "unattempted"`. 
+ + const baseBody = { + contextGraphId: 'cg', + broker: 'b', + topic: 't', + messageFormat: 'application/json', + } as const; + + const validSasl = { mechanism: 'plain', username: 'a', password: 'p' } as const; + + it('SASL_SSL with no sasl block → throws, naming the protocol', () => { + const body: KafkaEndpointRequestBody = { + ...baseBody, + securityProtocol: 'SASL_SSL', + }; + expect(() => validateKafkaAuthConsistency(body)).toThrow(KafkaRequestParseError); + expect(() => validateKafkaAuthConsistency(body)).toThrow(/SASL_SSL/); + expect(() => validateKafkaAuthConsistency(body)).toThrow(/"sasl"/); + }); + + it('SASL_PLAINTEXT with no sasl block → throws, naming the protocol', () => { + const body: KafkaEndpointRequestBody = { + ...baseBody, + securityProtocol: 'SASL_PLAINTEXT', + }; + expect(() => validateKafkaAuthConsistency(body)).toThrow(KafkaRequestParseError); + expect(() => validateKafkaAuthConsistency(body)).toThrow(/SASL_PLAINTEXT/); + }); + + it('PLAINTEXT with sasl block present → throws, naming the protocol', () => { + const body: KafkaEndpointRequestBody = { + ...baseBody, + securityProtocol: 'PLAINTEXT', + sasl: validSasl, + }; + expect(() => validateKafkaAuthConsistency(body)).toThrow(KafkaRequestParseError); + expect(() => validateKafkaAuthConsistency(body)).toThrow(/PLAINTEXT/); + expect(() => validateKafkaAuthConsistency(body)).toThrow(/"sasl"/); + }); + + it('SSL with sasl block present → throws, naming the protocol', () => { + const body: KafkaEndpointRequestBody = { + ...baseBody, + securityProtocol: 'SSL', + sasl: validSasl, + }; + expect(() => validateKafkaAuthConsistency(body)).toThrow(KafkaRequestParseError); + expect(() => validateKafkaAuthConsistency(body)).toThrow(/SSL/); + }); + + it('SASL_SSL with valid sasl block → no throw', () => { + const body: KafkaEndpointRequestBody = { + ...baseBody, + securityProtocol: 'SASL_SSL', + sasl: validSasl, + }; + expect(() => validateKafkaAuthConsistency(body)).not.toThrow(); + }); + + 
it('PLAINTEXT with no sasl block → no throw', () => { + const body: KafkaEndpointRequestBody = { + ...baseBody, + securityProtocol: 'PLAINTEXT', + }; + expect(() => validateKafkaAuthConsistency(body)).not.toThrow(); + }); + + it('SSL with no sasl block, optional ssl block present → no throw', () => { + const body: KafkaEndpointRequestBody = { + ...baseBody, + securityProtocol: 'SSL', + ssl: { caPem: '-----BEGIN CERTIFICATE-----\nCA\n-----END CERTIFICATE-----' }, + }; + expect(() => validateKafkaAuthConsistency(body)).not.toThrow(); + }); + + it('No securityProtocol declared → no throw (slice-01 wire compat)', () => { + // Slice-01 callers can omit `securityProtocol` entirely. The route already + // skips the probe and the KA records `verificationStatus: "unattempted"`. + // The consistency check must not regress that path. + const body: KafkaEndpointRequestBody = { ...baseBody }; + expect(() => validateKafkaAuthConsistency(body)).not.toThrow(); + }); + + it('No securityProtocol but sasl block present → throws, naming both fields', () => { + // Without `securityProtocol`, `shouldProbe` returns false and the route + // would silently drop the supplied auth payload into an `unattempted` KA. + // Reject this ambiguous misconfig at the gate so the caller sees a 400. + const body: KafkaEndpointRequestBody = { + ...baseBody, + sasl: validSasl, + }; + expect(() => validateKafkaAuthConsistency(body)).toThrow(KafkaRequestParseError); + expect(() => validateKafkaAuthConsistency(body)).toThrow(/"sasl"/); + expect(() => validateKafkaAuthConsistency(body)).toThrow(/"securityProtocol"/); + }); + + it('No securityProtocol but ssl block present → throws, naming both fields', () => { + // Same silent-downgrade pattern as the sasl case: without a protocol the + // route would skip the probe and drop the SSL material into an unverified + // KA. Reject so the caller is forced to declare intent. 
+ const body: KafkaEndpointRequestBody = { + ...baseBody, + ssl: { caPem: '-----BEGIN CERTIFICATE-----\nCA\n-----END CERTIFICATE-----' }, + }; + expect(() => validateKafkaAuthConsistency(body)).toThrow(KafkaRequestParseError); + expect(() => validateKafkaAuthConsistency(body)).toThrow(/"ssl"/); + expect(() => validateKafkaAuthConsistency(body)).toThrow(/"securityProtocol"/); + }); + + it('No securityProtocol and no sasl/ssl blocks → no throw (slice-01 wire compat preserved)', () => { + // Regression guard: tightening the no-protocol branch must still permit + // genuine slice-01 wire-compat requests that send neither auth nor TLS. + const body: KafkaEndpointRequestBody = { ...baseBody }; + expect(() => validateKafkaAuthConsistency(body)).not.toThrow(); + }); +}); diff --git a/packages/kafka/package.json b/packages/kafka/package.json index abd8eca4e..8b19a9363 100644 --- a/packages/kafka/package.json +++ b/packages/kafka/package.json @@ -10,8 +10,14 @@ "test:coverage": "vitest run --coverage", "clean": "rm -rf dist tsconfig.tsbuildinfo" }, + "dependencies": { + "kafkajs": "2.2.4" + }, "devDependencies": { + "@origintrail-official/dkg-core": "workspace:*", + "@testcontainers/kafka": "11.14.0", "@vitest/coverage-v8": "^4.0.18", + "testcontainers": "11.14.0", "vitest": "^4.0.18" }, "publishConfig": { diff --git a/packages/kafka/src/endpoint.ts b/packages/kafka/src/endpoint.ts index 22888c8d2..a7a5999d3 100644 --- a/packages/kafka/src/endpoint.ts +++ b/packages/kafka/src/endpoint.ts @@ -1,13 +1,22 @@ -import { buildKafkaEndpointKnowledgeAsset } from './ka-builder.js'; +import { + buildKafkaEndpointKnowledgeAsset, + type KafkaEndpointVerificationStatus, +} from './ka-builder.js'; +import type { ProbeResult } from './kafka-probe.js'; import { buildKafkaEndpointUri } from './uri.js'; +/** + * The JSON-LD shape produced by `buildKafkaEndpointKnowledgeAsset` and handed + * to the publisher. 
Captured as a type alias so callers can describe their + publisher signature without re-deriving the structural type. + */ +export type KafkaEndpointKnowledgeAsset = ReturnType<typeof buildKafkaEndpointKnowledgeAsset>; + /** * Dependency-inversion boundary: the kafka package needs something that can * publish a JSON-LD knowledge asset. The package hands the bare KA across this * interface; envelope wrapping (e.g. `{ public: ... }`) belongs to the caller. */ -export type KafkaEndpointKnowledgeAsset = ReturnType<typeof buildKafkaEndpointKnowledgeAsset>; - export interface KafkaEndpointPublisher { publish( contextGraphId: string, @@ -15,6 +24,32 @@ export interface KafkaEndpointPublisher { ): Promise<void>; } +/** + * Probe outcome handed to `registerKafkaEndpoint`. The probe is run by the + * caller (the route handler). This package's pure layer never opens Kafka + * connections of its own — see ADR 0001/0002. The shape mirrors the public + * `ProbeResult` from `kafka-probe.ts` minus its surface-irrelevant + * `securityProtocol` echo (the route already knows that and passes it + * directly via `RegisterKafkaEndpointInput.securityProtocol`). + */ +export interface KafkaEndpointProbeOutcome { + status: 'verified' | 'failed' | 'unreachable'; + /** ISO-8601 timestamp recorded at probe completion. */ + probedAt: string; + /** + * Sanitized error description from the underlying probe (already classified + * to a stable kafkajs error class name — never carries credential + * substrings). Present on `failed` / `unreachable` outcomes; absent on + * `verified`. + */ + error?: string; +} + +/** + * Inputs to `registerKafkaEndpoint`. Captures the endpoint identity, the + * publisher to use, and the optional probe outcome the route handler ran on + * the caller's behalf (per ADR 0002).
+ */ export interface RegisterKafkaEndpointInput { contextGraphId: string; owner: string; @@ -23,24 +58,98 @@ export interface RegisterKafkaEndpointInput { messageFormat: string; issuedAt?: string; publisher: KafkaEndpointPublisher; + /** + * Advertised broker auth hint, mirrored to the KA as `dkg:securityProtocol`. + * Set whenever the request specified one — even if no probe ran. + */ + securityProtocol?: string; + /** + * Probe outcome from the route handler. `undefined` means "no probe ran" + * (creds were absent in the request). When defined, the registration + * decision rules below apply. + */ + probe?: KafkaEndpointProbeOutcome; + /** + * Caller's `?force=true` override. Only consulted when `probe.status` is + * not `verified`. Without `force`, a non-verified probe causes the + * registration to throw — the route translates that to HTTP 4xx. + */ + force?: boolean; } +/** + * Outcome of a successful `registerKafkaEndpoint` call: the endpoint URI, the + * target context graph, and the verification status that was advertised on + * the published KA. + */ export interface RegisterKafkaEndpointResult { uri: string; contextGraphId: string; + verificationStatus: KafkaEndpointVerificationStatus; + /** Probe completion timestamp, present whenever a probe ran. */ + verifiedAt?: string; } +/** + * Thrown when a probe failed and the caller did not pass `force=true`. The + * route translates this into a 4xx response. We use a typed error so route + * handlers can branch on `instanceof` instead of stringly-typed checks. + */ +export class KafkaEndpointProbeFailedError extends Error { + constructor(public readonly outcome: KafkaEndpointProbeOutcome) { + super( + `Kafka endpoint probe ${outcome.status} at ${outcome.probedAt}; ` + + `pass force=true to register anyway`, + ); + this.name = 'KafkaEndpointProbeFailedError'; + } +} + +/** + * Build and publish a Kafka topic endpoint KA into the named context graph. 
+ * Consumes the route's probe decision (if any) per ADR 0002, applies the + * `force` override, and throws `KafkaEndpointProbeFailedError` when a + * non-verified probe runs without `force=true`. + */ export async function registerKafkaEndpoint( input: RegisterKafkaEndpointInput, ): Promise { const issuedAt = input.issuedAt ?? new Date().toISOString(); const uri = buildKafkaEndpointUri(input); + + // ADR 0002: opportunistic verification. + // + // probe absent → status: unattempted, no verifiedAt + // probe verified → status: verified, verifiedAt = probedAt + // probe failed/unreachable → throw unless caller forced us + // probe failed + force=true → status: failed, verifiedAt = probedAt + // + // The route is the only caller; it owns the decision tree about whether + // to invoke the probe at all. We just consume its result. + let verificationStatus: KafkaEndpointVerificationStatus; + let verifiedAt: string | undefined; + if (!input.probe) { + verificationStatus = 'unattempted'; + } else if (input.probe.status === 'verified') { + verificationStatus = 'verified'; + verifiedAt = input.probe.probedAt; + } else { + if (!input.force) { + throw new KafkaEndpointProbeFailedError(input.probe); + } + verificationStatus = 'failed'; + verifiedAt = input.probe.probedAt; + } + const knowledgeAsset = buildKafkaEndpointKnowledgeAsset({ owner: input.owner, broker: input.broker, topic: input.topic, messageFormat: input.messageFormat, issuedAt, + verificationStatus, + verifiedAt, + securityProtocol: input.securityProtocol, }); await input.publisher.publish(input.contextGraphId, knowledgeAsset); @@ -48,5 +157,20 @@ export async function registerKafkaEndpoint( return { uri, contextGraphId: input.contextGraphId, + verificationStatus, + verifiedAt, + }; +} + +/** + * Convert a kafka-probe result into the endpoint registration probe-outcome shape. 
+ * The endpoint contract intentionally exposes a narrower view than the probe (no + * credential-adjacent fields, no broker connection details). + */ +export function toKafkaEndpointProbeOutcome(result: ProbeResult): KafkaEndpointProbeOutcome { + return { + status: result.status, + probedAt: result.probedAt, + ...(result.error ? { error: result.error } : {}), }; } diff --git a/packages/kafka/src/index.ts b/packages/kafka/src/index.ts index 9e74b9f44..355a6d178 100644 --- a/packages/kafka/src/index.ts +++ b/packages/kafka/src/index.ts @@ -1,3 +1,4 @@ export * from './uri.js'; export * from './ka-builder.js'; export * from './endpoint.js'; +export * from './kafka-probe.js'; diff --git a/packages/kafka/src/ka-builder.ts b/packages/kafka/src/ka-builder.ts index 42fe40c30..9ca9894ae 100644 --- a/packages/kafka/src/ka-builder.ts +++ b/packages/kafka/src/ka-builder.ts @@ -7,17 +7,73 @@ const KAFKA_ENDPOINT_CONTEXT = { xsd: 'http://www.w3.org/2001/XMLSchema#', } as const; +/** + * Verification status hint that lands on the KA as `dkg:verificationStatus`. + * + * - `unattempted`: caller did not supply credentials, no probe ran. + * - `verified`: probe succeeded (topic reachable with the supplied creds). + * - `failed`: probe ran and failed; only published when the caller passed + * `force=true` to override the registration block. + * + * The published value is advertised. It is not load-bearing — consumers may + * choose to re-verify before connecting. + */ +export type KafkaEndpointVerificationStatus = + | 'unattempted' + | 'verified' + | 'failed'; + +/** + * Inputs to `buildKafkaEndpointKnowledgeAsset`. Verification fields + * (`verificationStatus`, `verifiedAt`, `securityProtocol`) are all optional + * and only land on the KA when the caller opts in. + */ export interface BuildKafkaEndpointKnowledgeAssetInput { owner: string; broker: string; topic: string; messageFormat: string; issuedAt: string; + /** + * Optional probe outcome to advertise. 
Out-of-scope: omitting this field + * keeps the KA shape identical to slice-01. + */ + verificationStatus?: KafkaEndpointVerificationStatus; + /** Probe completion timestamp, ISO-8601. Only emitted when the probe ran. */ + verifiedAt?: string; + /** + * Advertised auth hint, mirrored from the registration request. Set even + * when no probe ran; never holds raw credentials. + */ + securityProtocol?: string; } +/** + * Build the JSON-LD knowledge asset for a Kafka topic endpoint. The KA is + * stable wire output: same inputs always produce the same shape, optional + * verification fields are appended only when supplied (slice-01 fixtures + * stay byte-compatible when probing is not opted into). + */ export function buildKafkaEndpointKnowledgeAsset(input: BuildKafkaEndpointKnowledgeAssetInput) { const owner = input.owner.toLowerCase(); + // Optional fields are appended only when present so the KA stays identical + // to slice-01 when the caller doesn't opt into verification metadata. This + // keeps the existing golden fixture trivially compatible. + const optional: Record = {}; + if (input.verificationStatus) { + optional['dkg:verificationStatus'] = input.verificationStatus; + } + if (input.verifiedAt) { + optional['dkg:verifiedAt'] = { + '@value': input.verifiedAt, + '@type': 'xsd:dateTime', + }; + } + if (input.securityProtocol) { + optional['dkg:securityProtocol'] = input.securityProtocol; + } + return { '@context': KAFKA_ENDPOINT_CONTEXT, '@id': buildKafkaEndpointUri(input), @@ -35,5 +91,6 @@ export function buildKafkaEndpointKnowledgeAsset(input: BuildKafkaEndpointKnowle '@value': input.issuedAt, '@type': 'xsd:dateTime', }, + ...optional, }; } diff --git a/packages/kafka/src/kafka-probe.ts b/packages/kafka/src/kafka-probe.ts new file mode 100644 index 000000000..a6dacc82a --- /dev/null +++ b/packages/kafka/src/kafka-probe.ts @@ -0,0 +1,352 @@ +// kafka-probe.ts — opportunistic broker reachability probe. 
+// +// ADR 0001 (kafka package writes metadata only): this module is a one-shot +// ADMIN call. It opens a connection, fetches topic metadata, and disconnects. +// No consumer, no group ID, no offset tracking, no long-lived broker state. +// Resist any urge to grow this into a smarter primitive — the probe is meant +// to answer one yes/no question (is the topic reachable with these creds?) +// and nothing more. +// +// kafkajs version is pinned in `package.json` (`kafkajs@2.2.4`) — chosen as +// the first runtime dependency on this package. kafkajs 2.x is the actively +// maintained line; the Admin API exposes `fetchTopicMetadata({ topics })`, +// which is the named operation the spec calls `describeTopics`. +// +// Credentials passed in are scoped to a single execution. The function never +// stores them on a closure outliving its own promise, never returns them, +// never logs them, and never persists them. The `ProbeResult` deliberately +// omits any credential strings. + +import { readFile } from 'node:fs/promises'; +import { + Kafka, + logLevel, + type Admin, + type KafkaConfig, + type SASLOptions, +} from 'kafkajs'; + +export type SecurityProtocol = + | 'PLAINTEXT' + | 'SASL_PLAINTEXT' + | 'SASL_SSL' + | 'SSL'; + +/** + * TLS material for SSL/SASL_SSL broker connections. PEMs accepted inline or + * via filesystem paths (escape hatch). + * + * All fields are optional. Both one-way TLS (server cert validated against + * the host trust store, no client cert) and mTLS (client cert + key supplied) + * are supported. SASL_SSL behaves the same way: TLS to the broker is + * server-side only by default, and a client cert/key may be supplied if the + * broker also requires mutual auth. + * + * Client `cert` and `key` must be supplied together (mTLS), or neither + * (one-way TLS). 
Supplying only one is rejected as invalid input — half of + * an mTLS pair is a local misconfiguration, not a broker reachability + * problem, and would otherwise surface as a confusing kafkajs handshake + * error. + */ +export interface KafkaSslMaterial { + /** PEM string (CA bundle). Preferred. */ + caPem?: string; + /** PEM string (mTLS client cert). Optional — only needed for mTLS. */ + certPem?: string; + /** PEM string (mTLS client key). Optional — only needed for mTLS. */ + keyPem?: string; + /** + * Filesystem-path escape hatch. The daemon host must have the PEMs + * pre-staged at these paths and readable by the daemon process. Inline PEMs + * are preferred; this exists for caller convenience and is read at probe + * time only. + */ + caPath?: string; + certPath?: string; + keyPath?: string; + /** Mirror of kafkajs `tls.rejectUnauthorized`. Defaults to `true`. */ + rejectUnauthorized?: boolean; +} + +export interface KafkaSaslCredentials { + /** SASL mechanism. kafkajs accepts lowercase identifiers. */ + mechanism: 'plain' | 'scram-sha-256' | 'scram-sha-512'; + username: string; + password: string; +} + +/** + * Inputs to a one-shot Kafka admin probe. Credentials are passed once to + * kafkajs and never returned, logged, or stored. + * + * For `SSL` and `SASL_SSL`, the `ssl` block may carry just a CA bundle + * (one-way TLS) or a CA bundle plus client cert + key (mTLS); both are + * supported. The probe does not enforce mTLS — supply cert + key only when + * the broker actually requires it. + */ +export interface KafkaProbeOptions { + brokers: string[]; + topic: string; + securityProtocol: SecurityProtocol; + sasl?: KafkaSaslCredentials; + ssl?: KafkaSslMaterial; + /** kafkajs client identifier (logged on the broker side). */ + clientId?: string; + /** Hard timeout for the entire probe call. Defaults to 5_000 ms. */ + timeoutMs?: number; +} + +export type ProbeStatus = 'verified' | 'failed' | 'unreachable'; + +/** + * Structured outcome of a probe call. 
Network/auth failures are encoded as + * `status` ≠ `'verified'`; the probe never throws on broker errors. + */ +export interface ProbeResult { + status: ProbeStatus; + /** Echoed for the KA. Not a credential. */ + securityProtocol: SecurityProtocol; + /** ISO-8601 timestamp recorded immediately before disconnect. */ + probedAt: string; + /** Sanitized error description. NEVER contains credential substrings. */ + error?: string; +} + +// Wall-clock ceiling for the entire probe round-trip. The kafkajs internal +// `connectionTimeout` (2_000) + `requestTimeout` (3_000) below should fit +// inside this budget; if you raise either, raise this too. See the kafkajs +// config block in `buildKafkaConfig` for the split rationale. +const DEFAULT_TIMEOUT_MS = 5_000; +const DEFAULT_CLIENT_ID = 'dkg-kafka-probe'; + +/** + * Runs a one-shot Kafka admin probe to verify a broker + topic combination. + * + * Network and auth failures are returned as structured results + * (`{ status: 'failed' | 'unreachable', error, ... }`). + * + * Throws ONLY on ill-formed input options: + * - `securityProtocol` requires SASL but `opts.sasl` is missing, + * - a PEM filesystem path is unreadable, + * - `securityProtocol` is not one of the four supported values. + * + * Callers (the route handler) are expected to validate input shape before + * invoking the probe; broker reachability is the function's domain. + * + * Credentials supplied in `opts` are passed once to the kafkajs admin client + * and never returned, logged, or persisted on the closure beyond the + * function's local scope. The `ProbeResult` deliberately omits any + * credential strings. + */ +export async function probe(opts: KafkaProbeOptions): Promise { + const timeoutMs = opts.timeoutMs ?? 
DEFAULT_TIMEOUT_MS; + const config = await buildKafkaConfig(opts); + + const kafka = new Kafka(config); + const admin: Admin = kafka.admin(); + + let result: { status: ProbeStatus; error?: string }; + try { + result = await runWithTimeout(probeAdmin(admin, opts.topic), timeoutMs); + } catch (err) { + // The only path that throws here is `runWithTimeout` racing against a + // hung `probeAdmin` call. Map it onto a structured failure so callers + // never have to discriminate "thrown vs returned" from this function. + result = { status: 'failed', error: classifyError(err) }; + } + + try { + return { + status: result.status, + securityProtocol: opts.securityProtocol, + probedAt: new Date().toISOString(), + error: result.error, + }; + } finally { + // Best-effort disconnect. If the connection never came up, kafkajs + // tolerates a no-op disconnect — but we swallow any throw here so the + // probe always returns a structured result instead of leaking. + try { + await admin.disconnect(); + } catch { + // intentionally swallowed: a probe failure already drove this branch + } + } +} + +async function probeAdmin(admin: Admin, topic: string): Promise<{ status: ProbeStatus; error?: string }> { + try { + await admin.connect(); + } catch (err) { + // kafkajs throws `KafkaJSSASLAuthenticationError` (and its parent + // `KafkaJSAuthenticationError`) from `connect()` when credentials are + // wrong — that is an auth failure, NOT broker unreachability. Lumping it + // under `unreachable` lies about the failure mode and steers operators + // towards network debugging instead of credential debugging. Anything we + // cannot positively identify as auth stays `unreachable` (the safe + // default for connect-time errors: the broker isn't reachable in a + // useful way). + const errorClass = classifyError(err); + const status: ProbeStatus = isAuthErrorClass(errorClass) ? 
'failed' : 'unreachable'; + return { status, error: errorClass }; + } + + try { + const metadata = await admin.fetchTopicMetadata({ topics: [topic] }); + const found = metadata.topics.some((t) => t.name === topic); + if (!found) { + return { status: 'failed', error: `Topic "${topic}" not present in cluster metadata` }; + } + return { status: 'verified' }; + } catch (err) { + return { status: 'failed', error: classifyError(err) }; + } +} + +// Names of kafkajs error classes that indicate authentication failure. +// `KafkaJSSASLAuthenticationError` is the SASL-specific class; the parent +// `KafkaJSAuthenticationError` covers any future auth-class addition that +// inherits from it. Both must map to `failed` (auth failure), not +// `unreachable` (network failure). +function isAuthErrorClass(name: string): boolean { + return ( + name === 'KafkaJSSASLAuthenticationError' || + name === 'KafkaJSAuthenticationError' + ); +} + +/** + * kafkajs surfaces typed errors with stable `name` values (KafkaJSConnectionError, + * KafkaJSSASLAuthenticationError, etc.). We strip free-form messages to a + * fixed dictionary plus the error class name; this keeps any accidentally-leaked + * credential substrings out of the result. + */ +function classifyError(err: unknown): string { + const name = (err as { name?: string } | null)?.name ?? 'Error'; + // kafkajs' UNKNOWN_TOPIC_OR_PARTITION protocol error class is the canonical + // "topic doesn't exist" signal we hit through fetchTopicMetadata. 
+ if (name === 'KafkaJSProtocolError') return 'KafkaJSProtocolError'; + if (name === 'KafkaJSConnectionError') return 'KafkaJSConnectionError'; + if (name === 'KafkaJSConnectionClosedError') return 'KafkaJSConnectionClosedError'; + if (name === 'KafkaJSBrokerNotFound') return 'KafkaJSBrokerNotFound'; + if (name === 'KafkaJSSASLAuthenticationError') return 'KafkaJSSASLAuthenticationError'; + if (name === 'KafkaJSNumberOfRetriesExceeded') return 'KafkaJSNumberOfRetriesExceeded'; + if (name === 'KafkaJSRequestTimeoutError') return 'KafkaJSRequestTimeoutError'; + return name; +} + +async function buildKafkaConfig(opts: KafkaProbeOptions): Promise { + const base: KafkaConfig = { + brokers: opts.brokers, + clientId: opts.clientId ?? DEFAULT_CLIENT_ID, + // Silence kafkajs' built-in logger entirely. We deliberately don't pipe it + // into our own logger because kafkajs occasionally embeds connection + // details in its log payloads, and this probe must never emit credentials. + logLevel: logLevel.NOTHING, + // Split timeouts that fail fast on different failure modes: + // `connectionTimeout` — TCP/TLS reach (unreachable broker → quick fail) + // `requestTimeout` — slow broker response after the connection is up + // Their sum (5_000 ms) deliberately matches `DEFAULT_TIMEOUT_MS` so the + // outer `runWithTimeout` only fires on a kafkajs hang that ignores both + // inner clocks. + connectionTimeout: 2_000, + requestTimeout: 3_000, + // Disable retries — a single probe attempt is intentional. Retries would + // multiply the wall-clock cost of `unreachable` outcomes and obscure the + // fact that the broker isn't reachable. 
+ retry: { retries: 0 }, + }; + + switch (opts.securityProtocol) { + case 'PLAINTEXT': + return { ...base, ssl: false }; + case 'SASL_PLAINTEXT': + return { ...base, ssl: false, sasl: requireSasl(opts) }; + case 'SASL_SSL': + return { ...base, ssl: await buildSsl(opts.ssl), sasl: requireSasl(opts) }; + case 'SSL': + return { ...base, ssl: await buildSsl(opts.ssl) }; + default: { + const exhaustive: never = opts.securityProtocol; + throw new Error(`Unsupported securityProtocol: ${String(exhaustive)}`); + } + } +} + +function requireSasl(opts: KafkaProbeOptions): SASLOptions { + if (!opts.sasl) { + throw new Error(`securityProtocol "${opts.securityProtocol}" requires SASL credentials`); + } + return { + mechanism: opts.sasl.mechanism, + username: opts.sasl.username, + password: opts.sasl.password, + }; +} + +/** @internal */ +interface SslConnectionOptions { + rejectUnauthorized: boolean; + ca?: string[]; + cert?: string; + key?: string; +} + +// SSL/SASL_SSL TLS material is fully optional. Pass-through whatever the +// caller supplied: a CA-only block produces a one-way-TLS config (server cert +// validated against the bundle, no client cert), and a CA + cert + key block +// produces an mTLS config. Brokers that demand mTLS will reject the handshake +// without the cert/key — that failure surfaces as a structured probe outcome, +// not a thrown exception, so callers can react uniformly. +// +// However, supplying half of an mTLS pair (cert without key, or key without +// cert) is a LOCAL input error, not a broker reachability problem. Reject it +// up front so the route translates it to HTTP 400 (input validation) rather +// than letting kafkajs fail later with a vague handshake error mapped to 422. +async function buildSsl( + ssl: KafkaSslMaterial | undefined, +): Promise { + const material = ssl ?? 
{}; + const ca = await loadOptionalPem(material.caPem, material.caPath); + const cert = await loadOptionalPem(material.certPem, material.certPath); + const key = await loadOptionalPem(material.keyPem, material.keyPath); + + if ((cert && !key) || (!cert && key)) { + throw new Error( + 'SSL configuration requires both client cert and key together (or neither)', + ); + } + + const tlsOpts: SslConnectionOptions = { + rejectUnauthorized: material.rejectUnauthorized ?? true, + }; + if (ca) tlsOpts.ca = [ca]; + if (cert) tlsOpts.cert = cert; + if (key) tlsOpts.key = key; + return tlsOpts; +} + +async function loadOptionalPem( + inline: string | undefined, + path: string | undefined, +): Promise { + if (inline && inline.trim().length > 0) return inline; + if (path && path.trim().length > 0) { + return readFile(path, 'utf8'); + } + return undefined; +} + +async function runWithTimeout(promise: Promise, timeoutMs: number): Promise { + let timer: ReturnType | undefined; + const timeout = new Promise((_, reject) => { + timer = setTimeout(() => { + reject(new Error(`Kafka probe timed out after ${timeoutMs}ms`)); + }, timeoutMs); + }); + try { + return await Promise.race([promise, timeout]); + } finally { + if (timer) clearTimeout(timer); + } +} diff --git a/packages/kafka/src/uri.ts b/packages/kafka/src/uri.ts index 86ad2df53..292736f3e 100644 --- a/packages/kafka/src/uri.ts +++ b/packages/kafka/src/uri.ts @@ -1,5 +1,9 @@ import { createHash } from 'node:crypto'; +/** + * Identity tuple for a Kafka topic endpoint URI: the (owner, broker, topic) + * triple is what the URI uniquely names. + */ export interface KafkaEndpointIdentity { owner: string; broker: string; @@ -12,6 +16,11 @@ function hashBrokerAndTopic(broker: string, topic: string): string { .digest('hex'); } +/** + * Build the deterministic URI for a Kafka topic endpoint. Owner is + * lowercased; (broker, topic) are sha256-hashed so the URI is stable across + * topology rewrites and casing variations. 
+ */ export function buildKafkaEndpointUri(identity: KafkaEndpointIdentity): string { const owner = identity.owner.toLowerCase(); const hash = hashBrokerAndTopic(identity.broker, identity.topic); diff --git a/packages/kafka/test/e2e/walking-skeleton.test.ts b/packages/kafka/test/e2e/walking-skeleton.test.ts index 9cca516be..ea90b008e 100644 --- a/packages/kafka/test/e2e/walking-skeleton.test.ts +++ b/packages/kafka/test/e2e/walking-skeleton.test.ts @@ -4,9 +4,14 @@ import { constants } from 'node:fs'; import { dirname, join } from 'node:path'; import { fileURLToPath } from 'node:url'; import { promisify } from 'node:util'; -import { beforeAll, beforeEach, describe, expect, it } from 'vitest'; +import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; import { ApiClient } from '../../../cli/src/api-client.js'; import { buildKafkaEndpointUri } from '../../src/uri.js'; +import { + startPlaintextKafka, + type PlaintextKafka, +} from '../helpers/kafka-container.js'; +import { createTopicAndProduce } from '../helpers/synthetic-producer.js'; const execFileAsync = promisify(execFile); @@ -61,6 +66,7 @@ async function waitForEndpointRow( PREFIX dct: PREFIX dkg: SELECT ?broker ?topic ?messageFormat ?publisher ?endpointUrl ?issued + ?verificationStatus ?verifiedAt ?securityProtocol WHERE { GRAPH ?g { BIND(<${uri}> AS ?endpoint) @@ -71,6 +77,9 @@ async function waitForEndpointRow( dct:publisher ?publisher ; dct:issued ?issued ; dcat:endpointURL ?endpointUrl . + OPTIONAL { ?endpoint dkg:verificationStatus ?verificationStatus } + OPTIONAL { ?endpoint dkg:verifiedAt ?verifiedAt } + OPTIONAL { ?endpoint dkg:securityProtocol ?securityProtocol } } } `; @@ -185,5 +194,80 @@ describe('kafka walking skeleton e2e', () => { expect(stripIriDelimiters(row.publisher ?? '')).toBe(`urn:dkg:agent:${owner}`); expect(stripIriDelimiters(row.endpointUrl ?? '')).toBe(`kafka://${broker}/${topic}`); expect(Number.isNaN(Date.parse(stripQuotedLiteral(row.issued ?? 
'')))).toBe(false); + // Slice 04: with no creds, the KA records `verificationStatus = + // "unattempted"` and carries neither verifiedAt nor securityProtocol. + expect(stripQuotedLiteral(row.verificationStatus ?? '')).toBe('unattempted'); }, 90_000); + + describe('live probe (slice 04)', () => { + let kafka: PlaintextKafka | undefined; + + beforeAll(async () => { + if (!RUN_E2E || !devnetReachable) return; + kafka = await startPlaintextKafka(); + }, 180_000); + + afterAll(async () => { + if (kafka) await kafka.stop(); + }, 60_000); + + it( + 'registers with creds + reachable topic → KA verified, verifiedAt within last minute', + async () => { + if (!kafka) throw new Error('kafka container should be up'); + // Create the synthetic topic the daemon's probe will look for. + const topic = `walking-skeleton-probe.${Date.now()}`; + await createTopicAndProduce({ bootstrap: kafka.bootstrap, topic }); + + const broker = kafka.bootstrap; + const messageFormat = 'application/cloudevents+json'; + const expectedUri = buildKafkaEndpointUri({ owner, broker, topic }); + + const before = Date.now(); + const result = await execFileAsync( + 'node', + [ + CLI_ENTRY, + 'kafka', + 'endpoint', + 'register', + '--cg', + CONTEXT_GRAPH_ID, + '--broker', + broker, + '--topic', + topic, + '--format', + messageFormat, + '--security-protocol', + 'PLAINTEXT', + ], + { + cwd: REPO_ROOT, + env: { + ...process.env, + DKG_HOME: DEVNET_NODE1_HOME, + DKG_API_PORT: String(port), + }, + }, + ); + + expect(result.stdout).toContain('Kafka endpoint registered:'); + expect(result.stdout).toContain('Verification status: verified'); + + const row = await waitForEndpointRow(client, CONTEXT_GRAPH_ID, expectedUri); + expect(stripQuotedLiteral(row.verificationStatus ?? '')).toBe('verified'); + expect(stripQuotedLiteral(row.securityProtocol ?? '')).toBe('PLAINTEXT'); + + const verifiedAt = stripQuotedLiteral(row.verifiedAt ?? 
''); + const verifiedAtMs = Date.parse(verifiedAt); + expect(Number.isNaN(verifiedAtMs)).toBe(false); + // Within the last minute means: between (before - 1s) and (now + 1s) + // for clock skew. + expect(verifiedAtMs).toBeGreaterThanOrEqual(before - 1_000); + expect(verifiedAtMs).toBeLessThanOrEqual(Date.now() + 1_000); + }, + 240_000, + ); + }); }); diff --git a/packages/kafka/test/endpoint.register.test.ts b/packages/kafka/test/endpoint.register.test.ts index e22f06fcb..377eab47a 100644 --- a/packages/kafka/test/endpoint.register.test.ts +++ b/packages/kafka/test/endpoint.register.test.ts @@ -1,30 +1,52 @@ import { describe, expect, it } from 'vitest'; -import { registerKafkaEndpoint } from '../src/endpoint.js'; +import { + KafkaEndpointProbeFailedError, + registerKafkaEndpoint, + toKafkaEndpointProbeOutcome, +} from '../src/endpoint.js'; -describe('registerKafkaEndpoint', () => { +interface CapturedPublish { + contextGraphId: string; + content: any; +} + +function makePublisher() { + const calls: CapturedPublish[] = []; + const publisher = { + async publish(contextGraphId: string, content: unknown) { + calls.push({ contextGraphId, content }); + return { ual: 'did:dkg:test/1', kcId: '1', status: 'confirmed' as const }; + }, + }; + return { publisher, calls }; +} + +const BASE_INPUT = { + contextGraphId: 'devnet-test', + owner: '0xAbCDEFabcdefABCDEFabcdefABCDEFabcdefABCD', + broker: 'kafka.example.com:9092', + topic: 'orders.created', + messageFormat: 'application/json', + issuedAt: '2026-05-04T12:34:56.000Z', +}; + +const EXPECTED_URI = + 'urn:dkg:kafka-endpoint:0xabcdefabcdefabcdefabcdefabcdefabcdefabcd:' + + '33b58f60595c766739f72b29e4ee417888d1a46af8339a4b5bdb1c3a5692f652'; + +describe('registerKafkaEndpoint — slice-01 backwards compat', () => { it('publishes the Kafka endpoint KA into the named context graph', async () => { - const calls: Array<{ contextGraphId: string; content: unknown }> = []; - const publisher = { - async publish(contextGraphId: string, 
content: unknown) { - calls.push({ contextGraphId, content }); - return { ual: 'did:dkg:test/1', kcId: '1', status: 'confirmed' as const }; - }, - }; + const { publisher, calls } = makePublisher(); const result = await registerKafkaEndpoint({ - contextGraphId: 'devnet-test', - owner: '0xAbCDEFabcdefABCDEFabcdefABCDEFabcdefABCD', - broker: 'kafka.example.com:9092', - topic: 'orders.created', - messageFormat: 'application/json', - issuedAt: '2026-05-04T12:34:56.000Z', + ...BASE_INPUT, publisher, }); expect(result).toEqual({ - uri: 'urn:dkg:kafka-endpoint:0xabcdefabcdefabcdefabcdefabcdefabcdefabcd:' + - '33b58f60595c766739f72b29e4ee417888d1a46af8339a4b5bdb1c3a5692f652', + uri: EXPECTED_URI, contextGraphId: 'devnet-test', + verificationStatus: 'unattempted', }); expect(calls).toHaveLength(1); @@ -52,7 +74,240 @@ describe('registerKafkaEndpoint', () => { '@value': '2026-05-04T12:34:56.000Z', '@type': 'xsd:dateTime', }, + // Verification metadata always lands on the KA — `unattempted` is + // the canonical no-probe value (see ADR 0002). + 'dkg:verificationStatus': 'unattempted', }, }); }); + + it('falls back to "now" when issuedAt is omitted', async () => { + // The default `issuedAt` is `new Date().toISOString()`. We assert the KA + // carries a fresh, well-formed ISO-8601 timestamp without dictating the + // exact moment — wall-clock equality is brittle. 
+ const { publisher, calls } = makePublisher(); + const before = new Date(); + + const { issuedAt: _drop, ...inputWithoutIssuedAt } = BASE_INPUT; + void _drop; + const result = await registerKafkaEndpoint({ + ...inputWithoutIssuedAt, + publisher, + }); + + expect(result.verificationStatus).toBe('unattempted'); + const ka = calls[0].content as Record; + const issued = ka['dct:issued']; + expect(issued['@type']).toBe('xsd:dateTime'); + const issuedDate = new Date(issued['@value']); + expect(Number.isNaN(issuedDate.getTime())).toBe(false); + // The default branch must produce a timestamp at or after the moment we + // entered the call. Allow 5 s of slack for slow CI clocks. + expect(issuedDate.getTime()).toBeGreaterThanOrEqual(before.getTime() - 5_000); + }); +}); + +describe('registerKafkaEndpoint — opportunistic verification (ADR 0002)', () => { + it('creds absent → no probe → status "unattempted", no verifiedAt; advertised securityProtocol still lands', async () => { + const { publisher, calls } = makePublisher(); + + const result = await registerKafkaEndpoint({ + ...BASE_INPUT, + publisher, + securityProtocol: 'PLAINTEXT', + // probe omitted → caller did not supply credentials + }); + + expect(result.verificationStatus).toBe('unattempted'); + expect(result.verifiedAt).toBeUndefined(); + + const ka = calls[0].content; + expect(ka['dkg:verificationStatus']).toBe('unattempted'); + expect(ka['dkg:securityProtocol']).toBe('PLAINTEXT'); + expect(ka['dkg:verifiedAt']).toBeUndefined(); + }); + + it('creds present + probe verified → status "verified", verifiedAt set to probedAt', async () => { + const { publisher, calls } = makePublisher(); + + const result = await registerKafkaEndpoint({ + ...BASE_INPUT, + publisher, + securityProtocol: 'SASL_SSL', + probe: { status: 'verified', probedAt: '2026-05-04T12:35:00.000Z' }, + }); + + expect(result).toMatchObject({ + verificationStatus: 'verified', + verifiedAt: '2026-05-04T12:35:00.000Z', + }); + + const ka = calls[0].content; 
+ expect(ka['dkg:verificationStatus']).toBe('verified'); + expect(ka['dkg:verifiedAt']).toEqual({ + '@value': '2026-05-04T12:35:00.000Z', + '@type': 'xsd:dateTime', + }); + expect(ka['dkg:securityProtocol']).toBe('SASL_SSL'); + }); + + it('creds present + probe verified + force=true → identical to force=false on success (force ignored)', async () => { + // ADR 0002: `force` is only consulted when the probe did NOT verify. On a + // successful probe, the flag is irrelevant — the resulting KA must be + // bit-identical to the force=false verified case. Guards against a future + // change that lets `force=true` mutate the recorded `verificationStatus` + // when there's nothing to override. + const { publisher, calls } = makePublisher(); + + const result = await registerKafkaEndpoint({ + ...BASE_INPUT, + publisher, + securityProtocol: 'SASL_SSL', + probe: { status: 'verified', probedAt: '2026-05-04T12:35:00.000Z' }, + force: true, + }); + + expect(result).toMatchObject({ + verificationStatus: 'verified', + verifiedAt: '2026-05-04T12:35:00.000Z', + }); + + const ka = calls[0].content; + expect(ka['dkg:verificationStatus']).toBe('verified'); + expect(ka['dkg:verifiedAt']).toEqual({ + '@value': '2026-05-04T12:35:00.000Z', + '@type': 'xsd:dateTime', + }); + expect(ka['dkg:securityProtocol']).toBe('SASL_SSL'); + }); + + it('creds present + probe failed (no force) → throws KafkaEndpointProbeFailedError; no KA published', async () => { + const { publisher, calls } = makePublisher(); + + await expect( + registerKafkaEndpoint({ + ...BASE_INPUT, + publisher, + securityProtocol: 'SASL_PLAINTEXT', + probe: { status: 'failed', probedAt: '2026-05-04T12:36:00.000Z' }, + }), + ).rejects.toBeInstanceOf(KafkaEndpointProbeFailedError); + + expect(calls).toHaveLength(0); + }); + + it('creds present + probe unreachable (no force) → throws; no KA published', async () => { + const { publisher, calls } = makePublisher(); + + await expect( + registerKafkaEndpoint({ + ...BASE_INPUT, + 
publisher, + securityProtocol: 'SASL_PLAINTEXT', + probe: { status: 'unreachable', probedAt: '2026-05-04T12:37:00.000Z' }, + }), + ).rejects.toBeInstanceOf(KafkaEndpointProbeFailedError); + + expect(calls).toHaveLength(0); + }); + + it('creds present + probe failed + force=true → status "failed", verifiedAt set, KA published', async () => { + const { publisher, calls } = makePublisher(); + + const result = await registerKafkaEndpoint({ + ...BASE_INPUT, + publisher, + securityProtocol: 'SASL_PLAINTEXT', + probe: { status: 'failed', probedAt: '2026-05-04T12:38:00.000Z' }, + force: true, + }); + + expect(result).toMatchObject({ + verificationStatus: 'failed', + verifiedAt: '2026-05-04T12:38:00.000Z', + }); + + const ka = calls[0].content; + expect(ka['dkg:verificationStatus']).toBe('failed'); + expect(ka['dkg:verifiedAt']).toEqual({ + '@value': '2026-05-04T12:38:00.000Z', + '@type': 'xsd:dateTime', + }); + expect(ka['dkg:securityProtocol']).toBe('SASL_PLAINTEXT'); + }); + + it('creds present + probe unreachable + force=true → status "failed", KA published', async () => { + const { publisher, calls } = makePublisher(); + + const result = await registerKafkaEndpoint({ + ...BASE_INPUT, + publisher, + securityProtocol: 'SSL', + probe: { status: 'unreachable', probedAt: '2026-05-04T12:39:00.000Z' }, + force: true, + }); + + // Both `failed` and `unreachable` collapse to the published + // `verificationStatus: "failed"` value — the KA only records "we ran a + // probe and it didn't verify". The granular distinction stays in the + // log, not on the wire. + expect(result.verificationStatus).toBe('failed'); + expect(calls).toHaveLength(1); + }); + + it('KA never includes raw credential fields under any branch', async () => { + // Smoke check that `endpoint.register` doesn't accidentally pull a + // credential field through from somewhere; the input type doesn't + // accept one, but defence-in-depth doesn't hurt. 
+ const { publisher, calls } = makePublisher(); + await registerKafkaEndpoint({ + ...BASE_INPUT, + publisher, + securityProtocol: 'SASL_SSL', + probe: { status: 'verified', probedAt: '2026-05-04T12:40:00.000Z' }, + }); + const blob = JSON.stringify(calls[0].content); + expect(blob).not.toMatch(/password/i); + expect(blob).not.toMatch(/username/i); + expect(blob).not.toMatch(/BEGIN [A-Z ]+/); + }); +}); + +describe('toKafkaEndpointProbeOutcome', () => { + it('passes through status and probedAt on a verified result (no error)', () => { + expect( + toKafkaEndpointProbeOutcome({ + status: 'verified', + securityProtocol: 'SASL_SSL', + probedAt: '2026-05-04T12:40:00.000Z', + }), + ).toEqual({ + status: 'verified', + probedAt: '2026-05-04T12:40:00.000Z', + }); + }); + + it('includes the error field when the probe carries one', () => { + expect( + toKafkaEndpointProbeOutcome({ + status: 'failed', + securityProtocol: 'PLAINTEXT', + probedAt: '2026-05-04T12:41:00.000Z', + error: 'KafkaJSProtocolError', + }), + ).toEqual({ + status: 'failed', + probedAt: '2026-05-04T12:41:00.000Z', + error: 'KafkaJSProtocolError', + }); + }); + + it('omits the error field when the probe result has no error string', () => { + const out = toKafkaEndpointProbeOutcome({ + status: 'verified', + securityProtocol: 'PLAINTEXT', + probedAt: '2026-05-04T12:42:00.000Z', + }); + expect('error' in out).toBe(false); + }); }); diff --git a/packages/kafka/test/fixtures/docker-compose.yml b/packages/kafka/test/fixtures/docker-compose.yml new file mode 100644 index 000000000..beadca74e --- /dev/null +++ b/packages/kafka/test/fixtures/docker-compose.yml @@ -0,0 +1,33 @@ +# Manual-debug fixture for the kafka-probe integration tests. +# +# CI does NOT use this file — testcontainers spins up the same image +# programmatically through `test/helpers/kafka-container.ts`. 
This compose +# file exists so you can `docker compose -f packages/kafka/test/fixtures/docker-compose.yml up` +# locally and point the probe at `localhost:9093` while iterating. +# +# IMPORTANT: keep the image tag and listener config in lockstep with +# `test/helpers/kafka-container.ts` (KAFKA_IMAGE constant). If you bump +# one, bump the other. + +services: + kafka: + image: confluentinc/cp-kafka:7.5.0 + container_name: dkg-kafka-probe-debug + ports: + - "9093:9093" + environment: + KAFKA_NODE_ID: 1 + KAFKA_PROCESS_ROLES: broker,controller + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: BROKER:PLAINTEXT,PLAINTEXT:PLAINTEXT,CONTROLLER:PLAINTEXT + KAFKA_LISTENERS: BROKER://0.0.0.0:9092,PLAINTEXT://0.0.0.0:9093,CONTROLLER://0.0.0.0:9094 + KAFKA_ADVERTISED_LISTENERS: BROKER://kafka:9092,PLAINTEXT://localhost:9093 + KAFKA_INTER_BROKER_LISTENER_NAME: BROKER + KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER + KAFKA_CONTROLLER_QUORUM_VOTERS: 1@kafka:9094 + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 + KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + KAFKA_LOG_FLUSH_INTERVAL_MESSAGES: 9223372036854775807 + KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false" + CLUSTER_ID: 4L6g3nShT-eMCtK--X86sw diff --git a/packages/kafka/test/fixtures/endpoint-ka.json b/packages/kafka/test/fixtures/endpoint-ka.json index 71da18435..4a1a538cd 100644 --- a/packages/kafka/test/fixtures/endpoint-ka.json +++ b/packages/kafka/test/fixtures/endpoint-ka.json @@ -22,5 +22,11 @@ "dct:issued": { "@value": "2026-05-04T12:34:56.000Z", "@type": "xsd:dateTime" - } + }, + "dkg:verificationStatus": "verified", + "dkg:verifiedAt": { + "@value": "2026-05-04T12:35:00.000Z", + "@type": "xsd:dateTime" + }, + "dkg:securityProtocol": "SASL_SSL" } diff --git a/packages/kafka/test/helpers/kafka-container.ts b/packages/kafka/test/helpers/kafka-container.ts new file mode 100644 index 000000000..5600a55c6 --- /dev/null +++ 
b/packages/kafka/test/helpers/kafka-container.ts
@@ -0,0 +1,34 @@
+// Test-only helper. Production code never imports testcontainers.
+//
+// Brings up a single-broker Confluent Kafka via @testcontainers/kafka
+// (KRaft mode for cp-kafka >= 8.x), and surfaces the broker bootstrap
+// string as `host:mappedPort`.
+//
+// The image tag is duplicated in `test/fixtures/docker-compose.yml`. If you
+// change one, change the other so manual debugging matches CI.
+
+import { KafkaContainer, type StartedKafkaContainer } from '@testcontainers/kafka';
+
+/**
+ * Image used by the testcontainers helper AND by the
+ * `test/fixtures/docker-compose.yml` manual-debug fixture. Keep them in sync.
+ */
+export const KAFKA_IMAGE = 'confluentinc/cp-kafka:7.5.0';
+
+export interface PlaintextKafka {
+  bootstrap: string;
+  container: StartedKafkaContainer;
+  stop(): Promise<void>;
+}
+
+export async function startPlaintextKafka(): Promise<PlaintextKafka> {
+  const container = await new KafkaContainer(KAFKA_IMAGE).start();
+  // The PLAINTEXT external listener is bound to container port 9093 by
+  // @testcontainers/kafka. We map it onto a free host port at start time.
+  const bootstrap = `${container.getHost()}:${container.getMappedPort(9093)}`;
+  return {
+    bootstrap,
+    container,
+    stop: () => container.stop(),
+  };
+}
diff --git a/packages/kafka/test/helpers/synthetic-producer.ts b/packages/kafka/test/helpers/synthetic-producer.ts
new file mode 100644
index 000000000..363269299
--- /dev/null
+++ b/packages/kafka/test/helpers/synthetic-producer.ts
@@ -0,0 +1,46 @@
+// Test-only helper. Creates a topic on the broker and (optionally) produces a
+// single message so the kafka-probe has something concrete to find via
+// `fetchTopicMetadata`. The probe never reads message content; one message
+// is sent purely to nudge `auto.create.topics.enable=true` semantics on the
+// rare image where it matters.
+
+import { Kafka, logLevel } from 'kafkajs';
+
+export interface SyntheticProducerOptions {
+  bootstrap: string;
+  topic: string;
+  /** Produce a single message after creating the topic. Defaults to true. */
+  produce?: boolean;
+}
+
+export async function createTopicAndProduce(opts: SyntheticProducerOptions): Promise<void> {
+  const kafka = new Kafka({
+    clientId: 'synthetic-producer',
+    brokers: [opts.bootstrap],
+    logLevel: logLevel.NOTHING,
+    retry: { retries: 2 },
+  });
+  const admin = kafka.admin();
+  try {
+    await admin.connect();
+    await admin.createTopics({
+      topics: [{ topic: opts.topic, numPartitions: 1, replicationFactor: 1 }],
+      waitForLeaders: true,
+    });
+  } finally {
+    await admin.disconnect();
+  }
+
+  if (opts.produce ?? true) {
+    const producer = kafka.producer();
+    try {
+      await producer.connect();
+      await producer.send({
+        topic: opts.topic,
+        messages: [{ value: 'synthetic' }],
+      });
+    } finally {
+      await producer.disconnect();
+    }
+  }
+}
diff --git a/packages/kafka/test/integration/kafka-probe.test.ts b/packages/kafka/test/integration/kafka-probe.test.ts
new file mode 100644
index 000000000..17e69b591
--- /dev/null
+++ b/packages/kafka/test/integration/kafka-probe.test.ts
@@ -0,0 +1,100 @@
+// Integration tests for `probe` against a real Kafka broker (via
+// testcontainers). Docker is required.
+//
+// Gating: set `DKG_KAFKA_INTEGRATION=0` to skip locally if Docker isn't
+// available. Defaults to running in any environment that has Docker — we
+// don't want a missing flag to silently bypass coverage.
+ +import { afterAll, beforeAll, describe, expect, it } from 'vitest'; +import { probe } from '../../src/kafka-probe.js'; +import { + startPlaintextKafka, + type PlaintextKafka, +} from '../helpers/kafka-container.js'; +import { createTopicAndProduce } from '../helpers/synthetic-producer.js'; + +const SKIP = + process.env.DKG_KAFKA_INTEGRATION === '0' || + process.env.DKG_KAFKA_INTEGRATION === 'false'; + +const VITEST_TIMEOUT = 180_000; + +describe.skipIf(SKIP)('kafka-probe integration (PLAINTEXT)', () => { + let kafka: PlaintextKafka; + const presentTopic = 'probe-present'; + + beforeAll(async () => { + kafka = await startPlaintextKafka(); + await createTopicAndProduce({ bootstrap: kafka.bootstrap, topic: presentTopic }); + }, VITEST_TIMEOUT); + + afterAll(async () => { + if (kafka) await kafka.stop(); + }, VITEST_TIMEOUT); + + it('verified: topic exists on the broker', async () => { + const result = await probe({ + brokers: [kafka.bootstrap], + topic: presentTopic, + securityProtocol: 'PLAINTEXT', + }); + expect(result.status).toBe('verified'); + expect(result.error).toBeUndefined(); + expect(Number.isNaN(Date.parse(result.probedAt))).toBe(false); + }, VITEST_TIMEOUT); + + it('failed: topic does not exist on the broker', async () => { + const result = await probe({ + brokers: [kafka.bootstrap], + topic: 'absent-topic-' + Date.now(), + securityProtocol: 'PLAINTEXT', + }); + // Either the broker says "topic absent in metadata" → 'failed', or it + // throws a protocol error (kafka image version-dependent) → also 'failed'. + expect(result.status).toBe('failed'); + }, VITEST_TIMEOUT); + + it('unreachable: wrong port', async () => { + // Map a port that is almost certainly closed on the host. + const result = await probe({ + brokers: ['127.0.0.1:1'], + topic: presentTopic, + securityProtocol: 'PLAINTEXT', + timeoutMs: 3_000, + }); + expect(['unreachable', 'failed']).toContain(result.status); + // Whatever surfaces, it's classified, never the raw error message. 
+ expect(result.error).toBeDefined(); + expect(result.error).not.toMatch(/127\.0\.0\.1/); + }, VITEST_TIMEOUT); + + it('credential discarding: SASL creds passed against PLAINTEXT broker → no creds in result', async () => { + // The broker we spin up is PLAINTEXT, so a SASL_PLAINTEXT probe will + // fail at the connection layer. We want to verify that the failure + // result carries no credential substrings. + const result = await probe({ + brokers: [kafka.bootstrap], + topic: presentTopic, + securityProtocol: 'SASL_PLAINTEXT', + sasl: { mechanism: 'plain', username: 'INTEG-USER-MARKER', password: 'INTEG-PASS-MARKER' }, + timeoutMs: 5_000, + }); + expect(['failed', 'unreachable']).toContain(result.status); + const blob = JSON.stringify(result); + expect(blob).not.toContain('INTEG-USER-MARKER'); + expect(blob).not.toContain('INTEG-PASS-MARKER'); + }, VITEST_TIMEOUT); +}); + +// SASL_SSL coverage is deferred — wiring up a TLS-enabled broker via +// testcontainers requires generating a JKS keystore, plumbing it as a SASL +// SSL listener, and bouncing the broker. The kafka-container helper has a +// `withSaslSslListener` option but the certificate plumbing exceeds the +// "straightforward" bar called out in the slice's acceptance criteria. The +// SASL_SSL config-wiring branch is exercised in the unit tests +// (`test/kafka-probe.test.ts`); the integration coverage stays PLAINTEXT-only +// for this slice. +// +// Follow-up tracking: extend this file with a `describe.skipIf(SKIP)` block +// that drives a SASL_SSL listener once we have a fixture certificate +// generator we trust. 
diff --git a/packages/kafka/test/ka-builder.test.ts b/packages/kafka/test/ka-builder.test.ts index cb39ba5e8..9df6933a9 100644 --- a/packages/kafka/test/ka-builder.test.ts +++ b/packages/kafka/test/ka-builder.test.ts @@ -3,13 +3,16 @@ import { describe, expect, it } from 'vitest'; import { buildKafkaEndpointKnowledgeAsset } from '../src/ka-builder.js'; describe('buildKafkaEndpointKnowledgeAsset', () => { - it('builds the minimum Kafka endpoint KA shape', async () => { + it('builds the full Kafka endpoint KA shape with verification metadata', async () => { const actual = buildKafkaEndpointKnowledgeAsset({ owner: '0xAbCDEFabcdefABCDEFabcdefABCDEFabcdefABCD', broker: 'kafka.example.com:9092', topic: 'orders.created', messageFormat: 'application/json', issuedAt: '2026-05-04T12:34:56.000Z', + verificationStatus: 'verified', + verifiedAt: '2026-05-04T12:35:00.000Z', + securityProtocol: 'SASL_SSL', }); const fixtureUrl = new URL('./fixtures/endpoint-ka.json', import.meta.url); @@ -17,4 +20,52 @@ describe('buildKafkaEndpointKnowledgeAsset', () => { expect(actual).toEqual(expected); }); + + it('omits verification metadata when no probe-related fields are passed (slice-01 shape)', () => { + const actual = buildKafkaEndpointKnowledgeAsset({ + owner: '0xAbCDEFabcdefABCDEFabcdefABCDEFabcdefABCD', + broker: 'kafka.example.com:9092', + topic: 'orders.created', + messageFormat: 'application/json', + issuedAt: '2026-05-04T12:34:56.000Z', + }); + + expect(actual).not.toHaveProperty('dkg:verificationStatus'); + expect(actual).not.toHaveProperty('dkg:verifiedAt'); + expect(actual).not.toHaveProperty('dkg:securityProtocol'); + }); + + it('emits verificationStatus and securityProtocol but omits verifiedAt when probe did not run', () => { + const actual = buildKafkaEndpointKnowledgeAsset({ + owner: '0xAbCDEFabcdefABCDEFabcdefABCDEFabcdefABCD', + broker: 'kafka.example.com:9092', + topic: 'orders.created', + messageFormat: 'application/json', + issuedAt: '2026-05-04T12:34:56.000Z', + 
verificationStatus: 'unattempted',
+      securityProtocol: 'PLAINTEXT',
+    });
+
+    expect((actual as Record<string, unknown>)['dkg:verificationStatus']).toBe('unattempted');
+    expect((actual as Record<string, unknown>)['dkg:securityProtocol']).toBe('PLAINTEXT');
+    expect(actual).not.toHaveProperty('dkg:verifiedAt');
+  });
+
+  it('emits verifiedAt as a typed xsd:dateTime literal', () => {
+    const actual = buildKafkaEndpointKnowledgeAsset({
+      owner: '0xAbCDEFabcdefABCDEFabcdefABCDEFabcdefABCD',
+      broker: 'kafka.example.com:9092',
+      topic: 'orders.created',
+      messageFormat: 'application/json',
+      issuedAt: '2026-05-04T12:34:56.000Z',
+      verificationStatus: 'failed',
+      verifiedAt: '2026-05-04T12:35:00.000Z',
+      securityProtocol: 'SASL_PLAINTEXT',
+    });
+
+    expect((actual as Record<string, unknown>)['dkg:verifiedAt']).toEqual({
+      '@value': '2026-05-04T12:35:00.000Z',
+      '@type': 'xsd:dateTime',
+    });
+  });
+});
diff --git a/packages/kafka/test/kafka-probe.test.ts b/packages/kafka/test/kafka-probe.test.ts
new file mode 100644
index 000000000..7be7892cf
--- /dev/null
+++ b/packages/kafka/test/kafka-probe.test.ts
@@ -0,0 +1,752 @@
+import { mkdtemp, rm, writeFile } from 'node:fs/promises';
+import { tmpdir } from 'node:os';
+import { join } from 'node:path';
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
+import type { KafkaConfig, SASLOptions } from 'kafkajs';
+
+interface CapturedAdmin {
+  config: KafkaConfig;
+  connect: ReturnType<typeof vi.fn>;
+  disconnect: ReturnType<typeof vi.fn>;
+  fetchTopicMetadata: ReturnType<typeof vi.fn>;
+}
+
+const captured: { last: CapturedAdmin | null } = { last: null };
+
+interface AdminBehavior {
+  connect?: () => Promise<void>;
+  disconnect?: () => Promise<void>;
+  fetchTopicMetadata?: (
+    options: { topics: string[] },
+  ) => Promise<{ topics: Array<{ name: string; partitions: unknown[] }> }>;
+}
+
+let nextAdminBehavior: AdminBehavior = {};
+
+vi.mock('kafkajs', async () => {
+  // We mock the entire kafkajs surface area we touch. Keep the mock dumb —
+  // any "smart" behavior here would mask bugs in `kafka-probe`.
+ return { + Kafka: class { + private readonly _config: KafkaConfig; + constructor(config: KafkaConfig) { + this._config = config; + } + admin() { + const behavior = nextAdminBehavior; + const admin: CapturedAdmin = { + config: this._config, + connect: vi.fn(behavior.connect ?? (async () => {})), + disconnect: vi.fn(behavior.disconnect ?? (async () => {})), + fetchTopicMetadata: vi.fn( + behavior.fetchTopicMetadata ?? + (async ({ topics }: { topics: string[] }) => ({ + topics: topics.map((name) => ({ name, partitions: [] })), + })), + ), + }; + captured.last = admin; + return admin; + } + }, + logLevel: { NOTHING: 0, ERROR: 1, WARN: 2, INFO: 4, DEBUG: 5 }, + }; +}); + +beforeEach(() => { + captured.last = null; + nextAdminBehavior = {}; +}); + +afterEach(() => { + vi.restoreAllMocks(); +}); + +async function importProbe() { + // Importing here, after the vi.mock above is registered, ensures the probe + // module sees the mocked kafkajs. + const mod = await import('../src/kafka-probe.js'); + return mod; +} + +describe('probe — auth-mode wiring', () => { + it('PLAINTEXT: ssl=false, no sasl', async () => { + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'PLAINTEXT', + }); + expect(result.status).toBe('verified'); + expect(captured.last!.config.ssl).toBe(false); + expect(captured.last!.config.sasl).toBeUndefined(); + }); + + it('SASL_PLAINTEXT: ssl=false, sasl with creds', async () => { + const { probe } = await importProbe(); + await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SASL_PLAINTEXT', + sasl: { mechanism: 'plain', username: 'alice', password: 'super-secret-1' }, + }); + expect(captured.last!.config.ssl).toBe(false); + const sasl = captured.last!.config.sasl as SASLOptions; + expect(sasl).toMatchObject({ + mechanism: 'plain', + username: 'alice', + password: 'super-secret-1', + }); + }); + + it('SASL_SSL: ssl with CA pem, sasl with 
creds', async () => { + const { probe } = await importProbe(); + await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SASL_SSL', + sasl: { mechanism: 'plain', username: 'alice', password: 'super-secret-2' }, + ssl: { caPem: '-----BEGIN CERTIFICATE-----\nCA\n-----END CERTIFICATE-----' }, + }); + const ssl = captured.last!.config.ssl as { ca?: string[]; rejectUnauthorized?: boolean }; + expect(ssl.rejectUnauthorized).toBe(true); + expect(ssl.ca).toEqual([ + '-----BEGIN CERTIFICATE-----\nCA\n-----END CERTIFICATE-----', + ]); + expect(captured.last!.config.sasl).toBeDefined(); + }); + + it('SSL (mTLS): cert + key flow into kafkajs config alongside CA', async () => { + const { probe } = await importProbe(); + await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SSL', + ssl: { + caPem: '-----BEGIN CERTIFICATE-----\nCA\n-----END CERTIFICATE-----', + certPem: '-----BEGIN CERTIFICATE-----\nCERT\n-----END CERTIFICATE-----', + keyPem: '-----BEGIN PRIVATE KEY-----\nKEY\n-----END PRIVATE KEY-----', + }, + }); + const ssl = captured.last!.config.ssl as { ca?: string[]; cert?: string; key?: string }; + expect(ssl.ca).toBeDefined(); + expect(ssl.cert).toContain('CERT'); + expect(ssl.key).toContain('KEY'); + expect(captured.last!.config.sasl).toBeUndefined(); + }); + + it('SSL (one-way TLS): CA-only succeeds; the kafkajs config carries the CA bundle and rejectUnauthorized', async () => { + // Real-world SSL deployments are commonly server-cert-only (CA in the + // trust store, no client cert/key). The probe must NOT force mTLS. 
+ const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SSL', + ssl: { caPem: '-----BEGIN CERTIFICATE-----\nCA\n-----END CERTIFICATE-----' }, + }); + expect(result.status).toBe('verified'); + const ssl = captured.last!.config.ssl as { + ca?: string[]; + cert?: string; + key?: string; + rejectUnauthorized?: boolean; + }; + expect(ssl.rejectUnauthorized).toBe(true); + expect(ssl.ca).toEqual([ + '-----BEGIN CERTIFICATE-----\nCA\n-----END CERTIFICATE-----', + ]); + expect(ssl.cert).toBeUndefined(); + expect(ssl.key).toBeUndefined(); + expect(captured.last!.config.sasl).toBeUndefined(); + }); + + it('SASL_PLAINTEXT without sasl creds throws', async () => { + const { probe } = await importProbe(); + await expect( + probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SASL_PLAINTEXT', + }), + ).rejects.toThrow(/SASL credentials/); + }); +}); + +describe('probe — PEM filesystem escape hatch', () => { + let tmp = ''; + + beforeEach(async () => { + tmp = await mkdtemp(join(tmpdir(), 'kafka-probe-pem-')); + }); + + afterEach(async () => { + await rm(tmp, { recursive: true, force: true }); + }); + + it('reads CA pem from caPath when caPem is absent', async () => { + const caPath = join(tmp, 'ca.pem'); + await writeFile(caPath, '-----BEGIN CERTIFICATE-----\nFROM-DISK\n-----END CERTIFICATE-----'); + const { probe } = await importProbe(); + await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SASL_SSL', + sasl: { mechanism: 'plain', username: 'u', password: 'p-from-disk' }, + ssl: { caPath }, + }); + const ssl = captured.last!.config.ssl as { ca?: string[] }; + expect(ssl.ca?.[0]).toContain('FROM-DISK'); + }); + + it('reads cert/key pems from certPath/keyPath in mTLS mode', async () => { + const certPath = join(tmp, 'cert.pem'); + const keyPath = join(tmp, 'key.pem'); + await writeFile(certPath, '-----BEGIN 
CERTIFICATE-----\nDISK-CERT\n-----END CERTIFICATE-----'); + await writeFile(keyPath, '-----BEGIN PRIVATE KEY-----\nDISK-KEY\n-----END PRIVATE KEY-----'); + const { probe } = await importProbe(); + await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SSL', + ssl: { certPath, keyPath }, + }); + const ssl = captured.last!.config.ssl as { cert?: string; key?: string }; + expect(ssl.cert).toContain('DISK-CERT'); + expect(ssl.key).toContain('DISK-KEY'); + }); +}); + +describe('probe — outcomes', () => { + it('verified: topic present in cluster metadata', async () => { + nextAdminBehavior = { + fetchTopicMetadata: async ({ topics }) => ({ + topics: topics.map((name) => ({ name, partitions: [{}, {}] })), + }), + }; + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'PLAINTEXT', + }); + expect(result.status).toBe('verified'); + expect(result.error).toBeUndefined(); + expect(Number.isNaN(Date.parse(result.probedAt))).toBe(false); + expect(result.securityProtocol).toBe('PLAINTEXT'); + }); + + it('failed: topic absent from cluster metadata', async () => { + nextAdminBehavior = { + fetchTopicMetadata: async () => ({ topics: [] }), + }; + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'PLAINTEXT', + }); + expect(result.status).toBe('failed'); + expect(result.error).toMatch(/Topic "orders"/); + }); + + it('unreachable: connect throws (network error)', async () => { + nextAdminBehavior = { + connect: async () => { + const err = new Error('connect ECONNREFUSED 127.0.0.1:9092'); + (err as any).name = 'KafkaJSConnectionError'; + throw err; + }, + }; + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'PLAINTEXT', + }); + expect(result.status).toBe('unreachable'); + 
expect(result.error).toBe('KafkaJSConnectionError'); + }); + + it('failed: KafkaJSSASLAuthenticationError thrown from connect → failed (NOT unreachable)', async () => { + // kafkajs surfaces SASL auth failures as a connect-time rejection. The + // probe must classify these as `failed` (auth/credential problem) — not + // `unreachable` (network/transport problem) — so operators are steered + // towards credential debugging, not network debugging. + nextAdminBehavior = { + connect: async () => { + const err = new Error('SASL Authentication failed for user'); + (err as any).name = 'KafkaJSSASLAuthenticationError'; + throw err; + }, + }; + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SASL_PLAINTEXT', + sasl: { mechanism: 'plain', username: 'alice', password: 'wrong-secret-zzz' }, + }); + expect(result.status).toBe('failed'); + expect(result.error).toBe('KafkaJSSASLAuthenticationError'); + // No credentials in the structured result. + const serialized = JSON.stringify(result); + expect(serialized).not.toContain('alice'); + expect(serialized).not.toContain('wrong-secret-zzz'); + }); + + it('failed: KafkaJSAuthenticationError (parent class) thrown from connect → failed', async () => { + // The parent kafkajs auth-error class. Anything inheriting from it is + // by definition an auth failure, even if the SASL-specific subclass is + // not what we got. 
+ nextAdminBehavior = { + connect: async () => { + const err = new Error('Authentication failed'); + (err as any).name = 'KafkaJSAuthenticationError'; + throw err; + }, + }; + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SASL_PLAINTEXT', + sasl: { mechanism: 'plain', username: 'u', password: 'p' }, + }); + expect(result.status).toBe('failed'); + expect(result.error).toBe('KafkaJSAuthenticationError'); + }); + + it('unreachable: KafkaJSConnectionError thrown from connect stays unreachable', async () => { + // Network-class errors must not be reclassified as auth failures. The + // existing "unreachable: connect throws (network error)" test covers the + // KafkaJSConnectionError path generally; this guard is here so a future + // contributor cannot widen `isAuthErrorClass` and silently regress the + // network-failure mapping. + nextAdminBehavior = { + connect: async () => { + const err = new Error('connect ECONNREFUSED 127.0.0.1:9092'); + (err as any).name = 'KafkaJSConnectionError'; + throw err; + }, + }; + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'PLAINTEXT', + }); + expect(result.status).toBe('unreachable'); + expect(result.error).toBe('KafkaJSConnectionError'); + }); + + it('unreachable: arbitrary connect-time errors (e.g. EAI_AGAIN) default to unreachable', async () => { + // Anything we cannot positively identify as auth must default to + // `unreachable`. EAI_AGAIN is a libc DNS-resolver retry signal that + // bubbles up as a non-kafkajs name; the probe should not pretend to know + // it's an auth failure. 
+ nextAdminBehavior = { + connect: async () => { + const err = new Error('getaddrinfo EAI_AGAIN kafka.example.com'); + (err as any).name = 'EAI_AGAIN'; + throw err; + }, + }; + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['kafka.example.com:9092'], + topic: 'orders', + securityProtocol: 'PLAINTEXT', + }); + expect(result.status).toBe('unreachable'); + expect(result.error).toBe('EAI_AGAIN'); + }); + + it('failed: fetchTopicMetadata throws an Error → classified', async () => { + nextAdminBehavior = { + fetchTopicMetadata: async () => { + const err = new Error('UNKNOWN_TOPIC_OR_PARTITION'); + (err as any).name = 'KafkaJSProtocolError'; + throw err; + }, + }; + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'PLAINTEXT', + }); + expect(result.status).toBe('failed'); + expect(result.error).toBe('KafkaJSProtocolError'); + }); + + it('always disconnects, even on fetchTopicMetadata failure', async () => { + nextAdminBehavior = { + fetchTopicMetadata: async () => { + throw Object.assign(new Error('boom'), { name: 'KafkaJSConnectionError' }); + }, + }; + const { probe } = await importProbe(); + await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'PLAINTEXT', + }); + expect(captured.last!.disconnect).toHaveBeenCalledTimes(1); + }); + + it('disconnect failure does not corrupt the probe result', async () => { + nextAdminBehavior = { + disconnect: async () => { + throw new Error('disconnect raced'); + }, + }; + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'PLAINTEXT', + }); + expect(result.status).toBe('verified'); + }); +}); + +describe('probe — credential discarding', () => { + it('ProbeResult does not echo SASL username or password under any outcome', async () => { + const cases: Array<{ behavior: AdminBehavior; expected: 
string }> = [ + { behavior: {}, expected: 'verified' }, + { + behavior: { + connect: async () => + Promise.reject(Object.assign(new Error('refused'), { name: 'KafkaJSConnectionError' })), + }, + expected: 'unreachable', + }, + { + behavior: { fetchTopicMetadata: async () => ({ topics: [] }) }, + expected: 'failed', + }, + ]; + + for (const { behavior, expected } of cases) { + nextAdminBehavior = behavior; + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['kafka.local:9092'], + topic: 'orders', + securityProtocol: 'SASL_SSL', + sasl: { + mechanism: 'plain', + username: 'CRED-USER-MARKER', + password: 'CRED-PASS-MARKER', + }, + ssl: { + caPem: '-----BEGIN CERTIFICATE-----\nCA-PEM-MARKER\n-----END CERTIFICATE-----', + }, + }); + expect(result.status).toBe(expected); + const blob = JSON.stringify(result); + expect(blob).not.toContain('CRED-USER-MARKER'); + expect(blob).not.toContain('CRED-PASS-MARKER'); + expect(blob).not.toContain('CA-PEM-MARKER'); + } + }); +}); + +describe('probe — logging primitives never see credentials (regression guard)', () => { + // The probe production code today does NOT log anything itself — it relies + // on structured `ProbeResult` returns and never imports `Logger` or calls + // `console.*`. This test is defence-in-depth: if a future contributor adds + // `Logger.info(opts)` or `console.log(opts)` to the probe and accidentally + // hands raw credentials to a logging primitive, this assertion fails. The + // intent is "no credential leak through any logging primitive." + it('no credential substring appears in any captured Logger/console call', async () => { + const { Logger } = await import('@origintrail-official/dkg-core'); + + // Intercept at the prototype level so any Logger instance the probe might + // construct in the future is captured here. 
+ const loggerSpies = [ + vi.spyOn(Logger.prototype, 'info').mockImplementation(() => {}), + vi.spyOn(Logger.prototype, 'warn').mockImplementation(() => {}), + vi.spyOn(Logger.prototype, 'error').mockImplementation(() => {}), + vi.spyOn(Logger.prototype, 'debug').mockImplementation(() => {}), + ]; + const consoleSpies = [ + vi.spyOn(console, 'log').mockImplementation(() => {}), + vi.spyOn(console, 'warn').mockImplementation(() => {}), + vi.spyOn(console, 'error').mockImplementation(() => {}), + vi.spyOn(console, 'debug').mockImplementation(() => {}), + ]; + + const SECRETS = [ + 'CRED-USER-LOG-MARKER', + 'CRED-PASS-LOG-MARKER', + 'CA-PEM-LOG-MARKER', + 'CERT-PEM-LOG-MARKER', + 'KEY-PEM-LOG-MARKER', + ] as const; + + const { probe } = await importProbe(); + await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SASL_SSL', + sasl: { + mechanism: 'plain', + username: 'CRED-USER-LOG-MARKER', + password: 'CRED-PASS-LOG-MARKER', + }, + ssl: { + caPem: '-----BEGIN CERTIFICATE-----\nCA-PEM-LOG-MARKER\n-----END CERTIFICATE-----', + }, + }); + + // Also exercise the SSL/mTLS branch so cert+key PEMs flow through too. + await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SSL', + ssl: { + caPem: '-----BEGIN CERTIFICATE-----\nCA-PEM-LOG-MARKER\n-----END CERTIFICATE-----', + certPem: '-----BEGIN CERTIFICATE-----\nCERT-PEM-LOG-MARKER\n-----END CERTIFICATE-----', + keyPem: '-----BEGIN PRIVATE KEY-----\nKEY-PEM-LOG-MARKER\n-----END PRIVATE KEY-----', + }, + }); + + // Stringify every captured argument across every spy and assert no secret + // ever made it into a logging primitive. The probe is supposed to log + // nothing today, so the typical case is "no calls at all" — but the + // assertion stays satisfied even if some non-credential debug log appears + // in the future. 
+ const allCalls = [...loggerSpies, ...consoleSpies].flatMap((spy) => spy.mock.calls); + const blob = JSON.stringify(allCalls); + for (const secret of SECRETS) { + expect(blob).not.toContain(secret); + } + }); +}); + +describe('probe — timeout', () => { + it('returns failed when probeAdmin exceeds timeoutMs', async () => { + nextAdminBehavior = { + // Simulate a hung connect — never resolves until we abandon it. + connect: () => new Promise(() => {}), + }; + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'PLAINTEXT', + timeoutMs: 50, + }); + // Timeout surfaces through the outer race as a generic Error → classified + // as 'Error' string. The probe itself never times out *as* unreachable; + // it bubbles a structured failure instead. + expect(['failed', 'unreachable']).toContain(result.status); + }, 1_000); +}); + +describe('probe — kafkajs config defaults', () => { + it('clientId defaults to dkg-kafka-probe and logLevel is NOTHING', async () => { + const { probe } = await importProbe(); + await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'PLAINTEXT', + }); + expect(captured.last!.config.clientId).toBe('dkg-kafka-probe'); + expect(captured.last!.config.logLevel).toBe(0); + expect(captured.last!.config.retry).toEqual({ retries: 0 }); + }); + + it('clientId override is honored', async () => { + const { probe } = await importProbe(); + await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'PLAINTEXT', + clientId: 'custom-client', + }); + expect(captured.last!.config.clientId).toBe('custom-client'); + }); +}); + +describe('probe — buildKafkaConfig exhaustiveness guard', () => { + // The route is the only caller and validates `securityProtocol` before + // invoking the probe; the type system also narrows the switch arms via + // `never`. 
This test forces an unreachable arm by casting through the + // public input type, asserting the defensive throw fires and is not + // swallowed by the outer Promise.race / disconnect block. Driving this + // branch eliminates the last uncovered statements in `kafka-probe.ts` and + // guarantees a future contributor cannot silently delete the guard. + it('throws on an unrecognized securityProtocol value', async () => { + const { probe } = await importProbe(); + await expect( + probe({ + brokers: ['localhost:9092'], + topic: 'orders', + // Cast bypasses the type system to exercise the defensive default + // arm. Real callers can never reach this branch. + securityProtocol: 'ROT13' as unknown as 'PLAINTEXT', + }), + ).rejects.toThrow(/Unsupported securityProtocol: ROT13/); + }); +}); + +describe('probe — error classification branches', () => { + // `classifyError` returns a fixed dictionary keyed by `err.name`. Each arm + // strips an attacker-controllable error message down to a stable class name, + // so a caller that logs the result can never accidentally surface a + // credential substring. We exercise every named arm so the dictionary + // can't silently regress. + const ERROR_NAMES = [ + 'KafkaJSBrokerNotFound', + 'KafkaJSNumberOfRetriesExceeded', + 'KafkaJSRequestTimeoutError', + 'KafkaJSConnectionClosedError', + ] as const; + + for (const name of ERROR_NAMES) { + it(`classifies ${name} thrown from connect`, async () => { + nextAdminBehavior = { + connect: async () => { + throw Object.assign(new Error('inner-message-with-CRED-MARKER'), { name }); + }, + }; + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'PLAINTEXT', + }); + expect(result.status).toBe('unreachable'); + expect(result.error).toBe(name); + // Defence in depth: never echo the inner message. 
+ expect(JSON.stringify(result)).not.toContain('CRED-MARKER'); + }); + } + + it('falls back to err.name when the error is not a known kafkajs class', async () => { + nextAdminBehavior = { + connect: async () => { + throw Object.assign(new Error('inner-message'), { name: 'TypeError' }); + }, + }; + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'PLAINTEXT', + }); + expect(result.status).toBe('unreachable'); + expect(result.error).toBe('TypeError'); + }); +}); + +describe('probe — SSL material defaults', () => { + it('SSL with no `ssl` block at all → falls back to {} and emits a TLS-only kafkajs config (rejectUnauthorized=true, no ca/cert/key)', async () => { + // Exercises the `ssl ?? {}` path in `buildSsl`. With no caller-supplied + // PEMs the probe still wires a TLS block — validation falls back to the + // host trust store (kafkajs default). This is a one-way-TLS handshake, + // not an mTLS handshake; brokers requiring mTLS will reject during the + // handshake and the failure surfaces as a structured probe outcome. + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SSL', + }); + expect(result.status).toBe('verified'); + const ssl = captured.last!.config.ssl as { + ca?: string[]; + cert?: string; + key?: string; + rejectUnauthorized?: boolean; + }; + expect(ssl.rejectUnauthorized).toBe(true); + expect(ssl.ca).toBeUndefined(); + expect(ssl.cert).toBeUndefined(); + expect(ssl.key).toBeUndefined(); + }); +}); + +describe('probe — SSL client cert/key XOR validation', () => { + // Half of an mTLS pair (cert without key, or key without cert) is a LOCAL + // input error: the caller intended mTLS but only supplied one half. 
The + // probe must throw so the route translates it to HTTP 400 (input + // validation), not let kafkajs fail later with a vague handshake error + // that gets mapped to 422 (probe failure). + + it('cert-only (no key) → throws an input error', async () => { + const { probe } = await importProbe(); + await expect( + probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SSL', + ssl: { + certPem: '-----BEGIN CERTIFICATE-----\nCERT\n-----END CERTIFICATE-----', + }, + }), + ).rejects.toThrow( + /SSL configuration requires both client cert and key together/, + ); + }); + + it('key-only (no cert) → throws an input error', async () => { + const { probe } = await importProbe(); + await expect( + probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SSL', + ssl: { + keyPem: '-----BEGIN PRIVATE KEY-----\nKEY\n-----END PRIVATE KEY-----', + }, + }), + ).rejects.toThrow( + /SSL configuration requires both client cert and key together/, + ); + }); + + it('CA-only (no client cert/key) → no throw (one-way TLS)', async () => { + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SSL', + ssl: { + caPem: '-----BEGIN CERTIFICATE-----\nCA\n-----END CERTIFICATE-----', + }, + }); + expect(result.status).toBe('verified'); + }); + + it('cert + key together → no throw (mTLS)', async () => { + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SSL', + ssl: { + certPem: '-----BEGIN CERTIFICATE-----\nCERT\n-----END CERTIFICATE-----', + keyPem: '-----BEGIN PRIVATE KEY-----\nKEY\n-----END PRIVATE KEY-----', + }, + }); + expect(result.status).toBe('verified'); + }); + + it('no ssl block at all → no throw (default trust store)', async () => { + // Already covered by the "SSL material defaults" describe above — this + // duplicate guard pins the contract here too: the 
cert/key XOR check + // must not trip on `ssl ?? {}` (both inputs absent is the legitimate + // one-way-TLS shape). + const { probe } = await importProbe(); + const result = await probe({ + brokers: ['localhost:9092'], + topic: 'orders', + securityProtocol: 'SSL', + }); + expect(result.status).toBe('verified'); + }); +}); diff --git a/packages/kafka/vitest.config.ts b/packages/kafka/vitest.config.ts index 2b68f1e59..6ce22f94b 100644 --- a/packages/kafka/vitest.config.ts +++ b/packages/kafka/vitest.config.ts @@ -8,6 +8,13 @@ export default defineConfig({ provider: 'v8', reporter: ['text', 'html', 'lcov', 'json-summary'], reportsDirectory: './coverage', + // Scope coverage to the package's production surface. Test helpers (which + // are wired up only when `DKG_KAFKA_INTEGRATION=1` and Docker is + // available) would otherwise drag the unit-test coverage numbers down. + // `src/index.ts` is a re-export barrel — it has no executable lines that + // unit tests can meaningfully credit, so it is excluded from the scope. 
+ include: ['src/**'], + exclude: ['src/index.ts'], thresholds: kosavaKafkaCoverage, }, }, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1ec1298b4..cde4461af 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -488,10 +488,23 @@ importers: version: 8.3.3(web-streams-polyfill@3.3.3) packages/kafka: + dependencies: + kafkajs: + specifier: 2.2.4 + version: 2.2.4 devDependencies: + '@origintrail-official/dkg-core': + specifier: workspace:* + version: link:../core + '@testcontainers/kafka': + specifier: 11.14.0 + version: 11.14.0 '@vitest/coverage-v8': specifier: ^4.0.18 version: 4.0.18(vitest@4.0.18(@types/node@22.19.11)(happy-dom@20.8.3(bufferutil@4.1.0)(utf-8-validate@5.0.10))(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)) + testcontainers: + specifier: 11.14.0 + version: 11.14.0 vitest: specifier: ^4.0.18 version: 4.0.18(@types/node@22.19.11)(happy-dom@20.8.3(bufferutil@4.1.0)(utf-8-validate@5.0.10))(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3) @@ -820,6 +833,9 @@ packages: resolution: {integrity: sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==} engines: {node: '>=6.9.0'} + '@balena/dockerignore@1.0.2': + resolution: {integrity: sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==} + '@bcoe/v8-coverage@1.0.2': resolution: {integrity: sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==} engines: {node: '>=18'} @@ -1287,6 +1303,20 @@ packages: '@ethersproject/wordlists@5.8.0': resolution: {integrity: sha512-2df9bbXicZws2Sb5S6ET493uJ0Z84Fjr3pC4tu/qlnZERibZCeUVuqdtt+7Tv9xxhUxHoIekIA7avrKUWHrezg==} + '@grpc/grpc-js@1.14.3': + resolution: {integrity: sha512-Iq8QQQ/7X3Sac15oB6p0FmUg/klxQvXLeileoqrTRGJYLV+/9tubbr9ipz0GKHjmXVsgFPo/+W+2cA8eNcR+XA==} + engines: {node: '>=12.10.0'} + + '@grpc/proto-loader@0.7.15': + resolution: {integrity: sha512-tMXdRCfYVixjuFK+Hk0Q1s38gV9zDiDJfWL3h1rv4Qc39oILCu1TRTDt7+fGUI8K4G1Fj125Hx/ru3azECWTyQ==} + engines: {node: '>=6'} 
+ hasBin: true + + '@grpc/proto-loader@0.8.0': + resolution: {integrity: sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ==} + engines: {node: '>=6'} + hasBin: true + '@hono/node-server@1.19.10': resolution: {integrity: sha512-hZ7nOssGqRgyV3FVVQdfi+U4q02uB23bpnYpdvNXkYTRRyWx84b7yf1ans+dnJ/7h41sGL3CeQTfO+ZGxuO+Iw==} engines: {node: '>=18.14.1'} @@ -1316,6 +1346,12 @@ packages: '@jridgewell/trace-mapping@0.3.9': resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + '@js-sdsl/ordered-map@4.4.2': + resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} + + '@kwsites/file-exists@1.1.1': + resolution: {integrity: sha512-m9/5YGR18lIwxSFDwfE3oA7bWuq9kdau6ugN4H2rJeyhFQZcG9AgSHkQtSD15a8WvTgfz9aikZMrKPHvbpqFiw==} + '@leichtgewicht/ip-codec@2.0.5': resolution: {integrity: sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw==} @@ -1836,6 +1872,9 @@ packages: '@protobufjs/codegen@2.0.4': resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==} + '@protobufjs/codegen@2.0.5': + resolution: {integrity: sha512-zgXFLzW3Ap33e6d0Wlj4MGIm6Ce8O89n/apUaGNB/jx+hw+ruWEp7EwGUshdLKVRCxZW12fp9r40E1mQrf/34g==} + '@protobufjs/eventemitter@1.1.0': resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==} @@ -1848,6 +1887,9 @@ packages: '@protobufjs/inquire@1.1.0': resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==} + '@protobufjs/inquire@1.1.1': + resolution: {integrity: sha512-mnzgDV26ueAvk7rsbt9L7bE0SuAoqyuys/sMMrmVcN5x9VsxpcG3rqAUSgDyLp0UZlmNfIbQ4fHfCtreVBk8Ew==} + '@protobufjs/path@1.1.2': resolution: {integrity: 
sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==} @@ -1857,6 +1899,9 @@ packages: '@protobufjs/utf8@1.1.0': resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} + '@protobufjs/utf8@1.1.1': + resolution: {integrity: sha512-oOAWABowe8EAbMyWKM0tYDKi8Yaox52D+HWZhAIJqQXbqe0xI/GV7FhLWqlEKreMkfDjshR5FKgi3mnle0h6Eg==} + '@rdfjs/types@2.0.1': resolution: {integrity: sha512-uyAzpugX7KekAXAHq26m3JlUIZJOC0uSBhpnefGV5i15bevDyyejoB7I+9MKeUrzXD8OOUI3+4FeV1wwQr5ihA==} @@ -2050,6 +2095,9 @@ packages: '@standard-schema/spec@1.1.0': resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==} + '@testcontainers/kafka@11.14.0': + resolution: {integrity: sha512-TwyI6t6zNmxA5CHnIeauMDiAw+O6eENBYaiNxx7hISvSEm54LuZYnwBxtJByhR6tBvWQTnhkZLfbck7vmw2x+Q==} + '@tsconfig/node10@1.0.12': resolution: {integrity: sha512-UCYBaeFvM11aU2y3YPZ//O5Rhj+xKyzy7mvcIoAjASbigy8mHMryP5cK7dgjlz2hWxh1g5pLw084E0a/wlUSFQ==} @@ -2146,6 +2194,12 @@ packages: '@types/dns-packet@5.6.5': resolution: {integrity: sha512-qXOC7XLOEe43ehtWJCMnQXvgcIpv6rPmQ1jXT98Ad8A3TB1Ue50jsCbSSSyuazScEuZ/Q026vHbrOTVkmwA+7Q==} + '@types/docker-modem@3.0.6': + resolution: {integrity: sha512-yKpAGEuKRSS8wwx0joknWxsmLha78wNMe9R2S3UNsVOkZded8UqOrV8KoeDXoXsjndxwyF3eIhyClGbO1SEhEg==} + + '@types/dockerode@4.0.1': + resolution: {integrity: sha512-cmUpB+dPN955PxBEuXE3f6lKO1hHiIGYJA46IVF3BJpNsZGvtBDcRnlrHYHtOH/B6vtDOyl2kZ2ShAu3mgc27Q==} + '@types/estree@1.0.8': resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} @@ -2165,6 +2219,9 @@ packages: '@types/n3@1.26.1': resolution: {integrity: sha512-TilYHzpU6ecXVJAbV+6o17Z8ZkWLWx6ZJD3IluaU4RiGHxqjU2or9fopxFHS6iXS6qcl5Mg1K3wSx9L8xxJaJQ==} + '@types/node@18.19.130': + resolution: {integrity: 
sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==} + '@types/node@22.19.11': resolution: {integrity: sha512-BH7YwL6rA93ReqeQS1c4bsPpcfOmJasG+Fkr6Y59q83f9M1WcBRHR2vM+P9eOisYRcN3ujQoiZY8uk5W+1WL8w==} @@ -2191,6 +2248,15 @@ packages: '@types/secp256k1@4.0.7': resolution: {integrity: sha512-Rcvjl6vARGAKRO6jHeKMatGrvOMGrR/AR11N1x2LqintPCyDZ7NBhrh238Z2VZc7aM7KIwnFpFQ7fnfK4H/9Qw==} + '@types/ssh2-streams@0.1.13': + resolution: {integrity: sha512-faHyY3brO9oLEA0QlcO8N2wT7R0+1sHWZvQ+y3rMLwdY1ZyS1z0W3t65j9PqT4HmQ6ALzNe7RZlNuCNE0wBSWA==} + + '@types/ssh2@0.5.52': + resolution: {integrity: sha512-lbLLlXxdCZOSJMCInKH2+9V/77ET2J6NPQHpFI0kda61Dd1KglJs+fPQBchizmzYSOJBgdTajhPqBO1xxLywvg==} + + '@types/ssh2@1.15.5': + resolution: {integrity: sha512-N1ASjp/nXH3ovBHddRJpli4ozpk6UdDYIX4RJWFa9L1YKnzdhTlVmiGHm4DZnj/jLbqZpes4aeR30EFGQtvhQQ==} + '@types/stats.js@0.17.4': resolution: {integrity: sha512-jIBvWWShCvlBqBNIZt0KAshWpvSjhkwkEu4ZUcASoAvhmrgAUI2t1dXrjSL4xXVLB4FznPrIsX3nKXFl/Dt4vA==} @@ -2369,6 +2435,14 @@ packages: resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} engines: {node: '>= 8'} + archiver-utils@5.0.2: + resolution: {integrity: sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==} + engines: {node: '>= 14'} + + archiver@7.0.1: + resolution: {integrity: sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==} + engines: {node: '>= 14'} + arg@4.1.3: resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} @@ -2390,6 +2464,9 @@ packages: resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} engines: {node: '>=8'} + asn1@0.2.6: + resolution: {integrity: sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==} + 
assertion-error@1.1.0: resolution: {integrity: sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==} @@ -2403,9 +2480,15 @@ packages: ast-v8-to-istanbul@0.3.12: resolution: {integrity: sha512-BRRC8VRZY2R4Z4lFIL35MwNXmwVqBityvOIwETtsCSwvjl0IdgFsy9NhdaA6j74nUdtJJlIypeRhpDam19Wq3g==} + async-lock@1.4.1: + resolution: {integrity: sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ==} + async@1.5.2: resolution: {integrity: sha512-nSVgobk4rv61R9PUSDtYt7mPVB2olxNR5RWJcAsH676/ef11bUZwvu7+RGYrYauVdDPcO519v68wRhXQtxsV9w==} + async@3.2.6: + resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==} + asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} @@ -2420,10 +2503,59 @@ packages: axios@1.13.5: resolution: {integrity: sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==} + b4a@1.8.1: + resolution: {integrity: sha512-aiqre1Nr0B/6DgE2N5vwTc+2/oQZ4Wh1t4NznYY4E00y8LCt6NqdRv81so00oo27D8MVKTpUa/MwUUtBLXCoDw==} + peerDependencies: + react-native-b4a: '*' + peerDependenciesMeta: + react-native-b4a: + optional: true + balanced-match@4.0.4: resolution: {integrity: sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==} engines: {node: 18 || 20 || >=22} + bare-events@2.8.2: + resolution: {integrity: sha512-riJjyv1/mHLIPX4RwiK+oW9/4c3TEUeORHKefKAKnZ5kyslbN+HXowtbaVEqt4IMUB7OXlfixcs6gsFeo/jhiQ==} + peerDependencies: + bare-abort-controller: '*' + peerDependenciesMeta: + bare-abort-controller: + optional: true + + bare-fs@4.7.1: + resolution: {integrity: sha512-WDRsyVN52eAx/lBamKD6uyw8H4228h/x0sGGGegOamM2cd7Pag88GfMQalobXI+HaEUxpCkbKQUDOQqt9wawRw==} + engines: {bare: '>=1.16.0'} + peerDependencies: + bare-buffer: '*' + peerDependenciesMeta: + bare-buffer: + optional: true + + 
bare-os@3.9.1: + resolution: {integrity: sha512-6M5XjcnsygQNPMCMPXSK379xrJFiZ/AEMNBmFEmQW8d/789VQATvriyi5r0HYTL9TkQ26rn3kgdTG3aisbrXkQ==} + engines: {bare: '>=1.14.0'} + + bare-path@3.0.0: + resolution: {integrity: sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw==} + + bare-stream@2.13.1: + resolution: {integrity: sha512-Vp0cnjYyrEC4whYTymQ+YZi6pBpfiICZO3cfRG8sy67ZNWe951urv1x4eW1BKNngw3U+3fPYb5JQvHbCtxH7Ow==} + peerDependencies: + bare-abort-controller: '*' + bare-buffer: '*' + bare-events: '*' + peerDependenciesMeta: + bare-abort-controller: + optional: true + bare-buffer: + optional: true + bare-events: + optional: true + + bare-url@2.4.2: + resolution: {integrity: sha512-/9a2j4ac6ckpmAHvod/ob7x439OAHst/drc2Clnq+reRYd/ovddwcF4LfoxHyNk5AuGBnPg+HqFjmE/Zpq6v0A==} + base-x@3.0.11: resolution: {integrity: sha512-xz7wQ8xDhdyP7tQxwdteLYeFfS68tSMNCZ/Y37WJ4bhGfKPpqEIlmIyueQHqOyoPhE6xNUqjzRr8ra0eF9VRvA==} @@ -2435,6 +2567,9 @@ packages: engines: {node: '>=6.0.0'} hasBin: true + bcrypt-pbkdf@1.0.2: + resolution: {integrity: sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==} + bech32@1.1.4: resolution: {integrity: sha512-s0IrSOzLlbvX7yp4WBfPITzpAU8sqQcpsmwXDiKwrG4r491vwCO/XpejasRNl0piBMe/DvP4Tz0mIS/X1DPJBQ==} @@ -2505,6 +2640,10 @@ packages: bs58check@2.1.2: resolution: {integrity: sha512-0TS1jicxdU09dwJMNZtVAfzPi6Q6QeN0pM1Fkzrjn+XYHvzMKPU3pHVpva+769iNVSfIYWf7LJ6WR+BuuMf8cA==} + buffer-crc32@1.0.0: + resolution: {integrity: sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==} + engines: {node: '>=8.0.0'} + buffer-from@1.1.2: resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} @@ -2524,12 +2663,20 @@ packages: resolution: {integrity: sha512-ZMANVnAixE6AWWnPzlW2KpUrxhm9woycYvPOo67jWHyFowASTEd9s+QN1EIMsSDtwhIxN4sWE1jotpuDUIgyIw==} engines: {node: '>=6.14.2'} + 
buildcheck@0.0.7: + resolution: {integrity: sha512-lHblz4ahamxpTmnsk+MNTRWsjYKv965MwOrSJyeD588rR3Jcu7swE+0wN5F+PbL5cjgu/9ObkhfzEPuofEMwLA==} + engines: {node: '>=10.0.0'} + bundle-require@5.1.0: resolution: {integrity: sha512-3WrrOuZiyaaZPWiEt4G3+IffISVC9HYlWueJEBWED4ZH4aIAC2PnkdnuRrR94M+w6yGWn4AglWtJtBI8YqvgoA==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} peerDependencies: esbuild: '>=0.18' + byline@5.0.0: + resolution: {integrity: sha512-s6webAy+R4SR8XVuJWt2V2rGvhnrhxN+9S15GNuTK3wKPOXFF6RNc+8ug2XhH+2s4f+uudG4kUVYmYOQWL2g0Q==} + engines: {node: '>=0.10.0'} + bytes@3.1.2: resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} engines: {node: '>= 0.8'} @@ -2628,6 +2775,10 @@ packages: cliui@7.0.4: resolution: {integrity: sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==} + cliui@8.0.1: + resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} + engines: {node: '>=12'} + clsx@2.1.1: resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==} engines: {node: '>=6'} @@ -2672,6 +2823,13 @@ packages: resolution: {integrity: sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==} engines: {node: '>= 12'} + compare-versions@6.1.1: + resolution: {integrity: sha512-4hm4VPpIecmlg59CHXnRDnqGplJFrbLG4aFEl5vl6cK1u76ws3LLvX7ikFnTDl5vo39sjWD6AaDPYodJp/NNHg==} + + compress-commons@6.0.2: + resolution: {integrity: sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==} + engines: {node: '>= 14'} + confbox@0.1.8: resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==} @@ -2713,6 +2871,19 @@ packages: resolution: {integrity: 
sha512-tJtZBBHA6vjIAaF6EnIaq6laBBP9aq/Y3ouVJjEfoHbRBcHBAHYcMh/w8LDrk2PvIMMq8gmopa5D4V8RmbrxGw==} engines: {node: '>= 0.10'} + cpu-features@0.0.10: + resolution: {integrity: sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==} + engines: {node: '>=10.0.0'} + + crc-32@1.2.2: + resolution: {integrity: sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==} + engines: {node: '>=0.8'} + hasBin: true + + crc32-stream@6.0.0: + resolution: {integrity: sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==} + engines: {node: '>= 14'} + create-hash@1.2.0: resolution: {integrity: sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==} @@ -2917,6 +3088,18 @@ packages: resolution: {integrity: sha512-l4gcSouhcgIKRvyy99RNVOgxXiicE+2jZoNmaNmZ6JXiGajBOJAesk1OBlJuM5k2c+eudGdLxDqXuPCKIj6kpw==} engines: {node: '>=6'} + docker-compose@1.4.2: + resolution: {integrity: sha512-rPHigTKGaEHpkUmfd69QgaOp+Os5vGJwG/Ry8lcr8W/382AmI+z/D7qoa9BybKIkqNppaIbs8RYeHSevdQjWww==} + engines: {node: '>= 6.0.0'} + + docker-modem@5.0.7: + resolution: {integrity: sha512-XJgGhoR/CLpqshm4d3L7rzH6t8NgDFUIIpztYlLHIApeJjMZKYJMz2zxPsYxnejq5h3ELYSw/RBsi3t5h7gNTA==} + engines: {node: '>= 8.0'} + + dockerode@4.0.12: + resolution: {integrity: sha512-/bCZd6KlGcjZO8Buqmi/vXuqEGVEZ0PNjx/biBNqJD3MhK9DmdiAuKxqfNhflgDESDIiBz3qF+0e55+CpnrUcw==} + engines: {node: '>= 8.0'} + dom-helpers@5.2.1: resolution: {integrity: sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==} @@ -3082,6 +3265,9 @@ packages: eventemitter3@5.0.4: resolution: {integrity: sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==} + events-universal@1.0.1: + resolution: {integrity: sha512-LUd5euvbMLpwOF8m6ivPCbhQeSiYVNb8Vs0fQ8QjXo0JTkEHpz8pxdQf0gStltaPpw0Cca8b39KxvK9cfKRiAw==} + events@3.3.0: resolution: 
{integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} engines: {node: '>=0.8.x'} @@ -3122,6 +3308,9 @@ packages: resolution: {integrity: sha512-jt2DW/aNFNwke7AUd+Z+e6pz39KO5rzdbbFCg2sGafS4mk13MI7Z8O5z9cADNn5lhGODIgLwug6TZO2ctf7kcw==} engines: {node: '>=6.0.0'} + fast-fifo@1.3.2: + resolution: {integrity: sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==} + fast-glob@3.3.3: resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} engines: {node: '>=8.6.0'} @@ -3282,6 +3471,10 @@ packages: resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} engines: {node: '>= 0.4'} + get-port@7.2.0: + resolution: {integrity: sha512-afP4W205ONCuMoPBqcR6PSXnzX35KTcJygfJfcp+QY+uwm3p20p1YczWXhlICIzGMCxYBQcySEcOgsJcrkyobg==} + engines: {node: '>=16'} + get-proto@1.0.1: resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} engines: {node: '>= 0.4'} @@ -3567,6 +3760,10 @@ packages: is-promise@4.0.0: resolution: {integrity: sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==} + is-stream@2.0.1: + resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + is-typed-array@1.1.15: resolution: {integrity: sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==} engines: {node: '>= 0.4'} @@ -3725,6 +3922,10 @@ packages: jsonschema@1.5.0: resolution: {integrity: sha512-K+A9hhqbn0f3pJX17Q/7H6yQfD/5OXgdrR5UE12gMXCiN9D5Xq2o5mddV2QEcX/bjla99ASsAAQUyMCCRWAEhw==} + kafkajs@2.2.4: + resolution: {integrity: sha512-j/YeapB1vfPT2iOIUn/vxdyKEuhuY2PxMBvf5JWux6iSaukAccrMtXEY/Lb7OvavDhOWME589bpLrEdnVHjfjA==} + engines: {node: '>=14.0.0'} + 
kapsule@1.16.3: resolution: {integrity: sha512-4+5mNNf4vZDSwPhKprKwz3330iisPrb08JyMgbsdFrimBCKNHecua/WBwvVg3n7vwx0C1ARjfhwIpbrbd9n5wg==} engines: {node: '>=12'} @@ -3756,6 +3957,10 @@ packages: resolution: {integrity: sha512-CasD9OCEQSFIam2U8efFK81Yeg8vNMTBUqtMOHlrcWQHqUX3HeCl9Dr31u4toV7emlH8Mymk5+9p0lL6mKb/Xw==} engines: {node: '>=14.16'} + lazystream@1.0.1: + resolution: {integrity: sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==} + engines: {node: '>= 0.6.3'} + levn@0.3.0: resolution: {integrity: sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==} engines: {node: '>= 0.8.0'} @@ -3932,6 +4137,11 @@ packages: engines: {node: '>=10'} hasBin: true + mkdirp@3.0.1: + resolution: {integrity: sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==} + engines: {node: '>=10'} + hasBin: true + mlly@1.8.0: resolution: {integrity: sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==} @@ -3974,6 +4184,9 @@ packages: resolution: {integrity: sha512-Q6TPsTrlEoELXQ47tSBYcAZ800PQN9gtSImRUqQYoBq+Q7riIUAoDgf3tuMv6PuwonO86SBIx5GfOxvS4A/4kw==} engines: {node: '>=12.0'} + nan@2.26.2: + resolution: {integrity: sha512-0tTvBTYkt3tdGw22nrAy50x7gpbGCCFH3AFcyS5WiUu7Eu4vWlri1woE6qHBSfy11vksDqkiwjOnlR7WV8G1Hw==} + nanoid@3.3.11: resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} @@ -4283,6 +4496,17 @@ packages: prop-types@15.8.1: resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==} + proper-lockfile@4.1.2: + resolution: {integrity: sha512-TjNPblN4BwAWMXU8s9AEz4JmQxnD1NNL7bNOY/AKUzyamc379FWASUhc/K1pL2noVb+XmZKLL68cjzLsiOAMaA==} + + properties-reader@3.0.1: + resolution: {integrity: 
sha512-WPn+h9RGEExOKdu4bsF4HksG/uzd3cFq3MFtq8PsFeExPse5Ha/VOjQNyHhjboBFwGXGev6muJYTSPAOkROq2g==} + engines: {node: '>=18'} + + protobufjs@7.5.6: + resolution: {integrity: sha512-M71sTMB146U3u0di3yup8iM+zv8yPRNQVr1KK4tyBitl3qFvEGucq/rGDRShD2rsJhtN02RJaJ7j5X5hmy8SJg==} + engines: {node: '>=12.0.0'} + protobufjs@8.0.0: resolution: {integrity: sha512-jx6+sE9h/UryaCZhsJWbJtTEy47yXoGNYI4z8ZaRncM0zBKeRqjO2JEcOUYwrYGb1WLhXM1FfMzW3annvFv0rw==} engines: {node: '>=12.0.0'} @@ -4410,6 +4634,9 @@ packages: resolution: {integrity: sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + readdir-glob@1.1.3: + resolution: {integrity: sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==} + readdirp@3.6.0: resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} engines: {node: '>=8.10.0'} @@ -4473,6 +4700,10 @@ packages: retimeable-signal@1.0.1: resolution: {integrity: sha512-Cy26CYfbWnYu8HMoJeDhaMpW/EYFIbne3vMf6G9RSrOyWYXbPehja/BEdzpqmM84uy2bfBD7NPZhoQ4GZEtgvg==} + retry@0.12.0: + resolution: {integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==} + engines: {node: '>= 4'} + reusify@1.1.0: resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} @@ -4603,6 +4834,9 @@ packages: siginfo@2.0.0: resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} + signal-exit@3.0.7: + resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + signal-exit@4.1.0: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} @@ -4654,9 
+4888,19 @@ packages: resolution: {integrity: sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==} engines: {node: '>= 12'} + split-ca@1.0.1: + resolution: {integrity: sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==} + sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} + ssh-remote-port-forward@1.0.4: + resolution: {integrity: sha512-x0LV1eVDwjf1gmG7TTnfqIzf+3VPRz7vrNIjX6oYLbeCrf/PeVY6hkT68Mg+q02qXxQhrLjB0jfgvhevoCRmLQ==} + + ssh2@1.17.0: + resolution: {integrity: sha512-wPldCk3asibAjQ/kziWQQt1Wh3PgDFpC0XpwclzKcdT1vql6KeYxf5LIt4nlFkUeR8WuphYMKqUA56X4rjbfgQ==} + engines: {node: '>=10.16.0'} + stackback@0.0.2: resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} @@ -4671,6 +4915,9 @@ packages: std-env@3.10.0: resolution: {integrity: sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==} + streamx@2.25.0: + resolution: {integrity: sha512-0nQuG6jf1w+wddNEEXCF4nTg3LtufWINB5eFEN+5TNZW7KWJp6x87+JFL43vaAUPyCfH1wID+mNVyW6OHtFamg==} + string-format@2.0.0: resolution: {integrity: sha512-bbEs3scLeYNXLecRRuk6uJxdXUSj6le/8rNPHChIJTn2V79aXVTR1EH2OH5zLKKoz0V02fOUKZZcw01pLUShZA==} @@ -4748,10 +4995,25 @@ packages: tar-fs@2.1.4: resolution: {integrity: sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==} + tar-fs@3.1.2: + resolution: {integrity: sha512-QGxxTxxyleAdyM3kpFs14ymbYmNFrfY+pHj7Z8FgtbZ7w2//VAgLMac7sT6nRpIHjppXO2AwwEOg0bPFVRcmXw==} + tar-stream@2.2.0: resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==} engines: {node: '>=6'} + tar-stream@3.2.0: + resolution: {integrity: sha512-ojzvCvVaNp6aOTFmG7jaRD0meowIAuPc3cMMhSgKiVWws1GyHbGd/xvnyuRKcKlMpt3qvxx6r0hreCNITP9hIg==} + + 
teex@1.0.1: + resolution: {integrity: sha512-eYE6iEI62Ni1H8oIa7KlDU6uQBtqr4Eajni3wX7rpfXD8ysFx8z0+dri+KWEPWpBsxXfxu58x/0jvTVT1ekOSg==} + + testcontainers@11.14.0: + resolution: {integrity: sha512-r9pniwv/iwzyHaI7gwAvAm4Y+IvjJg3vBWdjrUCaDMc2AXIr4jKbq7jJO18Mw2ybs73pZy1Aj7p/4RVBGMRWjg==} + + text-decoder@1.2.7: + resolution: {integrity: sha512-vlLytXkeP4xvEq2otHeJfSQIRyWxo/oZGEbXrtEEF9Hnmrdly59sUbzZ/QgyWuLYHctCHxFF4tRQZNQ9k60ExQ==} + thenify-all@1.6.0: resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} engines: {node: '>=0.8'} @@ -4809,6 +5071,10 @@ packages: resolution: {integrity: sha512-UdiSoX6ypifLmrfQ/XfiawN6hkjSBpCjhKxxZcWlUUmoXLaCKQU0bx4HF/tdDK2uzRuchf1txGvrWBzYREssoQ==} engines: {node: '>=14.14'} + tmp@0.2.5: + resolution: {integrity: sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==} + engines: {node: '>=14.14'} + to-buffer@1.2.2: resolution: {integrity: sha512-db0E3UJjcFhpDhAF4tLo03oli3pwl3dbnzXOUIlRKrp+ldk/VUxzpWYZENsw2SZiuBjHAk7DfB0VU7NKdpb6sw==} engines: {node: '>= 0.4'} @@ -4925,6 +5191,9 @@ packages: resolution: {integrity: sha512-OxbzDES66+x7nnKGg2MwBA1ypVsZoDTLHpeaP4giyiHSixbsiTaMyeJqbEyvBdp5Cm28fc+8GG6RdQtic0ijwQ==} hasBin: true + tweetnacl@0.14.5: + resolution: {integrity: sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==} + type-check@0.3.2: resolution: {integrity: sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==} engines: {node: '>= 0.8.0'} @@ -4999,6 +5268,9 @@ packages: resolution: {integrity: sha512-X2wH19RAPZE3+ldGicOkoj/SIA83OIxcJ6Cuaw23hf8Xc6fQpvZXY0SftE2JgS0QhYLUG4uwodSI3R53keyh7w==} engines: {node: '>=14'} + undici-types@5.26.5: + resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + undici-types@6.19.8: resolution: {integrity: 
sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==} @@ -5009,6 +5281,10 @@ packages: resolution: {integrity: sha512-VfQPToRA5FZs/qJxLIinmU59u0r7LXqoJkCzinq3ckNJp3vKEh7jTWN589YQ5+aoAC/TGRLyJLCPKcLQbM8r9g==} engines: {node: '>=18.17'} + undici@7.25.0: + resolution: {integrity: sha512-xXnp4kTyor2Zq+J1FfPI6Eq3ew5h6Vl0F/8d9XU5zZQf1tX9s2Su1/3PiMmUANFULpmksxkClamIZcaUqryHsQ==} + engines: {node: '>=20.18.1'} + universalify@0.1.2: resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==} engines: {node: '>= 4.0.0'} @@ -5044,6 +5320,11 @@ packages: util-deprecate@1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + uuid@10.0.0: + resolution: {integrity: sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==} + deprecated: uuid@10 and below is no longer supported. For ESM codebases, update to uuid@latest. For CommonJS codebases, use uuid@11 (but be aware this version will likely be deprecated in 2028). 
+ hasBin: true + uuid@8.3.2: resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} hasBin: true @@ -5341,6 +5622,10 @@ packages: resolution: {integrity: sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==} engines: {node: '>=10'} + yargs-parser@21.1.1: + resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + yargs-unparser@2.0.0: resolution: {integrity: sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==} engines: {node: '>=10'} @@ -5349,6 +5634,10 @@ packages: resolution: {integrity: sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==} engines: {node: '>=10'} + yargs@17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + yn@3.1.1: resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} engines: {node: '>=6'} @@ -5357,6 +5646,10 @@ packages: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} + zip-stream@6.0.1: + resolution: {integrity: sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==} + engines: {node: '>= 14'} + zksync-ethers@5.11.1: resolution: {integrity: sha512-Znl2p0gporGnHbAO0KKM1TIQpyRQKCi8nf1kOlZuTVCvlgBwhweWjTy53le96ZOoR3J5LUXAk7aYil2czSLJZw==} engines: {node: '>=16.0.0'} @@ -5520,6 +5813,8 @@ snapshots: '@babel/helper-string-parser': 7.27.1 '@babel/helper-validator-identifier': 7.28.5 + '@balena/dockerignore@1.0.2': {} + '@bcoe/v8-coverage@1.0.2': {} '@chainsafe/as-chacha20poly1305@0.1.0': {} @@ -5998,6 +6293,25 @@ snapshots: '@ethersproject/properties': 5.8.0 
'@ethersproject/strings': 5.8.0 + '@grpc/grpc-js@1.14.3': + dependencies: + '@grpc/proto-loader': 0.8.0 + '@js-sdsl/ordered-map': 4.4.2 + + '@grpc/proto-loader@0.7.15': + dependencies: + lodash.camelcase: 4.3.0 + long: 5.3.2 + protobufjs: 7.5.6 + yargs: 17.7.2 + + '@grpc/proto-loader@0.8.0': + dependencies: + lodash.camelcase: 4.3.0 + long: 5.3.2 + protobufjs: 7.5.6 + yargs: 17.7.2 + '@hono/node-server@1.19.10(hono@4.12.4)': dependencies: hono: 4.12.4 @@ -6035,6 +6349,14 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.5 + '@js-sdsl/ordered-map@4.4.2': {} + + '@kwsites/file-exists@1.1.1': + dependencies: + debug: 4.4.3(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + '@leichtgewicht/ip-codec@2.0.5': {} '@libp2p/autonat@3.0.18': @@ -6700,6 +7022,8 @@ snapshots: '@protobufjs/codegen@2.0.4': {} + '@protobufjs/codegen@2.0.5': {} + '@protobufjs/eventemitter@1.1.0': {} '@protobufjs/fetch@1.1.0': @@ -6711,12 +7035,16 @@ snapshots: '@protobufjs/inquire@1.1.0': {} + '@protobufjs/inquire@1.1.1': {} + '@protobufjs/path@1.1.2': {} '@protobufjs/pool@1.1.0': {} '@protobufjs/utf8@1.1.0': {} + '@protobufjs/utf8@1.1.1': {} + '@rdfjs/types@2.0.1': dependencies: '@types/node': 22.19.11 @@ -6890,6 +7218,16 @@ snapshots: '@standard-schema/spec@1.1.0': {} + '@testcontainers/kafka@11.14.0': + dependencies: + compare-versions: 6.1.1 + testcontainers: 11.14.0 + transitivePeerDependencies: + - bare-abort-controller + - bare-buffer + - react-native-b4a + - supports-color + '@tsconfig/node10@1.0.12': {} '@tsconfig/node12@1.0.11': {} @@ -6993,6 +7331,17 @@ snapshots: dependencies: '@types/node': 22.19.11 + '@types/docker-modem@3.0.6': + dependencies: + '@types/node': 22.19.11 + '@types/ssh2': 1.15.5 + + '@types/dockerode@4.0.1': + dependencies: + '@types/docker-modem': 3.0.6 + '@types/node': 22.19.11 + '@types/ssh2': 1.15.5 + '@types/estree@1.0.8': {} '@types/glob@7.2.0': @@ -7016,6 +7365,10 @@ snapshots: '@rdfjs/types': 2.0.1 
'@types/node': 22.19.11 + '@types/node@18.19.130': + dependencies: + undici-types: 5.26.5 + '@types/node@22.19.11': dependencies: undici-types: 6.21.0 @@ -7044,6 +7397,19 @@ snapshots: dependencies: '@types/node': 22.19.11 + '@types/ssh2-streams@0.1.13': + dependencies: + '@types/node': 22.19.11 + + '@types/ssh2@0.5.52': + dependencies: + '@types/node': 22.19.11 + '@types/ssh2-streams': 0.1.13 + + '@types/ssh2@1.15.5': + dependencies: + '@types/node': 18.19.130 + '@types/stats.js@0.17.4': {} '@types/three@0.183.1': @@ -7225,6 +7591,30 @@ snapshots: normalize-path: 3.0.0 picomatch: 2.3.1 + archiver-utils@5.0.2: + dependencies: + glob: 10.5.0 + graceful-fs: 4.2.11 + is-stream: 2.0.1 + lazystream: 1.0.1 + lodash: 4.17.23 + normalize-path: 3.0.0 + readable-stream: 4.7.0 + + archiver@7.0.1: + dependencies: + archiver-utils: 5.0.2 + async: 3.2.6 + buffer-crc32: 1.0.0 + readable-stream: 4.7.0 + readdir-glob: 1.1.3 + tar-stream: 3.2.0 + zip-stream: 6.0.1 + transitivePeerDependencies: + - bare-abort-controller + - bare-buffer + - react-native-b4a + arg@4.1.3: {} argparse@1.0.10: @@ -7239,6 +7629,10 @@ snapshots: array-union@2.1.0: {} + asn1@0.2.6: + dependencies: + safer-buffer: 2.1.2 + assertion-error@1.1.0: {} assertion-error@2.0.1: {} @@ -7262,8 +7656,12 @@ snapshots: estree-walker: 3.0.3 js-tokens: 10.0.0 + async-lock@1.4.1: {} + async@1.5.2: {} + async@3.2.6: {} + asynckit@0.4.0: {} at-least-node@1.0.0: {} @@ -7280,8 +7678,42 @@ snapshots: transitivePeerDependencies: - debug + b4a@1.8.1: {} + balanced-match@4.0.4: {} + bare-events@2.8.2: {} + + bare-fs@4.7.1: + dependencies: + bare-events: 2.8.2 + bare-path: 3.0.0 + bare-stream: 2.13.1(bare-events@2.8.2) + bare-url: 2.4.2 + fast-fifo: 1.3.2 + transitivePeerDependencies: + - bare-abort-controller + - react-native-b4a + + bare-os@3.9.1: {} + + bare-path@3.0.0: + dependencies: + bare-os: 3.9.1 + + bare-stream@2.13.1(bare-events@2.8.2): + dependencies: + streamx: 2.25.0 + teex: 1.0.1 + optionalDependencies: + bare-events: 
2.8.2 + transitivePeerDependencies: + - react-native-b4a + + bare-url@2.4.2: + dependencies: + bare-path: 3.0.0 + base-x@3.0.11: dependencies: safe-buffer: 5.2.1 @@ -7290,6 +7722,10 @@ snapshots: baseline-browser-mapping@2.10.0: {} + bcrypt-pbkdf@1.0.2: + dependencies: + tweetnacl: 0.14.5 + bech32@1.1.4: {} better-sqlite3@11.10.0: @@ -7385,6 +7821,8 @@ snapshots: create-hash: 1.2.0 safe-buffer: 5.2.1 + buffer-crc32@1.0.0: {} + buffer-from@1.1.2: {} buffer-reverse@1.0.1: {} @@ -7406,11 +7844,16 @@ snapshots: node-gyp-build: 4.8.4 optional: true + buildcheck@0.0.7: + optional: true + bundle-require@5.1.0(esbuild@0.27.3): dependencies: esbuild: 0.27.3 load-tsconfig: 0.2.5 + byline@5.0.0: {} + bytes@3.1.2: {} cac@6.7.14: {} @@ -7520,6 +7963,12 @@ snapshots: strip-ansi: 6.0.1 wrap-ansi: 7.0.0 + cliui@8.0.1: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + clsx@2.1.1: {} color-convert@1.9.3: @@ -7560,6 +8009,16 @@ snapshots: commander@8.3.0: {} + compare-versions@6.1.1: {} + + compress-commons@6.0.2: + dependencies: + crc-32: 1.2.2 + crc32-stream: 6.0.0 + is-stream: 2.0.1 + normalize-path: 3.0.0 + readable-stream: 4.7.0 + confbox@0.1.8: {} consola@3.4.2: {} @@ -7585,6 +8044,19 @@ snapshots: object-assign: 4.1.1 vary: 1.1.2 + cpu-features@0.0.10: + dependencies: + buildcheck: 0.0.7 + nan: 2.26.2 + optional: true + + crc-32@1.2.2: {} + + crc32-stream@6.0.0: + dependencies: + crc-32: 1.2.2 + readable-stream: 4.7.0 + create-hash@1.2.0: dependencies: cipher-base: 1.0.7 @@ -7789,6 +8261,31 @@ snapshots: dependencies: '@leichtgewicht/ip-codec': 2.0.5 + docker-compose@1.4.2: + dependencies: + yaml: 2.8.3 + + docker-modem@5.0.7: + dependencies: + debug: 4.4.3(supports-color@8.1.1) + readable-stream: 3.6.2 + split-ca: 1.0.1 + ssh2: 1.17.0 + transitivePeerDependencies: + - supports-color + + dockerode@4.0.12: + dependencies: + '@balena/dockerignore': 1.0.2 + '@grpc/grpc-js': 1.14.3 + '@grpc/proto-loader': 0.7.15 + docker-modem: 5.0.7 + protobufjs: 
7.5.6 + tar-fs: 2.1.4 + uuid: 10.0.0 + transitivePeerDependencies: + - supports-color + dom-helpers@5.2.1: dependencies: '@babel/runtime': 7.28.6 @@ -8052,6 +8549,12 @@ snapshots: eventemitter3@5.0.4: {} + events-universal@1.0.1: + dependencies: + bare-events: 2.8.2 + transitivePeerDependencies: + - bare-abort-controller + events@3.3.0: {} eventsource-parser@3.0.6: {} @@ -8111,6 +8614,8 @@ snapshots: fast-equals@5.4.0: {} + fast-fifo@1.3.2: {} + fast-glob@3.3.3: dependencies: '@nodelib/fs.stat': 2.0.5 @@ -8299,6 +8804,8 @@ snapshots: hasown: 2.0.2 math-intrinsics: 1.1.0 + get-port@7.2.0: {} + get-proto@1.0.1: dependencies: dunder-proto: 1.0.1 @@ -8680,6 +9187,8 @@ snapshots: is-promise@4.0.0: {} + is-stream@2.0.1: {} + is-typed-array@1.1.15: dependencies: which-typed-array: 1.1.20 @@ -8840,6 +9349,8 @@ snapshots: jsonschema@1.5.0: {} + kafkajs@2.2.4: {} + kapsule@1.16.3: dependencies: lodash-es: 4.17.23 @@ -8882,6 +9393,10 @@ snapshots: ky@0.33.3: {} + lazystream@1.0.1: + dependencies: + readable-stream: 2.3.8 + levn@0.3.0: dependencies: prelude-ls: 1.1.2 @@ -9063,6 +9578,8 @@ snapshots: mkdirp@1.0.4: {} + mkdirp@3.0.1: {} + mlly@1.8.0: dependencies: acorn: 8.16.0 @@ -9136,6 +9653,9 @@ snapshots: buffer: 6.0.3 readable-stream: 4.7.0 + nan@2.26.2: + optional: true + nanoid@3.3.11: {} nanoid@5.1.6: {} @@ -9443,6 +9963,34 @@ snapshots: object-assign: 4.1.1 react-is: 16.13.1 + proper-lockfile@4.1.2: + dependencies: + graceful-fs: 4.2.11 + retry: 0.12.0 + signal-exit: 3.0.7 + + properties-reader@3.0.1: + dependencies: + '@kwsites/file-exists': 1.1.1 + mkdirp: 3.0.1 + transitivePeerDependencies: + - supports-color + + protobufjs@7.5.6: + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/base64': 1.1.2 + '@protobufjs/codegen': 2.0.5 + '@protobufjs/eventemitter': 1.1.0 + '@protobufjs/fetch': 1.1.0 + '@protobufjs/float': 1.0.2 + '@protobufjs/inquire': 1.1.1 + '@protobufjs/path': 1.1.2 + '@protobufjs/pool': 1.1.0 + '@protobufjs/utf8': 1.1.1 + '@types/node': 
22.19.11 + long: 5.3.2 + protobufjs@8.0.0: dependencies: '@protobufjs/aspromise': 1.1.2 @@ -9603,6 +10151,10 @@ snapshots: process: 0.11.10 string_decoder: 1.3.0 + readdir-glob@1.1.3: + dependencies: + minimatch: 10.2.3 + readdirp@3.6.0: dependencies: picomatch: 2.3.1 @@ -9660,6 +10212,8 @@ snapshots: retimeable-signal@1.0.1: {} + retry@0.12.0: {} + reusify@1.1.0: {} rimraf@2.7.1: @@ -9854,6 +10408,8 @@ snapshots: siginfo@2.0.0: {} + signal-exit@3.0.7: {} + signal-exit@4.1.0: {} simple-concat@1.0.1: {} @@ -9921,8 +10477,23 @@ snapshots: source-map@0.7.6: {} + split-ca@1.0.1: {} + sprintf-js@1.0.3: {} + ssh-remote-port-forward@1.0.4: + dependencies: + '@types/ssh2': 0.5.52 + ssh2: 1.17.0 + + ssh2@1.17.0: + dependencies: + asn1: 0.2.6 + bcrypt-pbkdf: 1.0.2 + optionalDependencies: + cpu-features: 0.0.10 + nan: 2.26.2 + stackback@0.0.2: {} stacktrace-parser@0.1.11: @@ -9933,6 +10504,15 @@ snapshots: std-env@3.10.0: {} + streamx@2.25.0: + dependencies: + events-universal: 1.0.1 + fast-fifo: 1.3.2 + text-decoder: 1.2.7 + transitivePeerDependencies: + - bare-abort-controller + - react-native-b4a + string-format@2.0.0: {} string-width@4.2.3: @@ -10017,6 +10597,18 @@ snapshots: pump: 3.0.3 tar-stream: 2.2.0 + tar-fs@3.1.2: + dependencies: + pump: 3.0.3 + tar-stream: 3.2.0 + optionalDependencies: + bare-fs: 4.7.1 + bare-path: 3.0.0 + transitivePeerDependencies: + - bare-abort-controller + - bare-buffer + - react-native-b4a + tar-stream@2.2.0: dependencies: bl: 4.1.0 @@ -10025,6 +10617,53 @@ snapshots: inherits: 2.0.4 readable-stream: 3.6.2 + tar-stream@3.2.0: + dependencies: + b4a: 1.8.1 + bare-fs: 4.7.1 + fast-fifo: 1.3.2 + streamx: 2.25.0 + transitivePeerDependencies: + - bare-abort-controller + - bare-buffer + - react-native-b4a + + teex@1.0.1: + dependencies: + streamx: 2.25.0 + transitivePeerDependencies: + - bare-abort-controller + - react-native-b4a + + testcontainers@11.14.0: + dependencies: + '@balena/dockerignore': 1.0.2 + '@types/dockerode': 4.0.1 + archiver: 
7.0.1 + async-lock: 1.4.1 + byline: 5.0.0 + debug: 4.4.3(supports-color@8.1.1) + docker-compose: 1.4.2 + dockerode: 4.0.12 + get-port: 7.2.0 + proper-lockfile: 4.1.2 + properties-reader: 3.0.1 + ssh-remote-port-forward: 1.0.4 + tar-fs: 3.1.2 + tmp: 0.2.5 + undici: 7.25.0 + transitivePeerDependencies: + - bare-abort-controller + - bare-buffer + - react-native-b4a + - supports-color + + text-decoder@1.2.7: + dependencies: + b4a: 1.8.1 + transitivePeerDependencies: + - react-native-b4a + thenify-all@1.6.0: dependencies: thenify: 3.3.1 @@ -10084,6 +10723,8 @@ snapshots: tmp@0.2.4: {} + tmp@0.2.5: {} + to-buffer@1.2.2: dependencies: isarray: 2.0.5 @@ -10203,6 +10844,8 @@ snapshots: turbo-windows-64: 2.8.10 turbo-windows-arm64: 2.8.10 + tweetnacl@0.14.5: {} + type-check@0.3.2: dependencies: prelude-ls: 1.1.2 @@ -10277,12 +10920,16 @@ snapshots: unbash@2.2.0: {} + undici-types@5.26.5: {} + undici-types@6.19.8: {} undici-types@6.21.0: {} undici@6.23.0: {} + undici@7.25.0: {} + universalify@0.1.2: {} universalify@2.0.1: {} @@ -10308,6 +10955,8 @@ snapshots: util-deprecate@1.0.2: {} + uuid@10.0.0: {} + uuid@8.3.2: {} v8-compile-cache-lib@3.0.1: {} @@ -10536,6 +11185,8 @@ snapshots: yargs-parser@20.2.9: {} + yargs-parser@21.1.1: {} + yargs-unparser@2.0.0: dependencies: camelcase: 6.3.0 @@ -10553,10 +11204,26 @@ snapshots: y18n: 5.0.8 yargs-parser: 20.2.9 + yargs@17.7.2: + dependencies: + cliui: 8.0.1 + escalade: 3.2.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + yn@3.1.1: {} yocto-queue@0.1.0: {} + zip-stream@6.0.1: + dependencies: + archiver-utils: 5.0.2 + compress-commons: 6.0.2 + readable-stream: 4.7.0 + zksync-ethers@5.11.1(ethers@5.8.0(bufferutil@4.1.0)(utf-8-validate@5.0.10)): dependencies: ethers: 5.8.0(bufferutil@4.1.0)(utf-8-validate@5.0.10) diff --git a/vitest.coverage.ts b/vitest.coverage.ts index 7123b7e22..5f5e9924c 100644 --- a/vitest.coverage.ts +++ b/vitest.coverage.ts @@ -160,9 +160,16 @@ 
export const kosavaEpcisCoverage: CoverageThresholds = { }; export const kosavaKafkaCoverage: CoverageThresholds = { + // Slice 04 ratchet, scoped to `src/**` (excluding the `src/index.ts` + // re-export barrel). All lines, statements, and functions are covered. The + // last two uncovered branches are micro-defensive paths: the + // `(err as { name?: string } | null)?.name ?? 'Error'` null-guard in + // `classifyError` (kafkajs never throws `null`) and the `if (timer)` clear + // in `runWithTimeout` (the timer is always assigned synchronously). 96 is + // the floor; current actual is 97.36. lines: 100, functions: 100, - branches: 50, + branches: 96, statements: 100, };