From ca4777d8b4a931759fa8333e209a833cfdc5717f Mon Sep 17 00:00:00 2001 From: stack72 Date: Tue, 5 May 2026 00:25:12 +0100 Subject: [PATCH] refactor(persistence): extract LockfileRepository (W2 prequel for swamp-club#231) (swamp-club#233) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Pure persistence-layer refactor that consolidates the scattered upstream_extensions.json read/write surface into a single `LockfileRepository`. No user-visible CLI behavior changes; lockfile JSON shape is byte-identical pre/post. Filed and lifecycled separately as a refactor-prequel for W2 (swamp-club#231) per its Phase A audit. The pre-committed mechanical threshold rule (>7 callsites → SPLIT) chose this carve-out: ~160 LOC new + 16 callsites touched. ## What ships - `src/infrastructure/persistence/lockfile_repository.ts` — sole gateway for upstream_extensions.json. ASYMMETRIC semantics: - Reads serve from a snapshot captured at construction time (preserves W1b's "snapshot frozen at construction" race-window contract documented in `extension_repository.ts:88-95`). - Writes acquire the advisory file lock, re-read disk, merge, atomic-write, release, and update the local cache. Two concurrent writers don't clobber each other. - 12 unit tests including a cross-instance snapshot-divergence regression and concurrent-writer contention. - `ExtensionRepository` constructor migrated to take `LockfileRepository` (replaces the W1b `getLockedVersion` closure). All 5 construction sites + the stub helper move in the same commit. - 9 `installExtension` callsites and 16 lockfile reader files migrated to the repository surface. - Duplicate `acquireLock` and `removeUpstreamExtension` helpers in pull.ts and rm.ts deleted (consolidated into the repository). - Three DI surfaces (`doctor.ts`, `list.ts`, `update.ts`) take `LockfileRepository` directly instead of function-shape DIs. - `emptyLockedVersionLookup` deleted (zero callers verified). 
## Why split from W2 W2 lifecycle services (Install/Remove/Upgrade) need to write the lockfile as part of their unit-of-work; W1b only exposed read access. Bundling this refactor with W2's ~1300 LOC of new service code would have pushed the W2 PR over the auto-ship-on-merge soak threshold. Splitting matches the W1a → W1b precedent (each ships and soaks before the next builds on it). ## Test plan - [x] deno check, deno lint, deno fmt, deno run test (5397 passed, 0 failed) - [x] deno run compile (binary built) - [x] Author smoke against a real repo: pull + rm round-trip, list (log + JSON), doctor extensions, model type search, sequential pulls (writeEntry re-read-under-lock), extension update --check. All green. - [x] Cross-process concurrency: 50 concurrent Deno subprocesses writing distinct entries to the same lockfile, repeated 3 times. All 150 operations completed; no failures, no leftover .lock files, no structural corruption (~600-800ms per 50-worker run). - [ ] Reviewer smoke on a different repo - [ ] ~2-day diversity-matrix soak (Linux + macOS, mixed local/pulled, version transitions). REQUIRED before merge per the prequel-specific gate from swamp-club#231 Phase A. ## Forward-only revert posture No schema changes; no migration. Lockfile JSON shape is unchanged so a binary downgrade reads the same file back. Reverting is purely a code revert. ## Out of scope - W2 lifecycle services (swamp-club#231) — depends on this landing. - Cross-platform lockfile concurrency improvements — W3 territory. - ADV-13 test-seam audit (SQLite-commit-vs-lockfile-write fault injection) — stays in W2's plan v4 step 10; this prequel doesn't introduce that boundary. 
Co-Authored-By: Claude Opus 4.7 (1M context) --- src/cli/auto_resolver_adapters.ts | 14 +- src/cli/auto_resolver_adapters_test.ts | 6 +- src/cli/commands/doctor_extensions.ts | 9 +- src/cli/commands/extension_outdated.ts | 2 +- src/cli/commands/extension_pull.ts | 17 +- src/cli/commands/extension_pull_test.ts | 42 ++- src/cli/commands/extension_rm.ts | 2 +- src/cli/commands/extension_rm_test.ts | 56 ++-- src/cli/commands/extension_search.ts | 9 +- src/cli/commands/extension_update.ts | 8 +- src/cli/commands/open.ts | 9 +- src/cli/create_extension_install_deps.ts | 5 +- src/cli/mod.ts | 19 +- src/cli/resolve_datastore.ts | 13 +- .../datastore/user_datastore_loader_test.ts | 6 +- src/domain/drivers/user_driver_loader_test.ts | 6 +- src/domain/models/user_model_loader_test.ts | 6 +- src/domain/reports/user_report_loader_test.ts | 6 +- src/domain/vaults/user_vault_loader_test.ts | 6 +- .../persistence/extension_repository.ts | 42 ++- .../persistence/extension_repository_test.ts | 13 +- .../persistence/lockfile_repository.ts | 213 ++++++++++++++ .../persistence/lockfile_repository_test.ts | 256 +++++++++++++++++ .../test_helpers/stub_extension_repository.ts | 38 ++- src/libswamp/extensions/doctor.ts | 10 +- src/libswamp/extensions/doctor_test.ts | 26 +- src/libswamp/extensions/enumerate_pulled.ts | 5 +- src/libswamp/extensions/install.ts | 18 +- src/libswamp/extensions/install_test.ts | 23 +- src/libswamp/extensions/layout.ts | 5 +- src/libswamp/extensions/list.ts | 19 +- src/libswamp/extensions/list_test.ts | 32 +-- src/libswamp/extensions/local_edits.ts | 6 +- src/libswamp/extensions/pull.ts | 232 ++++----------- src/libswamp/extensions/pull_test.ts | 9 +- src/libswamp/extensions/rm.ts | 126 ++------ src/libswamp/extensions/rm_test.ts | 271 +++++++++++------- src/libswamp/extensions/update.ts | 20 +- src/libswamp/extensions/update_test.ts | 61 ++-- src/libswamp/mod.ts | 5 +- 40 files changed, 1051 insertions(+), 620 deletions(-) create mode 100644 
src/infrastructure/persistence/lockfile_repository.ts create mode 100644 src/infrastructure/persistence/lockfile_repository_test.ts diff --git a/src/cli/auto_resolver_adapters.ts b/src/cli/auto_resolver_adapters.ts index 76955c16..3249aa0e 100644 --- a/src/cli/auto_resolver_adapters.ts +++ b/src/cli/auto_resolver_adapters.ts @@ -22,7 +22,6 @@ import { SWAMP_SUBDIRS, swampPath, } from "../infrastructure/persistence/paths.ts"; -import { readUpstreamExtensions } from "../infrastructure/persistence/upstream_extensions.ts"; import type { DenoRuntime } from "../domain/runtime/deno_runtime.ts"; import { join } from "@std/path"; import type { @@ -35,6 +34,7 @@ import { enumeratePulledExtensionDirs, type ExtensionRegistryInfo, installExtension, + LockfileRepository, } from "../libswamp/mod.ts"; import { UserModelLoader } from "../domain/models/user_model_loader.ts"; import { UserVaultLoader } from "../domain/vaults/user_vault_loader.ts"; @@ -136,8 +136,8 @@ export function createAutoResolveInstallerAdapter( // output, not source. Clearing the bundle cache (a normal hygiene // operation) must not flip the inspection to truncated and steal // the user-WIP path from issue #121. - const upstream = await readUpstreamExtensions(lockfilePath); - const entry = upstream[extensionName]; + const inspectLockfileRepo = await LockfileRepository.create(lockfilePath); + const entry = inspectLockfileRepo.getEntry(extensionName); if (!entry) return { state: "missing" }; const path = swampPath(repoDir, "pulled-extensions", extensionName); try { @@ -176,6 +176,12 @@ export function createAutoResolveInstallerAdapter( // resolver cannot cover (e.g. two types resolving the same // extension concurrently). try { + // Construct a fresh LockfileRepository per install to capture a + // current snapshot — the InstallContext is single-use per its + // JSDoc. 
+ const lockfileRepository = await LockfileRepository.create( + lockfilePath, + ); const result = await installExtension( { name: extensionName, version: null }, { @@ -183,7 +189,7 @@ export function createAutoResolveInstallerAdapter( downloadArchive, getChecksum, logger, - lockfilePath, + lockfileRepository, skillsDir: swampPath(repoDir, SWAMP_SUBDIRS.pulledSkills), repoDir, force: false, diff --git a/src/cli/auto_resolver_adapters_test.ts b/src/cli/auto_resolver_adapters_test.ts index 604d7931..39d1d4de 100644 --- a/src/cli/auto_resolver_adapters_test.ts +++ b/src/cli/auto_resolver_adapters_test.ts @@ -24,6 +24,7 @@ import { createAutoResolveInstallerAdapter } from "./auto_resolver_adapters.ts"; import type { DenoRuntime } from "../domain/runtime/deno_runtime.ts"; import { ExtensionCatalogStore } from "../infrastructure/persistence/extension_catalog_store.ts"; import { ExtensionRepository } from "../infrastructure/persistence/extension_repository.ts"; +import { LockfileRepository } from "../infrastructure/persistence/lockfile_repository.ts"; import { modelRegistry } from "../domain/models/model.ts"; import { ModelType } from "../domain/models/model_type.ts"; import type { ModelDefinition } from "../domain/models/model.ts"; @@ -56,7 +57,10 @@ function makeRepoForCatalog( ): ExtensionRepository { return new ExtensionRepository({ catalog, - getLockedVersion: () => null, + lockfileRepository: new LockfileRepository( + "/test/repo/upstream_extensions.json", + {}, + ), repoRoot, }); } diff --git a/src/cli/commands/doctor_extensions.ts b/src/cli/commands/doctor_extensions.ts index 6be3bcd8..c2916ff3 100644 --- a/src/cli/commands/doctor_extensions.ts +++ b/src/cli/commands/doctor_extensions.ts @@ -48,7 +48,6 @@ import { getExtensionLoadWarnings, resetExtensionLoadWarnings, } from "../../infrastructure/logging/extension_load_warnings.ts"; -import { readUpstreamExtensions } from "../../infrastructure/persistence/upstream_extensions.ts"; import { modelRegistry } from 
"../../domain/models/model.ts"; import { vaultTypeRegistry } from "../../domain/vaults/vault_type_registry.ts"; import { driverTypeRegistry } from "../../domain/drivers/driver_type_registry.ts"; @@ -56,6 +55,7 @@ import { datastoreTypeRegistry } from "../../domain/datastore/datastore_type_reg import { reportRegistry } from "../../domain/reports/report_registry.ts"; import { ExtensionCatalogStore } from "../../infrastructure/persistence/extension_catalog_store.ts"; import { ExtensionRepository } from "../../infrastructure/persistence/extension_repository.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; import { swampPath } from "../../infrastructure/persistence/paths.ts"; import { createDoctorExtensionsRenderer } from "../../presentation/renderers/doctor_extensions.ts"; import { @@ -119,12 +119,12 @@ export const doctorExtensionsCommand = new Command() // command in the same repo. // W1b: forceCatalogRescan(repoDir) → repository.invalidateAll(). try { - const upstream = await readUpstreamExtensions(lockfilePath); + const lockfileRepository = await LockfileRepository.create(lockfilePath); const rescanRepo = new ExtensionRepository({ catalog: new ExtensionCatalogStore( swampPath(repoDir, "_extension_catalog.db"), ), - getLockedVersion: (name) => upstream[name]?.version ?? 
null, + lockfileRepository, repoRoot: repoDir, }); try { @@ -177,12 +177,13 @@ export const doctorExtensionsCommand = new Command() const controller = new AbortController(); const renderer = createDoctorExtensionsRenderer(cliCtx.outputMode); + const doctorLockfileRepo = await LockfileRepository.create(lockfilePath); await consumeStream( doctorExtensions({ registries, getWarnings: getExtensionLoadWarnings, resetState: resetExtensionLoadWarnings, - readUpstreamExtensions: () => readUpstreamExtensions(lockfilePath), + lockfileRepository: doctorLockfileRepo, repoDir, skillsDir: repoRelativeSkillsDir, abortSignal: controller.signal, diff --git a/src/cli/commands/extension_outdated.ts b/src/cli/commands/extension_outdated.ts index 706b80e9..8c263972 100644 --- a/src/cli/commands/extension_outdated.ts +++ b/src/cli/commands/extension_outdated.ts @@ -118,7 +118,7 @@ export const extensionOutdatedCommand = new Command() const lockfilePath = join(absoluteModelsDir, "upstream_extensions.json"); const ctx = createLibSwampContext({ logger: cliCtx.logger }); - const deps = createExtensionUpdateDeps({ + const deps = await createExtensionUpdateDeps({ lockfilePath, serverUrl: resolveServerUrl(), // outdated is read-only — installation is wired but never invoked diff --git a/src/cli/commands/extension_pull.ts b/src/cli/commands/extension_pull.ts index 98941884..982d61ce 100644 --- a/src/cli/commands/extension_pull.ts +++ b/src/cli/commands/extension_pull.ts @@ -42,6 +42,7 @@ import { extensionPull, type ExtensionPullDeps, type ExtensionRegistryInfo, + type LockfileRepository, parseExtensionRef, resolveServerUrl, validateExtensionName, @@ -62,9 +63,8 @@ export { type InstallContext, installExtension, type InstallResult, + LockfileRepository, parseExtensionRef, - removeUpstreamExtension, - updateUpstreamExtensions, validateExtensionName, } from "../../libswamp/mod.ts"; @@ -91,8 +91,11 @@ export interface PullContext { downloadArchive: (name: string, version: string) => Promise; 
getChecksum: (name: string, version: string) => Promise; logger: Logger; - /** Full path to the upstream_extensions.json lockfile. */ - lockfilePath: string; + /** + * Lockfile repository owning read+write of upstream_extensions.json. + * Captures a snapshot at construction; construct fresh per pull. + */ + lockfileRepository: LockfileRepository; /** Tool-aware skills destination (e.g. `.claude/skills/`). */ skillsDir: string; repoDir: string; @@ -115,7 +118,7 @@ export async function pullExtension( getExtension: ctx.getExtension, downloadArchive: ctx.downloadArchive, getChecksum: ctx.getChecksum, - lockfilePath: ctx.lockfilePath, + lockfileRepository: ctx.lockfileRepository, skillsDir: ctx.skillsDir, repoDir: ctx.repoDir, alreadyPulled: ctx.alreadyPulled, @@ -208,7 +211,7 @@ export const extensionPullCommand = new Command() // 7. Create deps via factory and pull const serverUrl = resolveServerUrl(); - const deps = createExtensionPullDeps( + const deps = await createExtensionPullDeps( serverUrl, lockfilePath, skillsDir, @@ -220,7 +223,7 @@ export const extensionPullCommand = new Command() downloadArchive: deps.downloadArchive, getChecksum: deps.getChecksum, logger: ctx.logger, - lockfilePath, + lockfileRepository: deps.lockfileRepository, skillsDir, repoDir, force: options.force ?? 
false, diff --git a/src/cli/commands/extension_pull_test.ts b/src/cli/commands/extension_pull_test.ts index 3415cd20..467d15df 100644 --- a/src/cli/commands/extension_pull_test.ts +++ b/src/cli/commands/extension_pull_test.ts @@ -21,8 +21,8 @@ import { assertEquals, assertThrows } from "@std/assert"; import { assertStringIncludes } from "@std/assert/string-includes"; import { detectConflicts, + LockfileRepository, parseExtensionRef, - updateUpstreamExtensions, } from "./extension_pull.ts"; import type { UpstreamExtensionEntry } from "../../infrastructure/persistence/upstream_extensions.ts"; import { UserError } from "../../domain/errors.ts"; @@ -56,7 +56,7 @@ Deno.test("parseExtensionRef throws on empty version after @", () => { assertStringIncludes(error.message, "Version cannot be empty"); }); -Deno.test("updateUpstreamExtensions persists files array", async () => { +Deno.test("LockfileRepository.writeEntry persists files array", async () => { const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); try { const lockfilePath = join(tmpDir, "upstream_extensions.json"); @@ -64,11 +64,10 @@ Deno.test("updateUpstreamExtensions persists files array", async () => { "extensions/models/foo/bar.yaml", "extensions/models/foo/baz.ts", ]; - await updateUpstreamExtensions(lockfilePath, "@test/ext", "1.0.0", files); + const repo = await LockfileRepository.create(lockfilePath); + await repo.writeEntry("@test/ext", "1.0.0", files); - const content = await Deno.readTextFile( - join(tmpDir, "upstream_extensions.json"), - ); + const content = await Deno.readTextFile(lockfilePath); const data = JSON.parse(content) as Record; assertEquals(data["@test/ext"].version, "1.0.0"); @@ -79,22 +78,18 @@ Deno.test("updateUpstreamExtensions persists files array", async () => { } }); -Deno.test("updateUpstreamExtensions preserves existing entries", async () => { +Deno.test("LockfileRepository.writeEntry preserves existing entries", async () => { const tmpDir = await Deno.makeTempDir({ 
prefix: "swamp_test_" }); try { const lockfilePath = join(tmpDir, "upstream_extensions.json"); - // Write first extension - await updateUpstreamExtensions(lockfilePath, "@test/first", "1.0.0", [ - "a.yaml", - ]); - // Write second extension - await updateUpstreamExtensions(lockfilePath, "@test/second", "2.0.0", [ - "b.yaml", - ]); - - const content = await Deno.readTextFile( - join(tmpDir, "upstream_extensions.json"), - ); + const repoFirst = await LockfileRepository.create(lockfilePath); + await repoFirst.writeEntry("@test/first", "1.0.0", ["a.yaml"]); + // Sibling instance simulates a second process; re-reads disk under + // lock so the merged write picks up the prior entry. + const repoSecond = await LockfileRepository.create(lockfilePath); + await repoSecond.writeEntry("@test/second", "2.0.0", ["b.yaml"]); + + const content = await Deno.readTextFile(lockfilePath); const data = JSON.parse(content) as Record; assertEquals(data["@test/first"].version, "1.0.0"); @@ -106,15 +101,14 @@ Deno.test("updateUpstreamExtensions preserves existing entries", async () => { } }); -Deno.test("updateUpstreamExtensions handles empty files array", async () => { +Deno.test("LockfileRepository.writeEntry handles empty files array", async () => { const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); try { const lockfilePath = join(tmpDir, "upstream_extensions.json"); - await updateUpstreamExtensions(lockfilePath, "@test/empty", "1.0.0", []); + const repo = await LockfileRepository.create(lockfilePath); + await repo.writeEntry("@test/empty", "1.0.0", []); - const content = await Deno.readTextFile( - join(tmpDir, "upstream_extensions.json"), - ); + const content = await Deno.readTextFile(lockfilePath); const data = JSON.parse(content) as Record; assertEquals(data["@test/empty"].files, []); diff --git a/src/cli/commands/extension_rm.ts b/src/cli/commands/extension_rm.ts index e4cde303..8c30e5c0 100644 --- a/src/cli/commands/extension_rm.ts +++ 
b/src/cli/commands/extension_rm.ts @@ -105,7 +105,7 @@ export const extensionRemoveCommand = new Command() // Create libswamp context, deps, renderer const libCtx = createLibSwampContext({ logger: ctx.logger }); - const deps = createExtensionRmDeps(repoDir, lockfilePath); + const deps = await createExtensionRmDeps(repoDir, lockfilePath); const renderer = createExtensionRmRenderer(ctx.outputMode); const input = { extensionName: ref.name }; diff --git a/src/cli/commands/extension_rm_test.ts b/src/cli/commands/extension_rm_test.ts index c5f5aded..94e527fe 100644 --- a/src/cli/commands/extension_rm_test.ts +++ b/src/cli/commands/extension_rm_test.ts @@ -19,33 +19,26 @@ import { assertEquals } from "@std/assert"; import { join } from "@std/path"; -import { - removeUpstreamExtension, - updateUpstreamExtensions, -} from "./extension_pull.ts"; +import { LockfileRepository } from "./extension_pull.ts"; import { readUpstreamExtensions, type UpstreamExtensionEntry, } from "../../infrastructure/persistence/upstream_extensions.ts"; -Deno.test("removeUpstreamExtension removes entry and preserves others", async () => { +Deno.test("LockfileRepository.removeEntry removes entry and preserves others", async () => { const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); try { const lockfilePath = join(tmpDir, "upstream_extensions.json"); - // Set up two extensions - await updateUpstreamExtensions(lockfilePath, "@test/first", "1.0.0", [ - "a.yaml", - ]); - await updateUpstreamExtensions(lockfilePath, "@test/second", "2.0.0", [ - "b.yaml", - ]); + const repoFirst = await LockfileRepository.create(lockfilePath); + await repoFirst.writeEntry("@test/first", "1.0.0", ["a.yaml"]); + const repoSecond = await LockfileRepository.create(lockfilePath); + await repoSecond.writeEntry("@test/second", "2.0.0", ["b.yaml"]); - // Remove the first one - await removeUpstreamExtension(lockfilePath, "@test/first"); + // Remove the first one via a fresh instance. 
+ const repoRm = await LockfileRepository.create(lockfilePath); + await repoRm.removeEntry("@test/first"); - const content = await Deno.readTextFile( - join(tmpDir, "upstream_extensions.json"), - ); + const content = await Deno.readTextFile(lockfilePath); const data = JSON.parse(content) as Record; assertEquals(data["@test/first"], undefined); @@ -56,20 +49,17 @@ Deno.test("removeUpstreamExtension removes entry and preserves others", async () } }); -Deno.test("removeUpstreamExtension handles non-existent extension gracefully", async () => { +Deno.test("LockfileRepository.removeEntry handles non-existent extension gracefully", async () => { const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); try { const lockfilePath = join(tmpDir, "upstream_extensions.json"); - await updateUpstreamExtensions(lockfilePath, "@test/first", "1.0.0", [ - "a.yaml", - ]); + const repo = await LockfileRepository.create(lockfilePath); + await repo.writeEntry("@test/first", "1.0.0", ["a.yaml"]); - // Removing a non-existent entry should not throw - await removeUpstreamExtension(lockfilePath, "@test/nonexistent"); + // Removing a non-existent entry should not throw. 
+ await repo.removeEntry("@test/nonexistent"); - const content = await Deno.readTextFile( - join(tmpDir, "upstream_extensions.json"), - ); + const content = await Deno.readTextFile(lockfilePath); const data = JSON.parse(content) as Record; assertEquals(data["@test/first"].version, "1.0.0"); @@ -78,16 +68,15 @@ Deno.test("removeUpstreamExtension handles non-existent extension gracefully", a } }); -Deno.test("removeUpstreamExtension handles missing JSON file", async () => { +Deno.test("LockfileRepository.removeEntry handles missing JSON file", async () => { const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); try { const lockfilePath = join(tmpDir, "upstream_extensions.json"); - // Should not throw even when file doesn't exist - await removeUpstreamExtension(lockfilePath, "@test/nonexistent"); + const repo = await LockfileRepository.create(lockfilePath); + // Should not throw even when file doesn't exist. + await repo.removeEntry("@test/nonexistent"); - const content = await Deno.readTextFile( - join(tmpDir, "upstream_extensions.json"), - ); + const content = await Deno.readTextFile(lockfilePath); const data = JSON.parse(content) as Record; assertEquals(Object.keys(data).length, 0); @@ -100,7 +89,8 @@ Deno.test("readUpstreamExtensions reads existing entries", async () => { const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); try { const lockfilePath = join(tmpDir, "upstream_extensions.json"); - await updateUpstreamExtensions(lockfilePath, "@test/ext", "1.0.0", [ + const repo = await LockfileRepository.create(lockfilePath); + await repo.writeEntry("@test/ext", "1.0.0", [ "extensions/models/foo.yaml", ]); diff --git a/src/cli/commands/extension_search.ts b/src/cli/commands/extension_search.ts index 5b390b05..40309642 100644 --- a/src/cli/commands/extension_search.ts +++ b/src/cli/commands/extension_search.ts @@ -34,7 +34,11 @@ import { UserError } from "../../domain/errors.ts"; import { ExtensionApiClient, } from 
"../../infrastructure/http/extension_api_client.ts"; -import { type PullContext, pullExtension } from "./extension_pull.ts"; +import { + LockfileRepository, + type PullContext, + pullExtension, +} from "./extension_pull.ts"; import { consumeStream, createLibSwampContext, @@ -205,13 +209,14 @@ export const extensionSearchCommand = new Command() (msg) => ctx.logger.warn(msg), ); + const lockfileRepository = await LockfileRepository.create(lockfilePath); const pullCtx: PullContext = { getExtension: (name) => client.getExtension(name), downloadArchive: (name, version) => client.downloadArchive(name, version), getChecksum: (name, version) => client.getChecksum(name, version), logger: ctx.logger, - lockfilePath, + lockfileRepository, skillsDir: resolveSkillsDir(repoDir, resolvePrimaryTool(marker)), repoDir, force: false, diff --git a/src/cli/commands/extension_update.ts b/src/cli/commands/extension_update.ts index 8065cffe..27b6291e 100644 --- a/src/cli/commands/extension_update.ts +++ b/src/cli/commands/extension_update.ts @@ -113,11 +113,15 @@ export const extensionUpdateCommand = new Command() const serverUrl = resolveServerUrl(); const ctx = createLibSwampContext({ logger: cliCtx.logger }); - const deps = createExtensionUpdateDeps({ + const deps = await createExtensionUpdateDeps({ lockfilePath, serverUrl, installExtension: async (name: string, version: string) => { - const installCtx = createInstallContext(serverUrl, { + // Construct a fresh InstallContext per upgrade — captures a + // current snapshot of the lockfile per the + // InstallContext.lockfileRepository single-use rule. Reusing one + // context across multiple installs would expose stale state. 
+ const installCtx = await createInstallContext(serverUrl, { logger: cliCtx.logger, lockfilePath, skillsDir, diff --git a/src/cli/commands/open.ts b/src/cli/commands/open.ts index f795dc87..0dc8a815 100644 --- a/src/cli/commands/open.ts +++ b/src/cli/commands/open.ts @@ -52,7 +52,7 @@ import { RepoMarkerRepository } from "../../infrastructure/persistence/repo_mark import { resolveModelsDir } from "../resolve_models_dir.ts"; import { ExtensionCatalogStore } from "../../infrastructure/persistence/extension_catalog_store.ts"; import { ExtensionRepository } from "../../infrastructure/persistence/extension_repository.ts"; -import { readUpstreamExtensions } from "../../infrastructure/persistence/upstream_extensions.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; import { swampPath } from "../../infrastructure/persistence/paths.ts"; import { isAbsolute } from "@std/path"; import { @@ -124,12 +124,12 @@ async function loadRepoIntoState( ? modelsDir : resolve(result.repoDir, modelsDir); const lockfilePath = join(absoluteModelsDir, "upstream_extensions.json"); - const upstream = await readUpstreamExtensions(lockfilePath); + const lockfileRepository = await LockfileRepository.create(lockfilePath); const rescanRepo = new ExtensionRepository({ catalog: new ExtensionCatalogStore( swampPath(result.repoDir, "_extension_catalog.db"), ), - getLockedVersion: (name) => upstream[name]?.version ?? 
null, + lockfileRepository, repoRoot: result.repoDir, }); try { @@ -217,6 +217,7 @@ export const openCommand = new Command() if (editsStatus === "mismatch") { throw new LocalEditsError(name); } + const pullLockfileRepo = await LockfileRepository.create(lockfilePath); await pullExtension( { name, version: null }, { @@ -224,7 +225,7 @@ export const openCommand = new Command() downloadArchive: (n, v) => extClient.downloadArchive(n, v), getChecksum: (n, v) => extClient.getChecksum(n, v), logger: ctx.logger, - lockfilePath, + lockfileRepository: pullLockfileRepo, skillsDir: resolveSkillsDir(repoDir, resolvePrimaryTool(marker)), repoDir, // Force overwrite — the web UI has no stdin to answer the diff --git a/src/cli/create_extension_install_deps.ts b/src/cli/create_extension_install_deps.ts index acd22953..60812594 100644 --- a/src/cli/create_extension_install_deps.ts +++ b/src/cli/create_extension_install_deps.ts @@ -26,6 +26,7 @@ import { RepoMarkerRepository } from "../infrastructure/persistence/repo_marker_ import { ExtensionApiClient } from "../infrastructure/http/extension_api_client.ts"; import { type ExtensionInstallDeps, + LockfileRepository, resolveServerUrl, } from "../libswamp/mod.ts"; import { resolveModelsDir } from "./resolve_models_dir.ts"; @@ -72,12 +73,12 @@ export async function createExtensionInstallDeps( lockfilePath, repoDir: absoluteRepoDir, skillsDirRelative, - createInstallContext: (_name, _version) => ({ + createInstallContext: async (_name, _version) => ({ getExtension: (n) => client.getExtension(n), downloadArchive: (n, v) => client.downloadArchive(n, v), getChecksum: (n, v) => client.getChecksum(n, v), logger, - lockfilePath, + lockfileRepository: await LockfileRepository.create(lockfilePath), skillsDir: absoluteSkillsDir, repoDir: absoluteRepoDir, force: true, diff --git a/src/cli/mod.ts b/src/cli/mod.ts index 3d9fa50d..f0e6b5e6 100644 --- a/src/cli/mod.ts +++ b/src/cli/mod.ts @@ -21,7 +21,6 @@ import { Command } from "@cliffy/command"; 
import { setColorEnabled } from "@std/fmt/colors"; import { isAbsolute, join, resolve } from "@std/path"; import { swampPath } from "../infrastructure/persistence/paths.ts"; -import { readUpstreamExtensions } from "../infrastructure/persistence/upstream_extensions.ts"; import { enumeratePulledExtensionDirs } from "../libswamp/mod.ts"; import { getLogger, parseLogLevel } from "@logtape/logtape"; import { initializeLogging } from "../infrastructure/logging/logger.ts"; @@ -60,6 +59,7 @@ import { import { UserModelLoader } from "../domain/models/user_model_loader.ts"; import { ExtensionCatalogStore } from "../infrastructure/persistence/extension_catalog_store.ts"; import { ExtensionRepository } from "../infrastructure/persistence/extension_repository.ts"; +import { LockfileRepository } from "../infrastructure/persistence/lockfile_repository.ts"; import { UserVaultLoader } from "../domain/vaults/user_vault_loader.ts"; import { UserDriverLoader } from "../domain/drivers/user_driver_loader.ts"; import { UserDatastoreLoader } from "../domain/datastore/user_datastore_loader.ts"; @@ -267,10 +267,10 @@ export async function configureExtensionLoaders( isAbsolute(repoModelsDir) ? repoModelsDir : resolve(repoDir, repoModelsDir), "upstream_extensions.json", ); - const upstream = await readUpstreamExtensions(lockfilePath); + const lockfileRepository = await LockfileRepository.create(lockfilePath); const repository = new ExtensionRepository({ catalog, - getLockedVersion: (name) => upstream[name]?.version ?? 
null, + lockfileRepository, repoRoot: repoDir, }); @@ -377,7 +377,10 @@ export function configureExtensionAutoResolver( catalog: new ExtensionCatalogStore( swampPath(repoDir, "_extension_catalog.db"), ), - getLockedVersion: () => null, + lockfileRepository: new LockfileRepository( + join(resolve(repoDir, modelsDir), "upstream_extensions.json"), + {}, + ), repoRoot: repoDir, }), }), @@ -426,7 +429,10 @@ async function loadUserModels( catalog: new ExtensionCatalogStore( swampPath(repoDir, "_extension_catalog.db"), ), - getLockedVersion: () => null, + lockfileRepository: new LockfileRepository( + join(absoluteModelsDir, "upstream_extensions.json"), + {}, + ), repoRoot: repoDir, }); @@ -759,7 +765,8 @@ async function checkForMissingPulledExtensions( : resolve(repoDir, modelsDir); const lockfilePath = join(absoluteModelsDir, "upstream_extensions.json"); - const upstream = await readUpstreamExtensions(lockfilePath); + const lockfileRepository = await LockfileRepository.create(lockfilePath); + const upstream = lockfileRepository.getAllEntries(); const extensionNames = Object.keys(upstream); if (extensionNames.length === 0) return; diff --git a/src/cli/resolve_datastore.ts b/src/cli/resolve_datastore.ts index 655d0573..57ed765b 100644 --- a/src/cli/resolve_datastore.ts +++ b/src/cli/resolve_datastore.ts @@ -43,13 +43,13 @@ import { resolveDatastoreType } from "../domain/extensions/extension_auto_resolv import { getAutoResolver } from "../domain/extensions/auto_resolver_context.ts"; import { maybeAutoUpdateDatastoreExtension } from "../libswamp/extensions/datastore_auto_update.ts"; import { FileExtensionUpdateCheckRepository } from "../infrastructure/persistence/extension_update_check_repository.ts"; -import { readUpstreamExtensions } from "../infrastructure/persistence/upstream_extensions.ts"; import { ExtensionApiClient } from "../infrastructure/http/extension_api_client.ts"; import { DEFAULT_SWAMP_CLUB_URL } from "../domain/auth/auth_credentials.ts"; import { 
detectLocalEditsForExtension, enumeratePulledExtensionDirs, installExtension, + LockfileRepository, } from "../libswamp/mod.ts"; import { UserDatastoreLoader } from "../domain/datastore/user_datastore_loader.ts"; import { EmbeddedDenoRuntime } from "../infrastructure/runtime/embedded_deno_runtime.ts"; @@ -100,8 +100,8 @@ async function maybeAutoUpdateSwampDatastore( const result = await maybeAutoUpdateDatastoreExtension(type, { getInstalledVersion: async (name) => { - const upstream = await readUpstreamExtensions(lockfilePath); - return upstream[name]?.version ?? null; + const installedRepo = await LockfileRepository.create(lockfilePath); + return installedRepo.getLockedVersion(name); }, getLatestVersion: async (name) => { try { @@ -120,6 +120,11 @@ async function maybeAutoUpdateSwampDatastore( // derives per-extension destinations (models/workflows/vaults/ // drivers/datastores/reports) from `name`; only skillsDir is // caller-owned because skills land in a tool-specific dir. + // Construct a fresh LockfileRepository per install to capture + // a current snapshot — the InstallContext is single-use. 
+ const lockfileRepository = await LockfileRepository.create( + lockfilePath, + ); await installExtension( { name, version }, { @@ -127,7 +132,7 @@ async function maybeAutoUpdateSwampDatastore( downloadArchive: (n, v) => extensionClient.downloadArchive(n, v), getChecksum: (n, v) => extensionClient.getChecksum(n, v), logger, - lockfilePath, + lockfileRepository, skillsDir: swampPath( resolvedRepoDir, SWAMP_SUBDIRS.pulledSkills, diff --git a/src/domain/datastore/user_datastore_loader_test.ts b/src/domain/datastore/user_datastore_loader_test.ts index 8f80be90..e8001cfc 100644 --- a/src/domain/datastore/user_datastore_loader_test.ts +++ b/src/domain/datastore/user_datastore_loader_test.ts @@ -27,6 +27,7 @@ import { import { bundleNamespace } from "../../infrastructure/persistence/paths.ts"; import { ExtensionCatalogStore } from "../../infrastructure/persistence/extension_catalog_store.ts"; import { ExtensionRepository } from "../../infrastructure/persistence/extension_repository.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; import type { DenoRuntime } from "../runtime/deno_runtime.ts"; /** Stub runtime that returns "deno" as the binary path. 
*/ @@ -43,7 +44,10 @@ function makeRepoForCatalog( ): ExtensionRepository { return new ExtensionRepository({ catalog, - getLockedVersion: () => null, + lockfileRepository: new LockfileRepository( + "/test/repo/upstream_extensions.json", + {}, + ), repoRoot, }); } diff --git a/src/domain/drivers/user_driver_loader_test.ts b/src/domain/drivers/user_driver_loader_test.ts index 3e894f59..3d1f2264 100644 --- a/src/domain/drivers/user_driver_loader_test.ts +++ b/src/domain/drivers/user_driver_loader_test.ts @@ -24,6 +24,7 @@ import { driverTypeRegistry } from "./driver_type_registry.ts"; import { bundleNamespace } from "../../infrastructure/persistence/paths.ts"; import { ExtensionCatalogStore } from "../../infrastructure/persistence/extension_catalog_store.ts"; import { ExtensionRepository } from "../../infrastructure/persistence/extension_repository.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; import type { DenoRuntime } from "../runtime/deno_runtime.ts"; const testDenoRuntime: DenoRuntime = { @@ -37,7 +38,10 @@ function makeRepoForCatalog( ): ExtensionRepository { return new ExtensionRepository({ catalog, - getLockedVersion: () => null, + lockfileRepository: new LockfileRepository( + "/test/repo/upstream_extensions.json", + {}, + ), repoRoot, }); } diff --git a/src/domain/models/user_model_loader_test.ts b/src/domain/models/user_model_loader_test.ts index 30e69088..72c5659b 100644 --- a/src/domain/models/user_model_loader_test.ts +++ b/src/domain/models/user_model_loader_test.ts @@ -28,6 +28,7 @@ import { modelRegistry } from "./model.ts"; import { bundleNamespace } from "../../infrastructure/persistence/paths.ts"; import { ExtensionCatalogStore } from "../../infrastructure/persistence/extension_catalog_store.ts"; import { ExtensionRepository } from "../../infrastructure/persistence/extension_repository.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; /** 
W1b/(a-2): construct an ExtensionRepository wrapping a test catalog. */ function makeRepoForCatalog( @@ -36,7 +37,10 @@ function makeRepoForCatalog( ): ExtensionRepository { return new ExtensionRepository({ catalog, - getLockedVersion: () => null, + lockfileRepository: new LockfileRepository( + "/test/repo/upstream_extensions.json", + {}, + ), repoRoot, }); } diff --git a/src/domain/reports/user_report_loader_test.ts b/src/domain/reports/user_report_loader_test.ts index 961c1c92..2b89b58d 100644 --- a/src/domain/reports/user_report_loader_test.ts +++ b/src/domain/reports/user_report_loader_test.ts @@ -24,6 +24,7 @@ import { reportRegistry } from "./report_registry.ts"; import { bundleNamespace } from "../../infrastructure/persistence/paths.ts"; import { ExtensionCatalogStore } from "../../infrastructure/persistence/extension_catalog_store.ts"; import { ExtensionRepository } from "../../infrastructure/persistence/extension_repository.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; import type { DenoRuntime } from "../runtime/deno_runtime.ts"; /** Test DenoRuntime that returns the current deno binary path. 
*/ @@ -38,7 +39,10 @@ function makeRepoForCatalog( ): ExtensionRepository { return new ExtensionRepository({ catalog, - getLockedVersion: () => null, + lockfileRepository: new LockfileRepository( + "/test/repo/upstream_extensions.json", + {}, + ), repoRoot, }); } diff --git a/src/domain/vaults/user_vault_loader_test.ts b/src/domain/vaults/user_vault_loader_test.ts index e8cc9fb4..cb82fcb1 100644 --- a/src/domain/vaults/user_vault_loader_test.ts +++ b/src/domain/vaults/user_vault_loader_test.ts @@ -24,6 +24,7 @@ import { VaultTypeRegistry, vaultTypeRegistry } from "./vault_type_registry.ts"; import { bundleNamespace } from "../../infrastructure/persistence/paths.ts"; import { ExtensionCatalogStore } from "../../infrastructure/persistence/extension_catalog_store.ts"; import { ExtensionRepository } from "../../infrastructure/persistence/extension_repository.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; import type { DenoRuntime } from "../runtime/deno_runtime.ts"; /** Stub runtime that returns "deno" as the binary path. 
*/ @@ -40,7 +41,10 @@ function makeRepoForCatalog( ): ExtensionRepository { return new ExtensionRepository({ catalog, - getLockedVersion: () => null, + lockfileRepository: new LockfileRepository( + "/test/repo/upstream_extensions.json", + {}, + ), repoRoot, }); } diff --git a/src/infrastructure/persistence/extension_repository.ts b/src/infrastructure/persistence/extension_repository.ts index b1096d85..037a2f52 100644 --- a/src/infrastructure/persistence/extension_repository.ts +++ b/src/infrastructure/persistence/extension_repository.ts @@ -26,6 +26,7 @@ import type { ExtensionTypeRow, } from "./extension_catalog_store.ts"; import { DuplicateTypeError } from "./duplicate_type_error.ts"; +import type { LockfileRepository } from "./lockfile_repository.ts"; import { type Extension, type ExtensionOrigin, @@ -81,18 +82,20 @@ export interface InvalidationGuardResult { * catalog deliberately have empty `extension_version` because the * pulled-extensions on-disk tree encodes only the name. Version is * owned by `upstream_extensions.json` (the lockfile) and consulted at - * read time. The repository takes a synchronous `getLockedVersion` - * closure injected at construction; callers pre-read the lockfile via - * {@link readUpstreamExtensions} and pass a closure over the result. + * read time. The repository takes a {@link LockfileRepository} injected + * at construction and asks it for the locked version on every fallback + * lookup. * - * **Snapshot frozen at construction.** The lockfile snapshot inside - * `getLockedVersion` is taken once when the caller pre-reads the - * lockfile. A long-lived repository instance does not refresh — re- - * construction is the recommended mechanism. The race window between - * lockfile read and write-back (process A reads v1, process B upgrades - * to v2 + rewrites lockfile, process A writes back v1) is acknowledged - * but deferred to W3's `ReconcileFromDisk` for convergence; SQLite's - * `busy_timeout` serializes the write itself. 
+ * **Snapshot frozen at construction.** The lockfile snapshot lives one + * layer out, inside the {@link LockfileRepository}. That repository is + * itself constructed-with-snapshot per its own JSDoc; callers who need + * a fresh snapshot construct a new {@link LockfileRepository} and pass + * it to a new {@link ExtensionRepository}. Long-lived instances do NOT + * auto-refresh — re-construction is the recommended mechanism. The race + * window between lockfile read and write-back (process A reads v1, + * process B upgrades to v2 + rewrites lockfile, process A writes back + * v1) is acknowledged but deferred to W3's `ReconcileFromDisk` for + * convergence; SQLite's `busy_timeout` serializes the write itself. * * **Composition over inheritance.** The repository wraps an * {@link ExtensionCatalogStore} via composition, NOT inheritance, so @@ -124,7 +127,7 @@ export class ExtensionRepository { */ readonly legacyStore: ExtensionCatalogStore; - private readonly getLockedVersion: (name: string) => string | null; + private readonly lockfileRepository: LockfileRepository; private readonly repoRoot: string; /** * Tracks rows we've already info-logged for the empty-version @@ -137,11 +140,11 @@ export class ExtensionRepository { constructor(args: { catalog: ExtensionCatalogStore; - getLockedVersion: (name: string) => string | null; + lockfileRepository: LockfileRepository; repoRoot: string; }) { this.legacyStore = args.catalog; - this.getLockedVersion = args.getLockedVersion; + this.lockfileRepository = args.lockfileRepository; this.repoRoot = canonicalizePath(args.repoRoot); this.fallbackLoggedSourcePaths = new Set(); } @@ -400,7 +403,7 @@ export class ExtensionRepository { if (name !== null && (!version || version.length === 0)) { // Pulled row: name populated, version empty. Consult the lockfile. 
- const locked = this.getLockedVersion(name); + const locked = this.lockfileRepository.getLockedVersion(name); if (locked === null) { logger .warn`Dropping orphan pulled row at ${row.source_path}: lockfile has no entry for ${name}.`; @@ -619,12 +622,3 @@ function sourceToRow( extension_version: extension.version, }; } - -/** - * Convenience constructor for the empty-locked-version case (no - * lockfile present). Returns a closure that always returns null. - * Caller pattern: `getLockedVersion: emptyLockedVersionLookup()`. - */ -export function emptyLockedVersionLookup(): (name: string) => string | null { - return () => null; -} diff --git a/src/infrastructure/persistence/extension_repository_test.ts b/src/infrastructure/persistence/extension_repository_test.ts index 910824f0..9ffa7ba8 100644 --- a/src/infrastructure/persistence/extension_repository_test.ts +++ b/src/infrastructure/persistence/extension_repository_test.ts @@ -30,9 +30,10 @@ import type { ExtensionRepository } from "./extension_repository.ts"; import { ExtensionCatalogStore } from "./extension_catalog_store.ts"; import { DuplicateTypeError } from "./duplicate_type_error.ts"; import { - fixedLockedVersionLookup, + fixedLockedVersions, makeStubRepository, } from "./test_helpers/stub_extension_repository.ts"; +import type { UpstreamExtensionsMap } from "./upstream_extensions.ts"; import { type Extension, makeExtension, @@ -67,13 +68,13 @@ function withRepository( catalog: ExtensionCatalogStore, repoRoot: string, ) => void, - opts?: { getLockedVersion?: (name: string) => string | null }, + opts?: { lockedVersions?: UpstreamExtensionsMap }, ): void { const { repoRoot, dbPath } = makeTempLayout(); const { repository, catalog } = makeStubRepository({ dbPath, repoRoot, - getLockedVersion: opts?.getLockedVersion, + lockedVersions: opts?.lockedVersions, }); try { fn(repository, catalog, repoRoot); @@ -381,7 +382,7 @@ Deno.test("ExtensionRepository: lockfile fallback resolves empty version, writes const exts2 = 
repo.loadAll(); assertEquals(exts2.length, 1); assertEquals(exts2[0].version, "1.0.0"); - }, { getLockedVersion: fixedLockedVersionLookup({ "@scope/foo": "1.0.0" }) }); + }, { lockedVersions: fixedLockedVersions({ "@scope/foo": "1.0.0" }) }); }); // ===== Test #9: lockfile fallback orphan path ===== @@ -411,7 +412,7 @@ Deno.test("ExtensionRepository: lockfile fallback orphan-DELETEs a pulled row wh assertEquals(exts.length, 0); // The row was DELETEd as an orphan. assertEquals(cat.findAll().length, 0); - }, { getLockedVersion: () => null }); + }, { lockedVersions: {} }); }); // ===== Test #10: cold-start guard parity over all 5 kinds ===== @@ -543,7 +544,7 @@ Deno.test("ExtensionRepository: two pulled rows for same name resolve to same ve // Both source paths must appear in the error message. assertStringIncludes(thrown.message, "models/instance.ts"); assertStringIncludes(thrown.message, "models/extra/instance.ts"); - }, { getLockedVersion: fixedLockedVersionLookup({ "@scope/foo": "2.0.0" }) }); + }, { lockedVersions: fixedLockedVersions({ "@scope/foo": "2.0.0" }) }); }); // ===== Supporting tests ===== diff --git a/src/infrastructure/persistence/lockfile_repository.ts b/src/infrastructure/persistence/lockfile_repository.ts new file mode 100644 index 00000000..fff31aff --- /dev/null +++ b/src/infrastructure/persistence/lockfile_repository.ts @@ -0,0 +1,213 @@ +// Swamp, an Automation Framework +// Copyright (C) 2026 System Initiative, Inc. +// +// This file is part of Swamp. +// +// Swamp is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License version 3 +// as published by the Free Software Foundation, with the Swamp +// Extension and Definition Exception (found in the "COPYING-EXCEPTION" +// file). +// +// Swamp is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with Swamp. If not, see . + +import { dirname } from "@std/path"; +import { atomicWriteTextFile } from "./atomic_write.ts"; +import { + readUpstreamExtensions, + type UpstreamExtensionEntry, + type UpstreamExtensionsMap, +} from "./upstream_extensions.ts"; + +const LOCK_RETRY_COUNT = 10; +const LOCK_RETRY_DELAY_MS = 100; + +/** Options accepted by {@link LockfileRepository.writeEntry}. */ +export interface WriteEntryOptions { + include?: string[]; + checksum?: string; + filesChecksum?: string; + serverUrl?: string; +} + +/** + * Sole gateway for read+write of `upstream_extensions.json` (the lockfile). + * + * **Asymmetric semantics — read carefully.** + * + * The repository captures a snapshot of the lockfile at construction time. + * The intent is to preserve the W1b "snapshot frozen at construction" + * contract that {@link ExtensionRepository} relies on (see its JSDoc) — the + * snapshot now lives one layer out, but the contract is identical: two + * repository instances constructed at different wall-clock times can see + * different states. To refresh, construct a new instance. + * + * - **Reads** ({@link getEntry}, {@link getAllEntries}, + * {@link getLockedVersion}) serve from the construction-time cache. They + * do NOT hit disk; subsequent disk mutations by sibling processes / + * instances are NOT reflected. + * + * - **Writes** ({@link writeEntry}, {@link removeEntry}) acquire the + * advisory file lock, re-read the CURRENT disk state, merge the new + * entry, atomic-write the merged result, release the lock, and update + * this instance's local cache to match. This preserves the + * pre-LockfileRepository concurrency semantic: two concurrent writers + * don't clobber each other; each sees the other's prior commits via the + * re-read step. 
The local cache update means the writer can read its + * own write back from this same instance. + * + * Future contributors: do NOT "fix" the cached read to be live. The + * snapshot semantics are deliberate. If a caller needs current disk state, + * they construct a new {@link LockfileRepository}. + * + * Filed as the W2 prequel for swamp-club#231. + */ +export class LockfileRepository { + readonly lockfilePath: string; + private cache: UpstreamExtensionsMap; + + /** + * Captures a snapshot of the lockfile at this moment. A missing file + * yields an empty cache (matches {@link readUpstreamExtensions}'s + * NotFound semantics). + */ + static async create(lockfilePath: string): Promise<LockfileRepository> { + const cache = await readUpstreamExtensions(lockfilePath); + return new LockfileRepository(lockfilePath, cache); + } + + /** + * Constructs an instance with an explicit cache. Prefer + * {@link LockfileRepository.create} for production code; this constructor + * is the test seam for fixtures that need a known starting state without + * touching disk. + */ + constructor(lockfilePath: string, cache: UpstreamExtensionsMap = {}) { + this.lockfilePath = lockfilePath; + this.cache = cache; + } + + /** Returns the cached entry for `name`, or null if absent. */ + getEntry(name: string): UpstreamExtensionEntry | null { + return this.cache[name] ?? null; + } + + /** + * Returns the cached entry map. Callers receive a defensive shallow + * copy so external mutation cannot corrupt the cache. + */ + getAllEntries(): UpstreamExtensionsMap { + return { ...this.cache }; + } + + /** + * Returns the version string for `name`, or null if absent. Sugar over + * `getEntry(name)?.version ?? null`. Replaces the W1b + * `getLockedVersion` closure injected into {@link ExtensionRepository}. + */ + getLockedVersion(name: string): string | null { + return this.cache[name]?.version ?? null; + } + + /** + * Writes a new lockfile entry.
Acquires the advisory lock, re-reads + * disk under the lock (so concurrent writes by siblings are not + * clobbered), merges, atomic-writes, releases the lock, and updates + * this instance's cache to match. + */ + async writeEntry( + name: string, + version: string, + files: string[], + options?: WriteEntryOptions, + ): Promise<void> { + await Deno.mkdir(dirname(this.lockfilePath), { recursive: true }); + const lockFile = await this.acquireLock(); + try { + const current = await readUpstreamExtensions(this.lockfilePath); + current[name] = { + version, + pulledAt: new Date().toISOString(), + files, + ...(options?.include && options.include.length > 0 + ? { include: options.include } + : {}), + ...(options?.checksum ? { checksum: options.checksum } : {}), + ...(options?.filesChecksum + ? { filesChecksum: options.filesChecksum } + : {}), + ...(options?.serverUrl ? { serverUrl: options.serverUrl } : {}), + }; + await atomicWriteTextFile( + this.lockfilePath, + JSON.stringify(current, null, 2) + "\n", + ); + this.cache = current; + } finally { + await this.releaseLock(lockFile); + } + } + + /** + * Removes an entry by name. No-op if absent. Acquires the advisory + * lock, re-reads disk under the lock, deletes the key, atomic-writes + * the result, releases the lock, and updates this instance's cache.
+ */ + async removeEntry(name: string): Promise<void> { + const lockFile = await this.acquireLock(); + try { + const current = await readUpstreamExtensions(this.lockfilePath); + delete current[name]; + await atomicWriteTextFile( + this.lockfilePath, + JSON.stringify(current, null, 2) + "\n", + ); + this.cache = current; + } finally { + await this.releaseLock(lockFile); + } + } + + private async acquireLock(): Promise<Deno.FsFile> { + const lockPath = `${this.lockfilePath}.lock`; + for (let attempt = 0; attempt < LOCK_RETRY_COUNT; attempt++) { + try { + return await Deno.open(lockPath, { + create: true, + createNew: true, + write: true, + }); + } catch (error) { + if (error instanceof Deno.errors.AlreadyExists) { + if (attempt < LOCK_RETRY_COUNT - 1) { + await new Promise((r) => setTimeout(r, LOCK_RETRY_DELAY_MS)); + continue; + } + throw new Error( + "Could not acquire lock on upstream_extensions.json. Another operation may be in progress. Please retry.", + ); + } + throw error; + } + } + throw new Error( + "Could not acquire lock on upstream_extensions.json.", + ); + } + + private async releaseLock(lockFile: Deno.FsFile): Promise<void> { + lockFile.close(); + try { + await Deno.remove(`${this.lockfilePath}.lock`); + } catch { + // Best-effort cleanup; the lockfile may have been removed by a + // concurrent process or never created in the unhappy case. + } + } +} diff --git a/src/infrastructure/persistence/lockfile_repository_test.ts b/src/infrastructure/persistence/lockfile_repository_test.ts new file mode 100644 index 00000000..56aba6b8 --- /dev/null +++ b/src/infrastructure/persistence/lockfile_repository_test.ts @@ -0,0 +1,256 @@ +// Swamp, an Automation Framework +// Copyright (C) 2026 System Initiative, Inc. +// +// This file is part of Swamp.
+// +// Swamp is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License version 3 +// as published by the Free Software Foundation, with the Swamp +// Extension and Definition Exception (found in the "COPYING-EXCEPTION" +// file). +// +// Swamp is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with Swamp. If not, see <https://www.gnu.org/licenses/>. + +import { assertEquals, assertNotEquals } from "@std/assert"; +import { join } from "@std/path"; +import { LockfileRepository } from "./lockfile_repository.ts"; +import { atomicWriteTextFile } from "./atomic_write.ts"; + +async function withTempDir( + fn: (dir: string) => Promise<void>, +): Promise<void> { + const dir = await Deno.makeTempDir({ prefix: "swamp-lockfile-repo-test-" }); + try { + await fn(dir); + } finally { + if (Deno.build.os === "windows") { + await Deno.remove(dir, { recursive: true }).catch(() => {}); + } else { + await Deno.remove(dir, { recursive: true }); + } + } +} + +Deno.test("LockfileRepository.create: missing lockfile yields empty cache", async () => { + await withTempDir(async (dir) => { + const path = join(dir, "upstream_extensions.json"); + const repo = await LockfileRepository.create(path); + + assertEquals(repo.getAllEntries(), {}); + assertEquals(repo.getEntry("@scope/missing"), null); + assertEquals(repo.getLockedVersion("@scope/missing"), null); + }); +}); + +Deno.test("LockfileRepository.create: existing lockfile populates cache", async () => { + await withTempDir(async (dir) => { + const path = join(dir, "upstream_extensions.json"); + const initial = { + "@scope/foo": { + version: "2026.01.01.1", + pulledAt: "2026-01-01T00:00:00.000Z", + files: ["models/foo.ts"], + }, + }; + await
atomicWriteTextFile(path, JSON.stringify(initial, null, 2)); + + const repo = await LockfileRepository.create(path); + + assertEquals(repo.getEntry("@scope/foo")?.version, "2026.01.01.1"); + assertEquals(repo.getLockedVersion("@scope/foo"), "2026.01.01.1"); + assertEquals(Object.keys(repo.getAllEntries()), ["@scope/foo"]); + }); +}); + +Deno.test("LockfileRepository: cross-instance snapshot regression — repoA caches old, repoB sees new", async () => { + // Load-bearing test for ADV-1 (W2 prequel snapshot semantics). Mutates + // disk via a SIBLING LockfileRepository, NOT repoA.writeEntry, so the + // test exercises CROSS-INSTANCE staleness (the W1b race-window contract) + // rather than within-instance coherence (which writeEntry guarantees). + await withTempDir(async (dir) => { + const path = join(dir, "upstream_extensions.json"); + const initial = { + "@scope/foo": { + version: "2026.01.01.1", + pulledAt: "2026-01-01T00:00:00.000Z", + }, + }; + await atomicWriteTextFile(path, JSON.stringify(initial, null, 2)); + + const repoA = await LockfileRepository.create(path); + assertEquals(repoA.getLockedVersion("@scope/foo"), "2026.01.01.1"); + + // Out-of-band write via a SIBLING instance. repoA's cache is now stale. + const sibling = await LockfileRepository.create(path); + await sibling.writeEntry("@scope/foo", "2026.05.05.1", []); + + // repoA still serves the OLD value from its construction-time cache. + assertEquals(repoA.getLockedVersion("@scope/foo"), "2026.01.01.1"); + + // A freshly-constructed repoB sees the NEW value. 
+ const repoB = await LockfileRepository.create(path); + assertEquals(repoB.getLockedVersion("@scope/foo"), "2026.05.05.1"); + }); +}); + +Deno.test("LockfileRepository.writeEntry: creates file and updates own cache", async () => { + await withTempDir(async (dir) => { + const path = join(dir, "nested", "upstream_extensions.json"); + const repo = await LockfileRepository.create(path); + + await repo.writeEntry("@scope/foo", "2026.05.05.1", ["models/foo.ts"], { + checksum: "abc123", + }); + + // Cache reflects the write immediately. + assertEquals(repo.getEntry("@scope/foo")?.version, "2026.05.05.1"); + assertEquals(repo.getEntry("@scope/foo")?.checksum, "abc123"); + + // Disk reflects the write. + const onDisk = JSON.parse(await Deno.readTextFile(path)); + assertEquals(onDisk["@scope/foo"].version, "2026.05.05.1"); + assertEquals(onDisk["@scope/foo"].files, ["models/foo.ts"]); + }); +}); + +Deno.test("LockfileRepository.writeEntry: omits empty/undefined optional fields", async () => { + await withTempDir(async (dir) => { + const path = join(dir, "upstream_extensions.json"); + const repo = await LockfileRepository.create(path); + + await repo.writeEntry("@scope/foo", "2026.05.05.1", []); + + const entry = repo.getEntry("@scope/foo")!; + assertEquals(entry.version, "2026.05.05.1"); + assertEquals(entry.files, []); + assertEquals(entry.checksum, undefined); + assertEquals(entry.serverUrl, undefined); + assertEquals(entry.include, undefined); + }); +}); + +Deno.test("LockfileRepository.writeEntry: re-reads disk under lock to avoid clobbering siblings", async () => { + await withTempDir(async (dir) => { + const path = join(dir, "upstream_extensions.json"); + + // Construct repoA with empty cache. + const repoA = await LockfileRepository.create(path); + + // Sibling writes entry B to disk while repoA's cache is still empty. + const sibling = await LockfileRepository.create(path); + await sibling.writeEntry("@scope/sibling", "1.0.0", []); + + // repoA writes entry A. 
The re-read-under-lock step picks up the + // sibling's entry B; both survive. + await repoA.writeEntry("@scope/a", "2.0.0", []); + + const onDisk = JSON.parse(await Deno.readTextFile(path)); + assertEquals(Object.keys(onDisk).sort(), ["@scope/a", "@scope/sibling"]); + }); +}); + +Deno.test("LockfileRepository.removeEntry: deletes key and persists", async () => { + await withTempDir(async (dir) => { + const path = join(dir, "upstream_extensions.json"); + const repo = await LockfileRepository.create(path); + await repo.writeEntry("@scope/foo", "1.0.0", []); + await repo.writeEntry("@scope/bar", "2.0.0", []); + + await repo.removeEntry("@scope/foo"); + + assertEquals(repo.getEntry("@scope/foo"), null); + assertEquals(repo.getEntry("@scope/bar")?.version, "2.0.0"); + + const onDisk = JSON.parse(await Deno.readTextFile(path)); + assertEquals(Object.keys(onDisk), ["@scope/bar"]); + }); +}); + +Deno.test("LockfileRepository.removeEntry: missing key is a no-op", async () => { + await withTempDir(async (dir) => { + const path = join(dir, "upstream_extensions.json"); + const repo = await LockfileRepository.create(path); + await repo.writeEntry("@scope/keep", "1.0.0", []); + + // Should not throw. + await repo.removeEntry("@scope/never-existed"); + + assertEquals(repo.getEntry("@scope/keep")?.version, "1.0.0"); + }); +}); + +Deno.test("LockfileRepository: getAllEntries returns a defensive copy", async () => { + await withTempDir(async (dir) => { + const path = join(dir, "upstream_extensions.json"); + const repo = await LockfileRepository.create(path); + await repo.writeEntry("@scope/foo", "1.0.0", []); + + const map = repo.getAllEntries(); + delete map["@scope/foo"]; + + // Repo's internal cache must not be affected by external mutation. 
+ assertNotEquals(repo.getEntry("@scope/foo"), null); + }); +}); + +Deno.test("LockfileRepository: concurrent writers all complete via acquireLock retry", async () => { + await withTempDir(async (dir) => { + const path = join(dir, "upstream_extensions.json"); + const N = 5; + + // Each writer constructs its own repo (mirrors how lifecycle services + // would behave under concurrency) and writes a unique entry. + const writers = Array.from({ length: N }, (_, i) => + (async () => { + const repo = await LockfileRepository.create(path); + await repo.writeEntry(`@scope/ext${i}`, `${i}.0.0`, []); + })()); + + await Promise.all(writers); + + // Final on-disk state has all N entries — count assertion, not + // elapsed-time assertion (CI-flake-prone). + const onDisk = JSON.parse(await Deno.readTextFile(path)); + assertEquals(Object.keys(onDisk).length, N); + for (let i = 0; i < N; i++) { + assertEquals(onDisk[`@scope/ext${i}`].version, `${i}.0.0`); + } + }); +}); + +Deno.test("LockfileRepository: cleans up .lock file on success path", async () => { + await withTempDir(async (dir) => { + const path = join(dir, "upstream_extensions.json"); + const repo = await LockfileRepository.create(path); + + await repo.writeEntry("@scope/foo", "1.0.0", []); + + // .lock file should not exist after success. 
+ let lockExists = true; + try { + await Deno.stat(`${path}.lock`); + } catch (error) { + if (error instanceof Deno.errors.NotFound) lockExists = false; + else throw error; + } + assertEquals(lockExists, false); + }); +}); + +Deno.test("LockfileRepository: in-memory constructor takes explicit cache (test seam)", () => { + const repo = new LockfileRepository("/test/repo/upstream_extensions.json", { + "@scope/preset": { + version: "9.9.9", + pulledAt: "2026-01-01T00:00:00.000Z", + }, + }); + + assertEquals(repo.getLockedVersion("@scope/preset"), "9.9.9"); + assertEquals(repo.getEntry("@scope/preset")?.version, "9.9.9"); +}); diff --git a/src/infrastructure/persistence/test_helpers/stub_extension_repository.ts b/src/infrastructure/persistence/test_helpers/stub_extension_repository.ts index ff08c1f8..516494a0 100644 --- a/src/infrastructure/persistence/test_helpers/stub_extension_repository.ts +++ b/src/infrastructure/persistence/test_helpers/stub_extension_repository.ts @@ -29,6 +29,8 @@ import { ExtensionCatalogStore } from "../extension_catalog_store.ts"; import { ExtensionRepository } from "../extension_repository.ts"; +import { LockfileRepository } from "../lockfile_repository.ts"; +import type { UpstreamExtensionsMap } from "../upstream_extensions.ts"; /** * Constructs an ExtensionRepository wrapping a fresh @@ -43,31 +45,45 @@ import { ExtensionRepository } from "../extension_repository.ts"; * @param repoRoot The canonical repo root the repository should use * when resolving extensionRoot for pulled vs local origins. * Defaults to a sentinel value tests can use unconditionally. - * @param getLockedVersion Lockfile-fallback closure. Defaults to - * `() => null` (no lockfile entries available — orphan-DELETE - * semantics). Tests for the lockfile fallback override this. + * @param lockedVersions Lockfile-fallback fixture map keyed by + * extension name. Defaults to `{}` (no lockfile entries available — + * orphan-DELETE semantics). 
Tests for the lockfile fallback override + * this. Internally constructed into a {@link LockfileRepository} + * with a sentinel path so reads serve from the in-memory cache. */ export function makeStubRepository(args: { dbPath: string; repoRoot?: string; - getLockedVersion?: (name: string) => string | null; + lockedVersions?: UpstreamExtensionsMap; }): { repository: ExtensionRepository; catalog: ExtensionCatalogStore } { const catalog = new ExtensionCatalogStore(args.dbPath); + const lockfileRepository = new LockfileRepository( + "/test/repo/upstream_extensions.json", + args.lockedVersions ?? {}, + ); const repository = new ExtensionRepository({ catalog, - getLockedVersion: args.getLockedVersion ?? (() => null), + lockfileRepository, repoRoot: args.repoRoot ?? "/test/repo", }); return { repository, catalog }; } /** - * Synchronous convenience for tests that want a closure-style lockfile. - * Pass a plain object mapping extension name to version; the returned - * function looks up by name and returns null for misses. + * Synchronous convenience for tests that want a fixture lockfile keyed + * by name → version. Maps the name→version object into the full + * UpstreamExtensionsMap shape (synthesizing a placeholder pulledAt) so + * callers don't have to spell out the full entry shape per test. */ -export function fixedLockedVersionLookup( +export function fixedLockedVersions( versions: Readonly>, -): (name: string) => string | null { - return (name) => versions[name] ?? 
null; +): UpstreamExtensionsMap { + const map: UpstreamExtensionsMap = {}; + for (const [name, version] of Object.entries(versions)) { + map[name] = { + version, + pulledAt: "1970-01-01T00:00:00.000Z", + }; + } + return map; } diff --git a/src/libswamp/extensions/doctor.ts b/src/libswamp/extensions/doctor.ts index 0ebb070a..48e24082 100644 --- a/src/libswamp/extensions/doctor.ts +++ b/src/libswamp/extensions/doctor.ts @@ -20,6 +20,7 @@ import { walk } from "@std/fs"; import { join, relative } from "@std/path"; import type { ExtensionLoadWarning } from "../../infrastructure/logging/extension_load_warnings.ts"; +import type { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; import type { UpstreamExtensionsMap } from "../../infrastructure/persistence/upstream_extensions.ts"; import type { SwampError } from "../errors.ts"; import { extractTopLevelRoot } from "./layout.ts"; @@ -126,11 +127,12 @@ export interface DoctorExtensionsDeps { /** Clears the captured warnings array + dedupe state. */ resetState: () => void; /** - * Reads upstream_extensions.json so the orphan-detection phase can - * walk every per-extension root. Missing lockfile yields {} (the + * Lockfile repository — captures upstream_extensions.json at + * construction so the orphan-detection phase can walk every + * per-extension root. Missing lockfile yields an empty cache (the * orphan walk becomes a no-op). */ - readUpstreamExtensions: () => Promise; + lockfileRepository: LockfileRepository; /** Repo root used to resolve repo-relative paths for filesystem walks. */ repoDir: string; /** @@ -337,7 +339,7 @@ export async function* doctorExtensions( // every loader passes. Errors are not folded into `overallStatus`. 
let orphanFiles: DoctorOrphanFile[] = []; if (!deps.abortSignal.aborted) { - const upstreamMap = await deps.readUpstreamExtensions(); + const upstreamMap = deps.lockfileRepository.getAllEntries(); orphanFiles = await detectOrphanFiles( upstreamMap, deps.repoDir, diff --git a/src/libswamp/extensions/doctor_test.ts b/src/libswamp/extensions/doctor_test.ts index 790746d3..61bf3051 100644 --- a/src/libswamp/extensions/doctor_test.ts +++ b/src/libswamp/extensions/doctor_test.ts @@ -20,6 +20,7 @@ import { assertEquals } from "@std/assert"; import type { ExtensionLoadWarning } from "../../infrastructure/logging/extension_load_warnings.ts"; import { resetExtensionLoadWarnings } from "../../infrastructure/logging/extension_load_warnings.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; import { DOCTOR_REGISTRY_ORDER, doctorExtensions, @@ -75,7 +76,10 @@ function buildDeps( resetState: () => { events.push({ fn: "resetState" }); }, - readUpstreamExtensions: () => Promise.resolve({}), + lockfileRepository: new LockfileRepository( + "/test/repo/upstream_extensions.json", + {}, + ), repoDir: options.repoDir ?? "/tmp/swamp-test-repo", skillsDir: options.skillsDir ?? 
".claude/skills", abortSignal: new AbortController().signal, @@ -273,7 +277,10 @@ Deno.test( files: [".swamp/pulled-extensions/@x/y/models/tracked.ts"], }, }; - deps.readUpstreamExtensions = () => Promise.resolve(upstream); + deps.lockfileRepository = new LockfileRepository( + "/test/repo/upstream_extensions.json", + upstream, + ); const events = await collect(doctorExtensions(deps)); const completed = events.find((e) => e.kind === "completed"); @@ -317,7 +324,10 @@ Deno.test( files: [".swamp/pulled-extensions/@x/y/models/tracked.ts"], }, }; - deps.readUpstreamExtensions = () => Promise.resolve(upstream); + deps.lockfileRepository = new LockfileRepository( + "/test/repo/upstream_extensions.json", + upstream, + ); const events = await collect(doctorExtensions(deps)); const completed = events.find((e) => e.kind === "completed"); @@ -384,7 +394,10 @@ Deno.test( files: [".claude/skills/foo"], }, }; - deps.readUpstreamExtensions = () => Promise.resolve(upstream); + deps.lockfileRepository = new LockfileRepository( + "/test/repo/upstream_extensions.json", + upstream, + ); const events = await collect(doctorExtensions(deps)); const completed = events.find((e) => e.kind === "completed"); @@ -440,7 +453,10 @@ Deno.test( ], }, }; - deps.readUpstreamExtensions = () => Promise.resolve(upstream); + deps.lockfileRepository = new LockfileRepository( + "/test/repo/upstream_extensions.json", + upstream, + ); const events = await collect(doctorExtensions(deps)); const completed = events.find((e) => e.kind === "completed"); diff --git a/src/libswamp/extensions/enumerate_pulled.ts b/src/libswamp/extensions/enumerate_pulled.ts index 6663a64a..981245be 100644 --- a/src/libswamp/extensions/enumerate_pulled.ts +++ b/src/libswamp/extensions/enumerate_pulled.ts @@ -18,7 +18,7 @@ // along with Swamp. If not, see . 
import { join } from "@std/path"; -import { readUpstreamExtensions } from "../../infrastructure/persistence/upstream_extensions.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; import { swampPath } from "../../infrastructure/persistence/paths.ts"; /** Types that can appear under a per-extension subtree. */ @@ -51,7 +51,8 @@ export async function enumeratePulledExtensionDirs( repoDir: string, type: PulledExtensionType, ): Promise { - const upstream = await readUpstreamExtensions(lockfilePath); + const repo = await LockfileRepository.create(lockfilePath); + const upstream = repo.getAllEntries(); const pulledRoot = swampPath(repoDir, "pulled-extensions"); const dirs: string[] = []; diff --git a/src/libswamp/extensions/install.ts b/src/libswamp/extensions/install.ts index 690290bc..0b6290e2 100644 --- a/src/libswamp/extensions/install.ts +++ b/src/libswamp/extensions/install.ts @@ -18,7 +18,7 @@ // along with Swamp. If not, see . import { join } from "@std/path"; -import { readUpstreamExtensions } from "../../infrastructure/persistence/upstream_extensions.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; import { cleanupEmptyParentDirs } from "../../infrastructure/persistence/directory_cleanup.ts"; import type { LibSwampContext } from "../context.ts"; import type { SwampError } from "../errors.ts"; @@ -103,10 +103,17 @@ export interface ExtensionInstallDeps { * removed. */ skillsDirRelative?: string; + /** + * Async factory that constructs a fresh {@link InstallContext} for + * each install entry. The async return type matters: each context + * captures a snapshot of the lockfile via {@link LockfileRepository} + * at construction (per the InstallContext.lockfileRepository JSDoc), + * and that capture is itself an async file read. + */ createInstallContext: ( name: string, version: string, - ) => InstallContext; + ) => Promise; /** * Test seam. 
Defaults to the real `installExtension` from pull.ts; * tests can inject a stub so they don't need a real tar archive and @@ -140,7 +147,10 @@ export async function* extensionInstall( (async function* () { yield { kind: "resolving" }; - const upstream = await readUpstreamExtensions(deps.lockfilePath); + const lockfileRepository = await LockfileRepository.create( + deps.lockfilePath, + ); + const upstream = lockfileRepository.getAllEntries(); const entries: ExtensionInstallEntry[] = []; let installed = 0; let migrated = 0; @@ -172,7 +182,7 @@ export async function* extensionInstall( : { kind: "installing", name, version }; try { - const installCtx = deps.createInstallContext(name, version); + const installCtx = await deps.createInstallContext(name, version); // Thread the lockfile's stored checksum through as an integrity // anchor. installExtension verifies the freshly-downloaded archive // matches byte-for-byte and fails loudly on registry drift. diff --git a/src/libswamp/extensions/install_test.ts b/src/libswamp/extensions/install_test.ts index 35dd8df4..e047856e 100644 --- a/src/libswamp/extensions/install_test.ts +++ b/src/libswamp/extensions/install_test.ts @@ -30,6 +30,7 @@ import { import { pruneOrphanFiles } from "../../infrastructure/persistence/directory_cleanup.ts"; import { createLibSwampContext } from "../context.ts"; import type { InstallContext } from "./pull.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; import { readUpstreamExtensions } from "../../infrastructure/persistence/upstream_extensions.ts"; async function collectEvents( @@ -55,7 +56,7 @@ Deno.test("extensionInstall: empty lockfile yields all up to date", async () => lockfilePath, repoDir: tmpDir, createInstallContext: () => { - throw new Error("should not be called"); + return Promise.reject(new Error("should not be called")); }, }), ); @@ -107,7 +108,9 @@ Deno.test("extensionInstall: skips extensions with all files present", async () 
lockfilePath, repoDir: tmpDir, createInstallContext: () => { - throw new Error("should not be called for up-to-date"); + return Promise.reject( + new Error("should not be called for up-to-date"), + ); }, }), ); @@ -146,7 +149,7 @@ Deno.test("extensionInstall: detects missing files and calls install", async () extensionInstall(ctx, { lockfilePath, repoDir: tmpDir, - createInstallContext: (_name, _version) => { + createInstallContext: async (_name, _version) => { installCalled = true; // Return a minimal context that won't actually pull // (installExtension will fail, which we catch) @@ -154,7 +157,7 @@ Deno.test("extensionInstall: detects missing files and calls install", async () getExtension: () => Promise.resolve(null), downloadArchive: () => Promise.reject(new Error("test stub")), getChecksum: () => Promise.resolve(null), - lockfilePath, + lockfileRepository: await LockfileRepository.create(lockfilePath), skillsDir: join(tmpDir, ".swamp/pulled-extensions/skills"), repoDir: tmpDir, force: true, @@ -198,7 +201,7 @@ Deno.test("extensionInstall: missing lockfile yields empty result", async () => lockfilePath, repoDir: tmpDir, createInstallContext: () => { - throw new Error("should not be called"); + return Promise.reject(new Error("should not be called")); }, }), ); @@ -241,7 +244,7 @@ Deno.test("extensionInstall: lockfile-anchored checksum mismatch fails with drif extensionInstall(ctx, { lockfilePath, repoDir: tmpDir, - createInstallContext: () => ({ + createInstallContext: async () => ({ getExtension: () => Promise.resolve({ name: "@fake/ext", @@ -251,7 +254,7 @@ Deno.test("extensionInstall: lockfile-anchored checksum mismatch fails with drif downloadArchive: () => Promise.resolve(new TextEncoder().encode("drifted content")), getChecksum: () => Promise.resolve(null), - lockfilePath, + lockfileRepository: await LockfileRepository.create(lockfilePath), skillsDir: "unused", repoDir: tmpDir, force: true, @@ -286,16 +289,16 @@ Deno.test("extensionInstall: 
lockfile-anchored checksum mismatch fails with drif * `installExtensionFn`. Fields the stub reads are populated; the rest * are placeholders. */ -function makeStubInstallContext( +async function makeStubInstallContext( tmpDir: string, lockfilePath: string, -): InstallContext { +): Promise { return { // deno-lint-ignore no-explicit-any getExtension: () => Promise.resolve(null as any), downloadArchive: () => Promise.reject(new Error("unused")), getChecksum: () => Promise.resolve(null), - lockfilePath, + lockfileRepository: await LockfileRepository.create(lockfilePath), skillsDir: join(tmpDir, ".swamp/pulled-extensions/skills"), repoDir: tmpDir, force: true, diff --git a/src/libswamp/extensions/layout.ts b/src/libswamp/extensions/layout.ts index d19caefd..f823cffc 100644 --- a/src/libswamp/extensions/layout.ts +++ b/src/libswamp/extensions/layout.ts @@ -18,7 +18,7 @@ // along with Swamp. If not, see . import { SWAMP_DATA_DIR } from "../../infrastructure/persistence/paths.ts"; -import { readUpstreamExtensions } from "../../infrastructure/persistence/upstream_extensions.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; /** * Generation of an on-disk extension layout. 
@@ -229,7 +229,8 @@ export function extractTopLevelRoot( export async function detectLegacyExtensionLayout( lockfilePath: string, ): Promise { - const upstream = await readUpstreamExtensions(lockfilePath); + const repo = await LockfileRepository.create(lockfilePath); + const upstream = repo.getAllEntries(); const legacy: LegacyFileEntry[] = []; for (const [name, entry] of Object.entries(upstream)) { diff --git a/src/libswamp/extensions/list.ts b/src/libswamp/extensions/list.ts index faffeba3..fccd4670 100644 --- a/src/libswamp/extensions/list.ts +++ b/src/libswamp/extensions/list.ts @@ -22,7 +22,7 @@ import { RepoPath } from "../../domain/repo/repo_path.ts"; import { RepoMarkerRepository, } from "../../infrastructure/persistence/repo_marker_repository.ts"; -import { readUpstreamExtensions } from "../../infrastructure/persistence/upstream_extensions.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; import type { LibSwampContext } from "../context.ts"; import type { SwampError } from "../errors.ts"; @@ -45,16 +45,13 @@ export type ExtensionListEvent = | { kind: "completed"; data: ExtensionListData } | { kind: "error"; error: SwampError }; -/** Upstream extension entry as stored on disk. */ -interface UpstreamEntry { - version: string; - pulledAt?: string; - files?: string[]; -} - /** Dependencies for the extension list operation. */ export interface ExtensionListDeps { - readUpstreamExtensions: () => Promise>; + /** + * Lockfile repository pre-constructed by the caller. Captures a + * snapshot of upstream_extensions.json at construction. + */ + lockfileRepository: LockfileRepository; } /** Wires real infrastructure into ExtensionListDeps. 
*/ @@ -69,7 +66,7 @@ export async function createExtensionListDeps( const absoluteModelsDir = resolve(repoDir, modelsDir); const lockfilePath = join(absoluteModelsDir, "upstream_extensions.json"); return { - readUpstreamExtensions: () => readUpstreamExtensions(lockfilePath), + lockfileRepository: await LockfileRepository.create(lockfilePath), }; } @@ -84,7 +81,7 @@ export async function* extensionList( (async function* () { yield { kind: "resolving" }; - const upstreamData = await deps.readUpstreamExtensions(); + const upstreamData = deps.lockfileRepository.getAllEntries(); const entries: ExtensionListEntry[] = Object.entries(upstreamData) .map(([name, entry]) => ({ diff --git a/src/libswamp/extensions/list_test.ts b/src/libswamp/extensions/list_test.ts index 8e333a6b..45ae08a4 100644 --- a/src/libswamp/extensions/list_test.ts +++ b/src/libswamp/extensions/list_test.ts @@ -25,21 +25,23 @@ import { type ExtensionListDeps, type ExtensionListEvent, } from "./list.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; +import type { UpstreamExtensionsMap } from "../../infrastructure/persistence/upstream_extensions.ts"; -function makeDeps( - overrides?: Partial, -): ExtensionListDeps { +function makeDeps(upstream?: UpstreamExtensionsMap): ExtensionListDeps { + const cache: UpstreamExtensionsMap = upstream ?? 
{ + "@ns/beta": { version: "1.0.0", pulledAt: "2026-01-02" }, + "@ns/alpha": { + version: "2.0.0", + pulledAt: "2026-01-01", + files: ["a.ts"], + }, + }; return { - readUpstreamExtensions: () => - Promise.resolve({ - "@ns/beta": { version: "1.0.0", pulledAt: "2026-01-02" }, - "@ns/alpha": { - version: "2.0.0", - pulledAt: "2026-01-01", - files: ["a.ts"], - }, - }), - ...overrides, + lockfileRepository: new LockfileRepository( + "/test/repo/upstream_extensions.json", + cache, + ), }; } @@ -62,9 +64,7 @@ Deno.test("extensionList yields sorted extensions", async () => { }); Deno.test("extensionList yields empty list when no extensions", async () => { - const deps = makeDeps({ - readUpstreamExtensions: () => Promise.resolve({}), - }); + const deps = makeDeps({}); const events = await collect( extensionList(createLibSwampContext(), deps), ); diff --git a/src/libswamp/extensions/local_edits.ts b/src/libswamp/extensions/local_edits.ts index 3663aff2..845dc435 100644 --- a/src/libswamp/extensions/local_edits.ts +++ b/src/libswamp/extensions/local_edits.ts @@ -20,7 +20,7 @@ import { join, resolve } from "@std/path"; import { UserError } from "../../domain/errors.ts"; import { readInstalledExtensionDigest } from "../../infrastructure/persistence/installed_extension_digest_reader.ts"; -import { readUpstreamExtensions } from "../../infrastructure/persistence/upstream_extensions.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; /** * Tri-state outcome of the local-edits check. 
@@ -47,8 +47,8 @@ export async function detectLocalEditsForExtension( lockfilePath: string, ): Promise { try { - const upstream = await readUpstreamExtensions(lockfilePath); - const stored = upstream[name]?.filesChecksum; + const repo = await LockfileRepository.create(lockfilePath); + const stored = repo.getEntry(name)?.filesChecksum; if (!stored) return "no-anchor"; const extRoot = join( resolve(repoDir), diff --git a/src/libswamp/extensions/pull.ts b/src/libswamp/extensions/pull.ts index 91740d36..6712d8e1 100644 --- a/src/libswamp/extensions/pull.ts +++ b/src/libswamp/extensions/pull.ts @@ -29,12 +29,8 @@ import { UserError } from "../../domain/errors.ts"; import { parseExtensionManifest } from "../../domain/extensions/extension_manifest.ts"; import { analyzeExtensionSafety } from "../../domain/extensions/extension_safety_analyzer.ts"; import { ExtensionApiClient } from "../../infrastructure/http/extension_api_client.ts"; -import { atomicWriteTextFile } from "../../infrastructure/persistence/atomic_write.ts"; import { pruneOrphanFiles } from "../../infrastructure/persistence/directory_cleanup.ts"; -import { - readUpstreamExtensions, - type UpstreamExtensionsMap, -} from "../../infrastructure/persistence/upstream_extensions.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; import { bundleNamespace, swampPath, @@ -55,8 +51,6 @@ import { DEFAULT_SWAMP_CLUB_URL } from "../../domain/auth/auth_credentials.ts"; const SCOPED_NAME_PATTERN = /^@[a-z0-9_-]+\/[a-z0-9_-]+(\/[a-z0-9_-]+)*$/; const MAX_DEPENDENCY_DEPTH = 10; -const LOCK_RETRY_COUNT = 10; -const LOCK_RETRY_DELAY_MS = 100; /** Parsed extension reference from CLI argument. */ export interface ExtensionRef { @@ -125,8 +119,23 @@ export interface InstallContext { version: string, ) => Promise; logger?: Logger; - /** Full path to the upstream_extensions.json lockfile. 
*/ - lockfilePath: string; + /** + * Lockfile repository owning read+write of upstream_extensions.json. + * + * **Single-use semantics.** The repository captures a snapshot at + * construction time (per its own JSDoc); reads serve from that snapshot. + * A given InstallContext therefore embeds a snapshot taken at the moment + * the context was constructed. DO NOT reuse the same context across + * multiple install operations — a sibling process or a prior + * installExtension() call may have written between constructions, and + * the snapshot would be stale relative to disk. Construct a fresh + * context per install via `createInstallContext` / + * `createExtensionPullDeps`. Today's installExtension() reads the + * snapshot exactly once at install time (line ~726, formerly + * upstreamMapBefore via readUpstreamExtensions); the migration + * preserves that timing. + */ + lockfileRepository: LockfileRepository; /** Tool-aware skills destination (e.g. `.claude/skills/`). */ skillsDir: string; repoDir: string; @@ -179,8 +188,12 @@ export interface ExtensionPullDeps { getExtension: (name: string) => Promise; downloadArchive: (name: string, version: string) => Promise; getChecksum: (name: string, version: string) => Promise; - /** Full path to the upstream_extensions.json lockfile. */ - lockfilePath: string; + /** + * Lockfile repository owning read+write of upstream_extensions.json. + * See {@link InstallContext.lockfileRepository} for snapshot semantics + * and the single-use rule. + */ + lockfileRepository: LockfileRepository; /** Tool-aware skills destination (e.g. `.claude/skills/`). */ skillsDir: string; repoDir: string; @@ -243,134 +256,6 @@ function isMacOsResourceFork(name: string): boolean { return name.startsWith("._"); } -/** - * Acquires an advisory lockfile. Retries with short backoff. 
- */ -async function acquireLock(lockPath: string): Promise { - for (let attempt = 0; attempt < LOCK_RETRY_COUNT; attempt++) { - try { - const file = await Deno.open(lockPath, { - create: true, - createNew: true, - write: true, - }); - return file; - } catch (error) { - if (error instanceof Deno.errors.AlreadyExists) { - if (attempt < LOCK_RETRY_COUNT - 1) { - await new Promise((r) => setTimeout(r, LOCK_RETRY_DELAY_MS)); - continue; - } - throw new UserError( - "Could not acquire lock on upstream_extensions.json. Another pull may be in progress. Please retry.", - ); - } - throw error; - } - } - throw new UserError("Could not acquire lock on upstream_extensions.json."); -} - -/** - * Updates upstream_extensions.json with a new entry, using a lockfile - * for concurrency safety and atomicWriteTextFile for crash safety. - * - * @param lockfilePath Full path to the upstream_extensions.json file. - */ -export async function updateUpstreamExtensions( - lockfilePath: string, - name: string, - version: string, - files: string[], - options?: { - include?: string[]; - checksum?: string; - filesChecksum?: string; - serverUrl?: string; - }, -): Promise { - const jsonPath = lockfilePath; - const lockPath = `${jsonPath}.lock`; - - // Ensure parent directory exists (lockfile may be in extensions/models/ - // which doesn't exist in a fresh repo that only has .swamp/) - await Deno.mkdir(dirname(jsonPath), { recursive: true }); - - const lockFile = await acquireLock(lockPath); - try { - let data: UpstreamExtensionsMap = {}; - try { - const content = await Deno.readTextFile(jsonPath); - data = JSON.parse(content) as UpstreamExtensionsMap; - } catch (error) { - if (!(error instanceof Deno.errors.NotFound)) { - throw error; - } - } - - data[name] = { - version, - pulledAt: new Date().toISOString(), - files, - ...(options?.include && options.include.length > 0 - ? { include: options.include } - : {}), - ...(options?.checksum ? 
{ checksum: options.checksum } : {}), - ...(options?.filesChecksum - ? { filesChecksum: options.filesChecksum } - : {}), - ...(options?.serverUrl ? { serverUrl: options.serverUrl } : {}), - }; - - await atomicWriteTextFile(jsonPath, JSON.stringify(data, null, 2) + "\n"); - } finally { - lockFile.close(); - try { - await Deno.remove(lockPath); - } catch { - // Best-effort cleanup - } - } -} - -/** - * Removes an extension entry from upstream_extensions.json, using a lockfile - * for concurrency safety and atomicWriteTextFile for crash safety. - * - * @param lockfilePath Full path to the upstream_extensions.json file. - */ -export async function removeUpstreamExtension( - lockfilePath: string, - name: string, -): Promise { - const jsonPath = lockfilePath; - const lockPath = `${jsonPath}.lock`; - - const lockFile = await acquireLock(lockPath); - try { - let data: UpstreamExtensionsMap = {}; - try { - const content = await Deno.readTextFile(jsonPath); - data = JSON.parse(content) as UpstreamExtensionsMap; - } catch (error) { - if (!(error instanceof Deno.errors.NotFound)) { - throw error; - } - } - - delete data[name]; - - await atomicWriteTextFile(jsonPath, JSON.stringify(data, null, 2) + "\n"); - } finally { - lockFile.close(); - try { - await Deno.remove(lockPath); - } catch { - // Best-effort cleanup - } - } -} - /** * Checks if a file exists at the given path. */ @@ -722,9 +607,11 @@ export async function installExtension( // Snapshot the prior lockfile entry's `files[]` BEFORE extraction. // Used after extraction to compute the orphan diff (paths declared // by the prior version but absent from the new version) and prune - // them. Empty when this is a first-install (no prior entry). - const upstreamMapBefore = await readUpstreamExtensions(ctx.lockfilePath); - const oldFiles = upstreamMapBefore[ref.name]?.files ?? []; + // them. Empty when this is a first-install (no prior entry). 
The + // lockfile snapshot was captured at InstallContext construction + // (per createInstallContext / createExtensionPullDeps); callers MUST + // construct a fresh context per install (see InstallContext JSDoc). + const oldFiles = ctx.lockfileRepository.getEntry(ref.name)?.files ?? []; const extInfo = await ctx.getExtension(ref.name); if (!extInfo) { @@ -1140,18 +1027,16 @@ export async function installExtension( // Prune orphans: paths declared by the prior version's lockfile // entry that are NOT in the new version's extractedFiles[]. Done - // BEFORE updateUpstreamExtensions writes the new entry so a kill - // mid-prune leaves the lockfile pointing at the OLD version — the - // next install retries the diff. The inverse ordering (write then - // prune) would orphan paths the lockfile can't see if the prune - // never runs. + // BEFORE writeEntry persists the new entry so a kill mid-prune + // leaves the lockfile pointing at the OLD version — the next install + // retries the diff. The inverse ordering (write then prune) would + // orphan paths the lockfile can't see if the prune never runs. const orphanDiff = computeOrphanDiff(oldFiles, extractedFiles); const pruned = orphanDiff.length > 0 ? await pruneOrphanFiles(orphanDiff, repoDir) : []; - await updateUpstreamExtensions( - ctx.lockfilePath, + await ctx.lockfileRepository.writeEntry( ref.name, version, extractedFiles, @@ -1170,18 +1055,11 @@ export async function installExtension( continue; } - let isInstalled = false; - try { - const upstreamContent = await Deno.readTextFile(ctx.lockfilePath); - const upstream = JSON.parse( - upstreamContent, - ) as UpstreamExtensionsMap; - if (upstream[dep]) { - isInstalled = true; - } - } catch { - // File may not exist yet - } + // ctx.lockfileRepository reflects writes the parent install has + // made — writeEntry updates the cache on every commit, and child + // installs in this loop reuse the same repository instance, so + // their writes are visible too. 
+ const isInstalled = ctx.lockfileRepository.getEntry(dep) !== null; if (!isInstalled) { const depRef = parseExtensionRef(dep); @@ -1239,7 +1117,7 @@ export async function* extensionPull( downloadArchive: deps.downloadArchive, getChecksum: deps.getChecksum, logger: ctx.logger, - lockfilePath: deps.lockfilePath, + lockfileRepository: deps.lockfileRepository, skillsDir: deps.skillsDir, repoDir: deps.repoDir, force: input.force, @@ -1264,19 +1142,26 @@ export async function* extensionPull( ); } -/** Wires real infrastructure into ExtensionPullDeps. */ -export function createExtensionPullDeps( +/** + * Wires real infrastructure into ExtensionPullDeps. Constructs a fresh + * {@link LockfileRepository} that captures a snapshot at this moment — + * the returned deps object is therefore single-use per the + * {@link InstallContext.lockfileRepository} JSDoc. Construct fresh deps + * per install operation; do not reuse across multiple installs. + */ +export async function createExtensionPullDeps( serverUrl: string, lockfilePath: string, skillsDir: string, repoDir: string, -): ExtensionPullDeps { +): Promise { const client = new ExtensionApiClient(serverUrl); + const lockfileRepository = await LockfileRepository.create(lockfilePath); return { getExtension: (name) => client.getExtension(name), downloadArchive: (name, version) => client.downloadArchive(name, version), getChecksum: (name, version) => client.getChecksum(name, version), - lockfilePath, + lockfileRepository, skillsDir, repoDir, alreadyPulled: new Set(), @@ -1284,8 +1169,14 @@ export function createExtensionPullDeps( }; } -/** Creates an InstallContext from an ExtensionApiClient (for extension_update compatibility). */ -export function createInstallContext( +/** + * Creates an InstallContext from an ExtensionApiClient (for + * extension_update compatibility). 
Like {@link createExtensionPullDeps}, + * constructs a fresh snapshot-captured {@link LockfileRepository}; the + * returned context is single-use per the + * {@link InstallContext.lockfileRepository} JSDoc. + */ +export async function createInstallContext( serverUrl: string, opts: { lockfilePath: string; @@ -1294,14 +1185,15 @@ export function createInstallContext( force: boolean; logger?: Logger; }, -): InstallContext { +): Promise { const client = new ExtensionApiClient(serverUrl); + const lockfileRepository = await LockfileRepository.create(opts.lockfilePath); return { getExtension: (name) => client.getExtension(name), downloadArchive: (name, version) => client.downloadArchive(name, version), getChecksum: (name, version) => client.getChecksum(name, version), logger: opts.logger, - lockfilePath: opts.lockfilePath, + lockfileRepository, skillsDir: opts.skillsDir, repoDir: opts.repoDir, force: opts.force, diff --git a/src/libswamp/extensions/pull_test.ts b/src/libswamp/extensions/pull_test.ts index 974d8747..65eb65b5 100644 --- a/src/libswamp/extensions/pull_test.ts +++ b/src/libswamp/extensions/pull_test.ts @@ -23,9 +23,9 @@ import { join } from "@std/path"; import { computeOrphanDiff, parseExtensionRef, - updateUpstreamExtensions, validateExtensionName, } from "./pull.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; import { UserError } from "../../domain/errors.ts"; Deno.test("parseExtensionRef: parses name without version", () => { @@ -81,13 +81,12 @@ Deno.test("validateExtensionName: rejects invalid names", () => { ); }); -Deno.test("updateUpstreamExtensions: writes and updates entries", async () => { +Deno.test("LockfileRepository.writeEntry: writes and updates entries", async () => { const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); try { const lockfilePath = join(tmpDir, "upstream_extensions.json"); - await updateUpstreamExtensions(lockfilePath, "@test/first", "1.0.0", [ - "a.yaml", - ]); + 
const repo = await LockfileRepository.create(lockfilePath); + await repo.writeEntry("@test/first", "1.0.0", ["a.yaml"]); const content = await Deno.readTextFile(lockfilePath); const data = JSON.parse(content); diff --git a/src/libswamp/extensions/rm.ts b/src/libswamp/extensions/rm.ts index a5903b15..9b644e29 100644 --- a/src/libswamp/extensions/rm.ts +++ b/src/libswamp/extensions/rm.ts @@ -18,27 +18,15 @@ // along with Swamp. If not, see . import { dirname, join, resolve } from "@std/path"; -import { readUpstreamExtensions } from "../../infrastructure/persistence/upstream_extensions.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; +import type { UpstreamExtensionsMap } from "../../infrastructure/persistence/upstream_extensions.ts"; import { parseExtensionManifest } from "../../domain/extensions/extension_manifest.ts"; -import { atomicWriteTextFile } from "../../infrastructure/persistence/atomic_write.ts"; import { UserError } from "../../domain/errors.ts"; import type { LibSwampContext } from "../context.ts"; import type { SwampError } from "../errors.ts"; import { notFound } from "../errors.ts"; import { withGeneratorSpan } from "../../infrastructure/tracing/mod.ts"; -const LOCK_RETRY_COUNT = 10; -const LOCK_RETRY_DELAY_MS = 100; - -/** Upstream extension entry for rm operations. */ -export interface UpstreamEntry { - version: string; - pulledAt: string; - files?: string[]; -} - -/** Map of extension name to upstream entry. */ -export type UpstreamMap = Record; /** Preview data returned before confirmation. */ export interface ExtensionRmPreview { @@ -69,21 +57,20 @@ export interface ExtensionRmInput { /** Dependencies for the extension rm operation. 
*/ export interface ExtensionRmDeps { - readUpstreamExtensions: (lockfilePath: string) => Promise; findDependents: ( repoDir: string, - upstreamData: UpstreamMap, + upstreamData: UpstreamExtensionsMap, targetName: string, ) => Promise; removeFile: (path: string) => Promise; readDirEntries: (path: string) => Promise; removeDir: (path: string) => Promise; - removeUpstreamExtension: ( - lockfilePath: string, - name: string, - ) => Promise; - /** Full path to the upstream_extensions.json lockfile. */ - lockfilePath: string; + /** + * Lockfile repository owning read+write of upstream_extensions.json. + * Captures a snapshot at construction (per its own JSDoc); construct + * fresh deps per rm operation via {@link createExtensionRmDeps}. + */ + lockfileRepository: LockfileRepository; repoDir: string; } @@ -93,7 +80,7 @@ export interface ExtensionRmDeps { */ export async function findDependents( repoDir: string, - upstreamData: UpstreamMap, + upstreamData: UpstreamExtensionsMap, targetName: string, ): Promise { const dependents: string[] = []; @@ -155,73 +142,6 @@ async function pruneEmptyDirs( return removed; } -/** - * Acquires an advisory lockfile. Retries with short backoff. - * Returns a cleanup function to release the lock. - */ -async function acquireLock(lockPath: string): Promise { - for (let attempt = 0; attempt < LOCK_RETRY_COUNT; attempt++) { - try { - const file = await Deno.open(lockPath, { - create: true, - createNew: true, - write: true, - }); - return file; - } catch (error) { - if (error instanceof Deno.errors.AlreadyExists) { - if (attempt < LOCK_RETRY_COUNT - 1) { - await new Promise((r) => setTimeout(r, LOCK_RETRY_DELAY_MS)); - continue; - } - throw new Error( - "Could not acquire lock on upstream_extensions.json. 
Another operation may be in progress.", - ); - } - throw error; - } - } - throw new Error("Could not acquire lock on upstream_extensions.json."); -} - -/** - * Removes an extension entry from upstream_extensions.json, using a lockfile - * for concurrency safety and atomicWriteTextFile for crash safety. - * - * @param lockfilePath Full path to the upstream_extensions.json file. - */ -export async function removeUpstreamExtension( - lockfilePath: string, - name: string, -): Promise { - const jsonPath = lockfilePath; - const lockPath = `${jsonPath}.lock`; - - const lockFile = await acquireLock(lockPath); - try { - let data: UpstreamMap = {}; - try { - const content = await Deno.readTextFile(jsonPath); - data = JSON.parse(content) as UpstreamMap; - } catch (error) { - if (!(error instanceof Deno.errors.NotFound)) { - throw error; - } - } - - delete data[name]; - - await atomicWriteTextFile(jsonPath, JSON.stringify(data, null, 2) + "\n"); - } finally { - lockFile.close(); - try { - await Deno.remove(lockPath); - } catch { - // Best-effort cleanup - } - } -} - /** Gathers preview info for the extension rm operation. 
*/ export async function extensionRmPreview( ctx: LibSwampContext, @@ -230,7 +150,7 @@ export async function extensionRmPreview( ): Promise { ctx.logger.debug`Looking up extension: ${input.extensionName}`; - const upstreamData = await deps.readUpstreamExtensions(deps.lockfilePath); + const upstreamData = deps.lockfileRepository.getAllEntries(); const entry = upstreamData[input.extensionName]; if (!entry) { @@ -271,8 +191,7 @@ export async function* extensionRm( (async function* () { yield { kind: "deleting" }; - const upstreamData = await deps.readUpstreamExtensions(deps.lockfilePath); - const entry = upstreamData[input.extensionName]; + const entry = deps.lockfileRepository.getEntry(input.extensionName); if (!entry || !entry.files) { yield { @@ -307,10 +226,7 @@ export async function* extensionRm( const dirsRemoved = await pruneEmptyDirs(parentDirs, deps.repoDir, deps); - await deps.removeUpstreamExtension( - deps.lockfilePath, - input.extensionName, - ); + await deps.lockfileRepository.removeEntry(input.extensionName); yield { kind: "completed", @@ -326,13 +242,18 @@ export async function* extensionRm( ); } -/** Wires real infrastructure into ExtensionRmDeps. */ -export function createExtensionRmDeps( +/** + * Wires real infrastructure into ExtensionRmDeps. Constructs a fresh + * {@link LockfileRepository} that captures a snapshot at this moment; + * the returned deps object is single-use per the + * {@link ExtensionRmDeps.lockfileRepository} JSDoc. 
+ */ +export async function createExtensionRmDeps( repoDir: string, lockfilePath: string, -): ExtensionRmDeps { +): Promise { + const lockfileRepository = await LockfileRepository.create(lockfilePath); return { - readUpstreamExtensions, findDependents, removeFile: async (path: string) => { const stat = await Deno.stat(path); @@ -346,8 +267,7 @@ export function createExtensionRmDeps( return entries; }, removeDir: (path: string) => Deno.remove(path), - removeUpstreamExtension, - lockfilePath, + lockfileRepository, repoDir, }; } diff --git a/src/libswamp/extensions/rm_test.ts b/src/libswamp/extensions/rm_test.ts index 1f45ab65..24c65f20 100644 --- a/src/libswamp/extensions/rm_test.ts +++ b/src/libswamp/extensions/rm_test.ts @@ -26,103 +26,148 @@ import { type ExtensionRmDeps, type ExtensionRmEvent, extensionRmPreview, - type UpstreamMap, } from "./rm.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; +import type { UpstreamExtensionsMap } from "../../infrastructure/persistence/upstream_extensions.ts"; import { UserError } from "../../domain/errors.ts"; function fakeCtx() { return createLibSwampContext(); } -function fakeDeps( - overrides: Partial = {}, -): ExtensionRmDeps { - const defaultUpstream: UpstreamMap = { - "@test/ext": { - version: "1.0.0", - pulledAt: "2026-01-01T00:00:00Z", - files: ["models/ext/model.yaml", "models/ext/model.ts"], +const DEFAULT_UPSTREAM: UpstreamExtensionsMap = { + "@test/ext": { + version: "1.0.0", + pulledAt: "2026-01-01T00:00:00Z", + files: ["models/ext/model.yaml", "models/ext/model.ts"], + }, +}; + +/** + * Spins up a real temp lockfile pre-seeded with `upstream`, returns a + * LockfileRepository whose writeEntry/removeEntry hit that real file. + * Caller cleans up via the returned `cleanup` fn. 
+ */ +async function withFakeLockfile( + upstream: UpstreamExtensionsMap, +): Promise< + { lockfileRepository: LockfileRepository; cleanup: () => Promise } +> { + const tmpDir = await Deno.makeTempDir({ prefix: "swamp_rm_test_" }); + const lockfilePath = `${tmpDir}/upstream_extensions.json`; + if (Object.keys(upstream).length > 0) { + await Deno.writeTextFile(lockfilePath, JSON.stringify(upstream, null, 2)); + } + const lockfileRepository = await LockfileRepository.create(lockfilePath); + return { + lockfileRepository, + cleanup: async () => { + if (Deno.build.os === "windows") { + await Deno.remove(tmpDir, { recursive: true }).catch(() => {}); + } else { + await Deno.remove(tmpDir, { recursive: true }); + } }, }; +} + +async function fakeDeps( + overrides: Partial & { + upstream?: UpstreamExtensionsMap; + } = {}, +): Promise<{ deps: ExtensionRmDeps; cleanup: () => Promise }> { + const { upstream, ...rest } = overrides; + const { lockfileRepository, cleanup } = await withFakeLockfile( + upstream ?? 
DEFAULT_UPSTREAM, + ); return { - readUpstreamExtensions: () => Promise.resolve(defaultUpstream), - findDependents: () => Promise.resolve([]), - removeFile: () => Promise.resolve(), - readDirEntries: () => Promise.resolve([]), - removeDir: () => Promise.resolve(), - removeUpstreamExtension: () => Promise.resolve(), - lockfilePath: "/fake/models/upstream_extensions.json", - repoDir: "/fake/repo", - ...overrides, + deps: { + findDependents: () => Promise.resolve([]), + removeFile: () => Promise.resolve(), + readDirEntries: () => Promise.resolve([]), + removeDir: () => Promise.resolve(), + lockfileRepository, + repoDir: "/fake/repo", + ...rest, + }, + cleanup, }; } Deno.test("extensionRmPreview: returns preview for installed extension", async () => { const ctx = fakeCtx(); - const deps = fakeDeps(); - - const preview = await extensionRmPreview(ctx, deps, { - extensionName: "@test/ext", - }); + const { deps, cleanup } = await fakeDeps(); + try { + const preview = await extensionRmPreview(ctx, deps, { + extensionName: "@test/ext", + }); - assertEquals(preview.name, "@test/ext"); - assertEquals(preview.version, "1.0.0"); - assertEquals(preview.fileCount, 2); - assertEquals(preview.dependents, []); + assertEquals(preview.name, "@test/ext"); + assertEquals(preview.version, "1.0.0"); + assertEquals(preview.fileCount, 2); + assertEquals(preview.dependents, []); + } finally { + await cleanup(); + } }); Deno.test("extensionRmPreview: includes dependents", async () => { const ctx = fakeCtx(); - const deps = fakeDeps({ + const { deps, cleanup } = await fakeDeps({ findDependents: () => Promise.resolve(["@test/other"]), }); + try { + const preview = await extensionRmPreview(ctx, deps, { + extensionName: "@test/ext", + }); - const preview = await extensionRmPreview(ctx, deps, { - extensionName: "@test/ext", - }); - - assertEquals(preview.dependents, ["@test/other"]); + assertEquals(preview.dependents, ["@test/other"]); + } finally { + await cleanup(); + } }); 
Deno.test("extensionRmPreview: throws not_found for missing extension", async () => { const ctx = fakeCtx(); - const deps = fakeDeps({ - readUpstreamExtensions: () => Promise.resolve({}), - }); - - await assertRejects( - () => extensionRmPreview(ctx, deps, { extensionName: "@test/missing" }), - UserError, - "is not installed", - ); + const { deps, cleanup } = await fakeDeps({ upstream: {} }); + try { + await assertRejects( + () => extensionRmPreview(ctx, deps, { extensionName: "@test/missing" }), + UserError, + "is not installed", + ); + } finally { + await cleanup(); + } }); Deno.test("extensionRmPreview: throws validation_failed when no file tracking", async () => { const ctx = fakeCtx(); - const deps = fakeDeps({ - readUpstreamExtensions: () => - Promise.resolve({ - "@test/ext": { - version: "1.0.0", - pulledAt: "2026-01-01T00:00:00Z", - }, - }), + const { deps, cleanup } = await fakeDeps({ + upstream: { + "@test/ext": { + version: "1.0.0", + pulledAt: "2026-01-01T00:00:00Z", + }, + }, }); - - await assertRejects( - () => extensionRmPreview(ctx, deps, { extensionName: "@test/ext" }), - UserError, - "file tracking", - ); + try { + await assertRejects( + () => extensionRmPreview(ctx, deps, { extensionName: "@test/ext" }), + UserError, + "file tracking", + ); + } finally { + await cleanup(); + } }); Deno.test("extensionRm: deletes files and yields completed", async () => { const removedFiles: string[] = []; - let upstreamRemoved = false; const ctx = fakeCtx(); - const deps = fakeDeps({ + const { deps, cleanup } = await fakeDeps({ removeFile: (path: string) => { removedFiles.push(path); return Promise.resolve(); @@ -137,76 +182,84 @@ Deno.test("extensionRm: deletes files and yields completed", async () => { isSymlink: false, }, ]), - removeUpstreamExtension: () => { - upstreamRemoved = true; - return Promise.resolve(); - }, }); - - await assertCompletes( - extensionRm(ctx, deps, { extensionName: "@test/ext" }), - { - kind: "completed", - data: { - name: "@test/ext", 
- version: "1.0.0", - filesDeleted: 2, - filesSkipped: 0, - dirsRemoved: 0, + try { + await assertCompletes( + extensionRm(ctx, deps, { extensionName: "@test/ext" }), + { + kind: "completed", + data: { + name: "@test/ext", + version: "1.0.0", + filesDeleted: 2, + filesSkipped: 0, + dirsRemoved: 0, + }, }, - }, - ); + ); - assertEquals(removedFiles.length, 2); - assertEquals(upstreamRemoved, true); + assertEquals(removedFiles.length, 2); + // After completion the entry must be gone (writeEntry / removeEntry + // update the cache in lockstep with disk). + assertEquals(deps.lockfileRepository.getEntry("@test/ext"), null); + } finally { + await cleanup(); + } }); Deno.test("extensionRm: counts skipped files when NotFound", async () => { const ctx = fakeCtx(); - const deps = fakeDeps({ + const { deps, cleanup } = await fakeDeps({ removeFile: () => { throw new Deno.errors.NotFound("not found"); }, }); - - await assertCompletes( - extensionRm(ctx, deps, { extensionName: "@test/ext" }), - { - kind: "completed", - data: { - name: "@test/ext", - version: "1.0.0", - filesDeleted: 0, - filesSkipped: 2, - dirsRemoved: 0, + try { + await assertCompletes( + extensionRm(ctx, deps, { extensionName: "@test/ext" }), + { + kind: "completed", + data: { + name: "@test/ext", + version: "1.0.0", + filesDeleted: 0, + filesSkipped: 2, + dirsRemoved: 0, + }, }, - }, - ); + ); + } finally { + await cleanup(); + } }); Deno.test("extensionRm: yields error for missing extension", async () => { const ctx = fakeCtx(); - const deps = fakeDeps({ - readUpstreamExtensions: () => Promise.resolve({}), - }); - - await assertErrors( - extensionRm(ctx, deps, { extensionName: "@test/missing" }), - "not_found", - ); + const { deps, cleanup } = await fakeDeps({ upstream: {} }); + try { + await assertErrors( + extensionRm(ctx, deps, { extensionName: "@test/missing" }), + "not_found", + ); + } finally { + await cleanup(); + } }); Deno.test("extensionRm: events include deleting then completed", async () => { 
const ctx = fakeCtx(); - const deps = fakeDeps(); - - const events = await collect( - extensionRm(ctx, deps, { extensionName: "@test/ext" }), - ); + const { deps, cleanup } = await fakeDeps(); + try { + const events = await collect( + extensionRm(ctx, deps, { extensionName: "@test/ext" }), + ); - assertEquals(events.length, 2); - assertEquals(events[0].kind, "deleting"); - assertEquals(events[1].kind, "completed"); + assertEquals(events.length, 2); + assertEquals(events[0].kind, "deleting"); + assertEquals(events[1].kind, "completed"); + } finally { + await cleanup(); + } }); // --- issue 120 regression coverage --- @@ -270,7 +323,7 @@ Deno.test("extensionRm: removing one sibling leaves the other intact under a sha }); const ctx = createLibSwampContext({}); - const deps = createExtensionRmDeps(tmpDir, lockfilePath); + const deps = await createExtensionRmDeps(tmpDir, lockfilePath); const events = await collect( extensionRm(ctx, deps, { extensionName: "@swamp/aws/ec2" }), ); @@ -372,7 +425,7 @@ Deno.test("extensionRmPreview: resolves dependents via the tracked per-extension }); const ctx = createLibSwampContext({}); - const deps = createExtensionRmDeps(tmpDir, lockfilePath); + const deps = await createExtensionRmDeps(tmpDir, lockfilePath); const preview = await extensionRmPreview(ctx, deps, { extensionName: "@fake/base", }); diff --git a/src/libswamp/extensions/update.ts b/src/libswamp/extensions/update.ts index 60f5991c..c5b6b25f 100644 --- a/src/libswamp/extensions/update.ts +++ b/src/libswamp/extensions/update.ts @@ -24,7 +24,7 @@ import { type ExtensionUpdateStatus, } from "../../domain/extensions/extension_update_service.ts"; import { ExtensionApiClient } from "../../infrastructure/http/extension_api_client.ts"; -import { readUpstreamExtensions } from "../../infrastructure/persistence/upstream_extensions.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; import type { LibSwampContext } from "../context.ts"; import 
type { SwampError } from "../errors.ts"; import { validationFailed } from "../errors.ts"; @@ -62,10 +62,12 @@ export interface ExtensionUpdateInput { /** Dependencies for the extension update operation. */ export interface ExtensionUpdateDeps { - /** Read installed upstream extensions. Returns map of name -> { version }. */ - readUpstreamExtensions: () => Promise< - Record - >; + /** + * Lockfile repository — the snapshot of installed extensions to + * evaluate for updates. Captures upstream_extensions.json at + * construction. + */ + lockfileRepository: LockfileRepository; /** Get extension info from registry (latest version). */ getExtension: ( name: string, @@ -83,19 +85,19 @@ export interface ExtensionUpdateDeps { } /** Wires real infrastructure into ExtensionUpdateDeps. */ -export function createExtensionUpdateDeps(options: { +export async function createExtensionUpdateDeps(options: { lockfilePath: string; serverUrl?: string; installExtension: ( name: string, version: string, ) => Promise; -}): ExtensionUpdateDeps { +}): Promise { const extensionClient = new ExtensionApiClient( options.serverUrl ?? 
resolveServerUrl(), ); return { - readUpstreamExtensions: () => readUpstreamExtensions(options.lockfilePath), + lockfileRepository: await LockfileRepository.create(options.lockfilePath), getExtension: async (name) => { try { const info = await extensionClient.getExtension(name); @@ -121,7 +123,7 @@ export async function* extensionUpdate( (async function* () { ctx.logger.debug`Executing extension update`; - const upstream = await deps.readUpstreamExtensions(); + const upstream = deps.lockfileRepository.getAllEntries(); const installedNames = Object.keys(upstream); if (installedNames.length === 0) { diff --git a/src/libswamp/extensions/update_test.ts b/src/libswamp/extensions/update_test.ts index 6e9adebb..9360893b 100644 --- a/src/libswamp/extensions/update_test.ts +++ b/src/libswamp/extensions/update_test.ts @@ -27,15 +27,38 @@ import { type ExtensionUpdateEvent, type ExtensionUpdateInput, } from "./update.ts"; +import { LockfileRepository } from "../../infrastructure/persistence/lockfile_repository.ts"; +import type { UpstreamExtensionsMap } from "../../infrastructure/persistence/upstream_extensions.ts"; + +/** + * Builds a fixture UpstreamExtensionsMap from a shorthand + * `{ name: version }` map, synthesizing a placeholder pulledAt so each + * test doesn't have to spell out the full entry shape. + */ +function shorthandUpstream( + versions: Readonly>, +): UpstreamExtensionsMap { + const map: UpstreamExtensionsMap = {}; + for (const [name, version] of Object.entries(versions)) { + map[name] = { version, pulledAt: "1970-01-01T00:00:00.000Z" }; + } + return map; +} function makeDeps( - overrides: Partial = {}, + overrides: Partial & { + upstream?: Record; + } = {}, ): ExtensionUpdateDeps { + const { upstream, ...rest } = overrides; return { - readUpstreamExtensions: () => Promise.resolve({}), + lockfileRepository: new LockfileRepository( + "/test/repo/upstream_extensions.json", + shorthandUpstream(upstream ?? 
{}), + ), getExtension: () => Promise.resolve(null), installExtension: () => Promise.resolve(undefined), - ...overrides, + ...rest, }; } @@ -71,10 +94,7 @@ Deno.test("extensionUpdate: result() resolves on empty installation", async () = }); Deno.test("extensionUpdate: specific extension not installed yields error", async () => { - const deps = makeDeps({ - readUpstreamExtensions: () => - Promise.resolve({ "@ns/other": { version: "2026.01.01.1" } }), - }); + const deps = makeDeps({ upstream: { "@ns/other": "2026.01.01.1" } }); const input: ExtensionUpdateInput = { extensionName: "@ns/missing", checkOnly: false, @@ -94,11 +114,10 @@ Deno.test("extensionUpdate: specific extension not installed yields error", asyn Deno.test("extensionUpdate: check mode with updates available", async () => { const deps = makeDeps({ - readUpstreamExtensions: () => - Promise.resolve({ - "@ns/a": { version: "2026.01.01.1" }, - "@ns/b": { version: "2026.02.01.1" }, - }), + upstream: { + "@ns/a": "2026.01.01.1", + "@ns/b": "2026.02.01.1", + }, getExtension: (name) => { if (name === "@ns/a") { return Promise.resolve({ latestVersion: "2026.03.01.1" }); @@ -127,8 +146,7 @@ Deno.test("extensionUpdate: check mode with updates available", async () => { Deno.test("extensionUpdate: check mode all up to date", async () => { const deps = makeDeps({ - readUpstreamExtensions: () => - Promise.resolve({ "@ns/a": { version: "2026.02.01.1" } }), + upstream: { "@ns/a": "2026.02.01.1" }, getExtension: () => Promise.resolve({ latestVersion: "2026.02.01.1" }), }); const input: ExtensionUpdateInput = { checkOnly: true }; @@ -148,8 +166,7 @@ Deno.test("extensionUpdate: check mode all up to date", async () => { Deno.test("extensionUpdate: update mode successfully updates", async () => { const installed: string[] = []; const deps = makeDeps({ - readUpstreamExtensions: () => - Promise.resolve({ "@ns/a": { version: "2026.01.01.1" } }), + upstream: { "@ns/a": "2026.01.01.1" }, getExtension: () => Promise.resolve({ 
latestVersion: "2026.03.01.1" }), installExtension: (name, version) => { installed.push(`${name}@${version}`); @@ -181,8 +198,7 @@ Deno.test("extensionUpdate: update mode successfully updates", async () => { Deno.test("extensionUpdate: update mode with install failure", async () => { const deps = makeDeps({ - readUpstreamExtensions: () => - Promise.resolve({ "@ns/a": { version: "2026.01.01.1" } }), + upstream: { "@ns/a": "2026.01.01.1" }, getExtension: () => Promise.resolve({ latestVersion: "2026.03.01.1" }), installExtension: () => { return Promise.reject(new Error("Network timeout")); @@ -204,8 +220,7 @@ Deno.test("extensionUpdate: update mode with install failure", async () => { Deno.test("extensionUpdate: registry fetch failure records not_found", async () => { const deps = makeDeps({ - readUpstreamExtensions: () => - Promise.resolve({ "@ns/a": { version: "2026.01.01.1" } }), + upstream: { "@ns/a": "2026.01.01.1" }, getExtension: () => Promise.resolve(null), }); const input: ExtensionUpdateInput = { checkOnly: true }; @@ -251,8 +266,7 @@ Deno.test( "extensionUpdate: emits orphans-pruned event when installExtension returns pruned paths", async () => { const deps = makeDeps({ - readUpstreamExtensions: () => - Promise.resolve({ "@ns/a": { version: "2026.01.01.1" } }), + upstream: { "@ns/a": "2026.01.01.1" }, getExtension: () => Promise.resolve({ latestVersion: "2026.03.01.1" }), installExtension: (name, version) => Promise.resolve( @@ -288,8 +302,7 @@ Deno.test( "extensionUpdate: NO orphans-pruned event when result has empty pruned list", async () => { const deps = makeDeps({ - readUpstreamExtensions: () => - Promise.resolve({ "@ns/a": { version: "2026.01.01.1" } }), + upstream: { "@ns/a": "2026.01.01.1" }, getExtension: () => Promise.resolve({ latestVersion: "2026.03.01.1" }), installExtension: (name, version) => Promise.resolve(buildInstallResult(name, version, [])), diff --git a/src/libswamp/mod.ts b/src/libswamp/mod.ts index 8d58c833..3515729c 100644 --- 
a/src/libswamp/mod.ts +++ b/src/libswamp/mod.ts @@ -648,7 +648,6 @@ export { type InstallResult, parseExtensionRef, resolveServerUrl, - updateUpstreamExtensions, validateExtensionName, } from "./extensions/pull.ts"; export { @@ -660,9 +659,11 @@ export { type ExtensionRmInput, type ExtensionRmPreview, extensionRmPreview, - removeUpstreamExtension, } from "./extensions/rm.ts"; +// Lockfile repository — sole gateway for upstream_extensions.json. +export { LockfileRepository } from "../infrastructure/persistence/lockfile_repository.ts"; + // Extension layout detection export { classifyExtensionFile,