diff --git a/openspec/changes/feat-task-caching/design.md b/openspec/changes/archive/2026-04-03-feat-task-caching/design.md similarity index 100% rename from openspec/changes/feat-task-caching/design.md rename to openspec/changes/archive/2026-04-03-feat-task-caching/design.md diff --git a/openspec/changes/feat-task-caching/proposal.md b/openspec/changes/archive/2026-04-03-feat-task-caching/proposal.md similarity index 100% rename from openspec/changes/feat-task-caching/proposal.md rename to openspec/changes/archive/2026-04-03-feat-task-caching/proposal.md diff --git a/openspec/changes/feat-task-caching/specs/task-runner/spec.md b/openspec/changes/archive/2026-04-03-feat-task-caching/specs/task-runner/spec.md similarity index 100% rename from openspec/changes/feat-task-caching/specs/task-runner/spec.md rename to openspec/changes/archive/2026-04-03-feat-task-caching/specs/task-runner/spec.md diff --git a/openspec/changes/feat-task-caching/tasks.md b/openspec/changes/archive/2026-04-03-feat-task-caching/tasks.md similarity index 71% rename from openspec/changes/feat-task-caching/tasks.md rename to openspec/changes/archive/2026-04-03-feat-task-caching/tasks.md index 9afb30a..56947b8 100644 --- a/openspec/changes/feat-task-caching/tasks.md +++ b/openspec/changes/archive/2026-04-03-feat-task-caching/tasks.md @@ -1,12 +1,12 @@ ## 1. Implementation -- [ ] 1.1 Define `ICacheProvider` interface in `src/contracts/ICacheProvider.ts` with `get(key)`, `set(key, result, ttl)`, and `delete(key)` methods. -- [ ] 1.2 Implement `MemoryCacheProvider` in `src/utils/MemoryCacheProvider.ts` as the default in-memory cache implementation. -- [ ] 1.3 Update `TaskStep` interface in `src/TaskStep.ts` to include optional `cache` configuration: +- [x] 1.1 Define `ICacheProvider` interface in `src/contracts/ICacheProvider.ts` with `get(key)`, `set(key, result, ttl)`, and `delete(key)` methods. 
+- [x] 1.2 Implement `MemoryCacheProvider` in `src/utils/MemoryCacheProvider.ts` as the default in-memory cache implementation. +- [x] 1.3 Update `TaskStep` interface in `src/TaskStep.ts` to include optional `cache` configuration: - `key`: `(context: TContext) => string | Promise` - `ttl`: `number` (optional, default to infinite) - `restore`: `(context: TContext, cachedResult: TaskResult) => void | Promise` (optional, to re-apply context side effects) -- [ ] 1.4 Create `CachingExecutionStrategy` in `src/strategies/CachingExecutionStrategy.ts`. +- [x] 1.4 Create `CachingExecutionStrategy` in `src/strategies/CachingExecutionStrategy.ts`. - It should implement `IExecutionStrategy`. - It should accept an inner `IExecutionStrategy` and an `ICacheProvider`. - In `execute`: @@ -18,7 +18,7 @@ - Execute inner strategy. - If successful, store result in cache provider using `ttl`. - Return result. -- [ ] 1.5 Update `TaskRunner.ts` to support configuring the cache provider and wrapping the execution strategy with `CachingExecutionStrategy` if caching is enabled. -- [ ] 1.6 Add unit tests for `MemoryCacheProvider`. -- [ ] 1.7 Add unit tests for `CachingExecutionStrategy`, verifying cache hits, misses, and restoration of context. -- [ ] 1.8 Add integration tests in `tests/TaskRunnerCaching.test.ts` to verify end-to-end caching behavior with context updates. +- [x] 1.5 Update `TaskRunner.ts` to support configuring the cache provider and wrapping the execution strategy with `CachingExecutionStrategy` if caching is enabled. +- [x] 1.6 Add unit tests for `MemoryCacheProvider`. +- [x] 1.7 Add unit tests for `CachingExecutionStrategy`, verifying cache hits, misses, and restoration of context. +- [x] 1.8 Add integration tests in `tests/TaskRunnerCaching.test.ts` to verify end-to-end caching behavior with context updates. 
diff --git a/openspec/specs/task-runner/spec.md b/openspec/specs/task-runner/spec.md index 83cc950..f5b3adf 100644 --- a/openspec/specs/task-runner/spec.md +++ b/openspec/specs/task-runner/spec.md @@ -172,3 +172,61 @@ The system SHALL record timing metrics for each executed task, including start t #### Scenario: Failed execution - **WHEN** a task fails - **THEN** the task result contains the start timestamp, end timestamp, and duration in milliseconds +## ADDED Requirements + +### Requirement: Task Caching Configuration + +The `TaskStep` interface SHALL support an optional `cache` property of type `TaskCacheConfig`. + +#### Scenario: Cache Config Structure + +- **GIVEN** a `TaskCacheConfig` object +- **THEN** it SHALL support: + - `key`: A function returning a unique string key based on the context. + - `ttl`: Optional time-to-live in milliseconds. + - `restore`: Optional function to restore context side effects from a cached result. + +### Requirement: Caching Execution Strategy + +The system SHALL provide a `CachingExecutionStrategy` that implements `IExecutionStrategy` and wraps another `IExecutionStrategy`. + +#### Scenario: Cache Miss Execution + +- **WHEN** the `CachingExecutionStrategy` executes a task with a cache key that is NOT present in the cache provider +- **THEN** it SHALL execute the task using the inner strategy. +- **AND** it SHALL store the result in the cache provider if execution is successful. +- **AND** it SHALL return the result. + +#### Scenario: Cache Hit Execution + +- **WHEN** the `CachingExecutionStrategy` executes a task with a cache key that IS present in the cache provider +- **THEN** it SHALL NOT execute the inner strategy. +- **AND** it SHALL invoke the `restore` function (if provided) with the current context and the cached result. +- **AND** it SHALL return the cached result. + +#### Scenario: Cache Expiration + +- **WHEN** a cached item's TTL has expired +- **THEN** the cache provider SHALL NOT return the item. 
+- **AND** the strategy SHALL proceed as a cache miss. + +### Requirement: Cache Provider Interface + +The system SHALL define an `ICacheProvider` interface for pluggable caching backends. + +#### Scenario: Interface Methods + +- **GIVEN** an `ICacheProvider` implementation +- **THEN** it SHALL support: + - `get(key: string): Promise<TaskResult | undefined>` + - `set(key: string, value: TaskResult, ttl?: number): Promise<void>` + - `delete(key: string): Promise<void>` + +### Requirement: Default Memory Cache + +The system SHALL provide a `MemoryCacheProvider` as the default implementation of `ICacheProvider`. + +#### Scenario: In-Memory Storage + +- **WHEN** items are set in `MemoryCacheProvider` +- **THEN** they are stored in memory and retrieved correctly until process termination or expiration. diff --git a/src/TaskRunner.ts b/src/TaskRunner.ts index 63ba44e..e121345 100644 --- a/src/TaskRunner.ts +++ b/src/TaskRunner.ts @@ -17,6 +17,8 @@ import { RetryingExecutionStrategy } from "./strategies/RetryingExecutionStrateg import { Plugin } from "./contracts/Plugin.js"; import { PluginManager } from "./PluginManager.js"; import { DryRunExecutionStrategy } from "./strategies/DryRunExecutionStrategy.js"; +import { ICacheProvider } from "./contracts/ICacheProvider.js"; +import { CachingExecutionStrategy } from "./strategies/CachingExecutionStrategy.js"; const MERMAID_ID_REGEX = /[^a-zA-Z0-9_-]/g; @@ -32,6 +34,7 @@ export class TaskRunner { new RetryingExecutionStrategy(new StandardExecutionStrategy()); private readonly pluginManager: PluginManager; + private cacheProvider?: ICacheProvider; /** * @param context The shared context object to be passed to each task. @@ -84,6 +87,16 @@ export class TaskRunner { return this; } + /** + * Sets the cache provider for task caching. + * @param provider The cache provider. + * @returns The TaskRunner instance for chaining. 
+ */ + public setCacheProvider(provider: ICacheProvider): this { + this.cacheProvider = provider; + return this; + } + /** * Generates a Mermaid.js graph representation of the task workflow. * @param steps The list of tasks to visualize. @@ -195,6 +208,11 @@ export class TaskRunner { const stateManager = new TaskStateManager(this.eventBus); let strategy = this.executionStrategy; + + if (this.cacheProvider && !config?.dryRun) { + strategy = new CachingExecutionStrategy(strategy, this.cacheProvider); + } + if (config?.dryRun) { strategy = new DryRunExecutionStrategy(); } diff --git a/src/TaskStep.ts b/src/TaskStep.ts index 10fed0c..dab3637 100644 --- a/src/TaskStep.ts +++ b/src/TaskStep.ts @@ -9,6 +9,12 @@ import { TaskLoopConfig } from "./contracts/TaskLoopConfig.js"; export interface TaskStep<TContext> { /** A unique identifier for this task. */ name: string; + /** Optional cache configuration. */ + cache?: { + key: (context: TContext) => string | Promise<string>; + ttl?: number; + restore?: (context: TContext, cachedResult: TaskResult) => void | Promise<void>; + }; /** An optional list of task names that must complete successfully before this step can run. */ dependencies?: string[]; /** Optional retry configuration for the task. */ diff --git a/src/contracts/ICacheProvider.ts b/src/contracts/ICacheProvider.ts new file mode 100644 index 0000000..ba5c789 --- /dev/null +++ b/src/contracts/ICacheProvider.ts @@ -0,0 +1,27 @@ +import { TaskResult } from "../TaskResult.js"; + +/** + * Interface for cache providers used by the CachingExecutionStrategy. + */ +export interface ICacheProvider { + /** + * Retrieves a cached result by its key. + * @param key The cache key. + * @returns The cached TaskResult or undefined if not found. + */ + get(key: string): Promise<TaskResult | undefined> | TaskResult | undefined; + + /** + * Stores a result in the cache. + * @param key The cache key. + * @param result The task result to cache. + * @param ttl Optional time-to-live in milliseconds. 
+ */ + set(key: string, result: TaskResult, ttl?: number): Promise<void> | void; + + /** + * Deletes a cached result by its key. + * @param key The cache key. + */ + delete(key: string): Promise<void> | void; +} diff --git a/src/strategies/CachingExecutionStrategy.ts b/src/strategies/CachingExecutionStrategy.ts new file mode 100644 index 0000000..c14d0cb --- /dev/null +++ b/src/strategies/CachingExecutionStrategy.ts @@ -0,0 +1,47 @@ +import { IExecutionStrategy } from "./IExecutionStrategy.js"; +import { TaskStep } from "../TaskStep.js"; +import { TaskResult } from "../TaskResult.js"; +import { ICacheProvider } from "../contracts/ICacheProvider.js"; + +/** + * Execution strategy that wraps another strategy and adds caching capabilities. + */ +export class CachingExecutionStrategy<TContext> implements IExecutionStrategy<TContext> { + constructor( + private readonly innerStrategy: IExecutionStrategy<TContext>, + private readonly cacheProvider: ICacheProvider + ) {} + + async execute( + step: TaskStep<TContext>, + context: TContext, + signal?: AbortSignal + ): Promise<TaskResult> { + if (!step.cache) { + return this.innerStrategy.execute(step, context, signal); + } + + const cacheKey = await step.cache.key(context); + const cachedResult = await this.cacheProvider.get(cacheKey); + + if (cachedResult) { + if (step.cache.restore) { + await step.cache.restore(context, cachedResult); + } + + return { + ...cachedResult, + status: "skipped", + message: cachedResult.message ? 
`${cachedResult.message} (cached)` : "Task skipped (cached)", + }; + } + + const result = await this.innerStrategy.execute(step, context, signal); + + if (result.status === "success") { + await this.cacheProvider.set(cacheKey, result, step.cache.ttl); + } + + return result; + } +} diff --git a/src/utils/MemoryCacheProvider.ts b/src/utils/MemoryCacheProvider.ts new file mode 100644 index 0000000..e6916b4 --- /dev/null +++ b/src/utils/MemoryCacheProvider.ts @@ -0,0 +1,44 @@ +import { ICacheProvider } from "../contracts/ICacheProvider.js"; +import { TaskResult } from "../TaskResult.js"; + +interface CacheEntry { + result: TaskResult; + expiresAt?: number; +} + +/** + * A simple in-memory implementation of ICacheProvider. + */ +export class MemoryCacheProvider implements ICacheProvider { + private readonly cache = new Map(); + + get(key: string): TaskResult | undefined { + const entry = this.cache.get(key); + if (!entry) { + return undefined; + } + + if (entry.expiresAt !== undefined && Date.now() > entry.expiresAt) { + this.cache.delete(key); + return undefined; + } + + return entry.result; + } + + set(key: string, result: TaskResult, ttl?: number): void { + const entry: CacheEntry = { + result, + }; + + if (ttl !== undefined) { + entry.expiresAt = Date.now() + ttl; + } + + this.cache.set(key, entry); + } + + delete(key: string): void { + this.cache.delete(key); + } +} diff --git a/tests/CachingExecutionStrategy.test.ts b/tests/CachingExecutionStrategy.test.ts new file mode 100644 index 0000000..bb0b9a5 --- /dev/null +++ b/tests/CachingExecutionStrategy.test.ts @@ -0,0 +1,148 @@ +import { describe, it, expect, vi } from "vitest"; +import { CachingExecutionStrategy } from "../src/strategies/CachingExecutionStrategy.js"; +import { MemoryCacheProvider } from "../src/utils/MemoryCacheProvider.js"; +import { IExecutionStrategy } from "../src/strategies/IExecutionStrategy.js"; +import { TaskStep } from "../src/TaskStep.js"; + +interface TestContext { + value: number; +} + 
+describe("CachingExecutionStrategy", () => { + it("should execute task normally if no cache config is present", async () => { + const mockInnerStrategy: IExecutionStrategy = { + execute: vi.fn().mockResolvedValue({ status: "success", message: "Run" }), + }; + const cacheProvider = new MemoryCacheProvider(); + const strategy = new CachingExecutionStrategy(mockInnerStrategy, cacheProvider); + + const step: TaskStep = { + name: "task1", + run: vi.fn(), + }; + + const result = await strategy.execute(step, { value: 1 }); + expect(result.status).toBe("success"); + expect(mockInnerStrategy.execute).toHaveBeenCalledTimes(1); + expect(cacheProvider.get("task1")).toBeUndefined(); + }); + + it("should cache result on success", async () => { + const mockInnerStrategy: IExecutionStrategy = { + execute: vi.fn().mockResolvedValue({ status: "success", data: "result1" }), + }; + const cacheProvider = new MemoryCacheProvider(); + const strategy = new CachingExecutionStrategy(mockInnerStrategy, cacheProvider); + + const step: TaskStep = { + name: "task1", + cache: { + key: () => "my-key", + }, + run: vi.fn(), + }; + + const result = await strategy.execute(step, { value: 1 }); + expect(result.status).toBe("success"); + expect(mockInnerStrategy.execute).toHaveBeenCalledTimes(1); + + const cached = cacheProvider.get("my-key"); + expect(cached).toEqual({ status: "success", data: "result1" }); + }); + + it("should not cache result on failure", async () => { + const mockInnerStrategy: IExecutionStrategy = { + execute: vi.fn().mockResolvedValue({ status: "failure", error: "fail" }), + }; + const cacheProvider = new MemoryCacheProvider(); + const strategy = new CachingExecutionStrategy(mockInnerStrategy, cacheProvider); + + const step: TaskStep = { + name: "task1", + cache: { + key: () => "my-key", + }, + run: vi.fn(), + }; + + const result = await strategy.execute(step, { value: 1 }); + expect(result.status).toBe("failure"); + expect(mockInnerStrategy.execute).toHaveBeenCalledTimes(1); + + 
const cached = cacheProvider.get("my-key"); + expect(cached).toBeUndefined(); + }); + + it("should return cached result and skip execution", async () => { + const mockInnerStrategy: IExecutionStrategy = { + execute: vi.fn(), + }; + const cacheProvider = new MemoryCacheProvider(); + cacheProvider.set("my-key", { status: "success", data: "result1" }); + + const strategy = new CachingExecutionStrategy(mockInnerStrategy, cacheProvider); + + const step: TaskStep = { + name: "task1", + cache: { + key: () => "my-key", + }, + run: vi.fn(), + }; + + const result = await strategy.execute(step, { value: 1 }); + expect(result.status).toBe("skipped"); + expect(result.message).toBe("Task skipped (cached)"); + expect(result.data).toBe("result1"); + expect(mockInnerStrategy.execute).not.toHaveBeenCalled(); + }); + + it("should append (cached) to existing message", async () => { + const mockInnerStrategy: IExecutionStrategy = { + execute: vi.fn(), + }; + const cacheProvider = new MemoryCacheProvider(); + cacheProvider.set("my-key", { status: "success", message: "Success!" }); + + const strategy = new CachingExecutionStrategy(mockInnerStrategy, cacheProvider); + + const step: TaskStep = { + name: "task1", + cache: { + key: () => "my-key", + }, + run: vi.fn(), + }; + + const result = await strategy.execute(step, { value: 1 }); + expect(result.status).toBe("skipped"); + expect(result.message).toBe("Success! 
(cached)"); + expect(mockInnerStrategy.execute).not.toHaveBeenCalled(); + }); + + it("should call restore function if provided", async () => { + const mockInnerStrategy: IExecutionStrategy = { + execute: vi.fn(), + }; + const cacheProvider = new MemoryCacheProvider(); + cacheProvider.set("my-key", { status: "success", data: 42 }); + + const strategy = new CachingExecutionStrategy(mockInnerStrategy, cacheProvider); + + const context: TestContext = { value: 1 }; + const step: TaskStep = { + name: "task1", + cache: { + key: () => "my-key", + restore: (ctx, res) => { + ctx.value = res.data as number; + }, + }, + run: vi.fn(), + }; + + await strategy.execute(step, context); + expect(context.value).toBe(42); + expect(mockInnerStrategy.execute).not.toHaveBeenCalled(); + }); +}); diff --git a/tests/MemoryCacheProvider.test.ts b/tests/MemoryCacheProvider.test.ts new file mode 100644 index 0000000..5751724 --- /dev/null +++ b/tests/MemoryCacheProvider.test.ts @@ -0,0 +1,45 @@ +import { describe, it, expect, vi } from "vitest"; +import { MemoryCacheProvider } from "../src/utils/MemoryCacheProvider.js"; +import { TaskResult } from "../src/TaskResult.js"; + +describe("MemoryCacheProvider", () => { + it("should return undefined for a non-existent key", () => { + const provider = new MemoryCacheProvider(); + expect(provider.get("non-existent")).toBeUndefined(); + }); + + it("should store and retrieve a value", () => { + const provider = new MemoryCacheProvider(); + const result: TaskResult = { status: "success", message: "Hello" }; + provider.set("key1", result); + expect(provider.get("key1")).toEqual(result); + }); + + it("should delete a value", () => { + const provider = new MemoryCacheProvider(); + const result: TaskResult = { status: "success", message: "Hello" }; + provider.set("key1", result); + provider.delete("key1"); + expect(provider.get("key1")).toBeUndefined(); + }); + + it("should expire values after TTL", () => { + vi.useFakeTimers(); + const provider = new 
MemoryCacheProvider(); + const result: TaskResult = { status: "success", message: "Hello" }; + + // Set with 100ms TTL + provider.set("key1", result, 100); + expect(provider.get("key1")).toEqual(result); + + // Advance time by 50ms (still valid) + vi.advanceTimersByTime(50); + expect(provider.get("key1")).toEqual(result); + + // Advance time by another 51ms (total 101ms, expired) + vi.advanceTimersByTime(51); + expect(provider.get("key1")).toBeUndefined(); + + vi.useRealTimers(); + }); +}); diff --git a/tests/TaskRunnerCaching.test.ts b/tests/TaskRunnerCaching.test.ts new file mode 100644 index 0000000..bf0dea4 --- /dev/null +++ b/tests/TaskRunnerCaching.test.ts @@ -0,0 +1,87 @@ +import { describe, it, expect } from "vitest"; +import { TaskRunner } from "../src/TaskRunner.js"; +import { TaskStep } from "../src/TaskStep.js"; +import { MemoryCacheProvider } from "../src/utils/MemoryCacheProvider.js"; + +interface TestContext { + executionCount: number; + data: string[]; +} + +describe("TaskRunner Caching Integration", () => { + it("should cache task outputs and restore context across runs", async () => { + const context: TestContext = { executionCount: 0, data: [] }; + const runner = new TaskRunner(context); + const cacheProvider = new MemoryCacheProvider(); + runner.setCacheProvider(cacheProvider); + + const step: TaskStep = { + name: "expensiveTask", + cache: { + key: () => "expensive-task-key", + restore: (ctx, res) => { + if (res.data) { + ctx.data.push(res.data as string); + } + }, + }, + run: async (ctx) => { + ctx.executionCount++; + ctx.data.push("run-data"); + return { status: "success", data: "run-data" }; + }, + }; + + // First run (cache miss) + const results1 = await runner.execute([step]); + expect(results1.get("expensiveTask")?.status).toBe("success"); + expect(context.executionCount).toBe(1); + expect(context.data).toEqual(["run-data"]); + + // Second run (cache hit) + // Create new context to verify restore works correctly + const context2: 
TestContext = { executionCount: 0, data: [] }; + const runner2 = new TaskRunner(context2); + runner2.setCacheProvider(cacheProvider); // Reuse the same cache + + const results2 = await runner2.execute([step]); + expect(results2.get("expensiveTask")?.status).toBe("skipped"); + expect(results2.get("expensiveTask")?.message).toBe("Task skipped (cached)"); + expect(results2.get("expensiveTask")?.data).toBe("run-data"); + + // Context should not have been updated by the run method, but by restore + expect(context2.executionCount).toBe(0); + expect(context2.data).toEqual(["run-data"]); + }); + + it("should ignore cache during dry runs", async () => { + const context: TestContext = { executionCount: 0, data: [] }; + const runner = new TaskRunner(context); + const cacheProvider = new MemoryCacheProvider(); + + // Seed cache + cacheProvider.set("expensive-task-key", { status: "success", data: "cached-data" }); + runner.setCacheProvider(cacheProvider); + + const step: TaskStep = { + name: "expensiveTask", + cache: { + key: () => "expensive-task-key", + }, + run: async (ctx) => { + ctx.executionCount++; + return { status: "success" }; + }, + }; + + const results = await runner.execute([step], { dryRun: true }); + + // In a dry run, the status is typically success with a dry-run message, + // and the execution strategy doesn't hit the cache strategy at all. + expect(results.get("expensiveTask")?.status).toBe("success"); + expect(results.get("expensiveTask")?.message).toBe("Dry run: simulated success expensiveTask"); + // Should not have the cached data + expect(results.get("expensiveTask")?.data).toBeUndefined(); + expect(context.executionCount).toBe(0); + }); +});