diff --git a/.claude/commands/add-adapter.md b/.claude/commands/add-adapter.md new file mode 100644 index 0000000..ffc6dbd --- /dev/null +++ b/.claude/commands/add-adapter.md @@ -0,0 +1,57 @@ +# Add a new adapter + +Create a new tool adapter for the specified package and tool. Follow these steps exactly. + +## 1. Determine the target + +Ask which **package** (hooks, mcp, agents, skills, or rules) and which **AI tool** to add. + +## 2. Study the reference adapter + +Read the Claude Code adapter for the target package — it's always the most complete: +- `packages/{package}/src/adapters/claude-code.ts` +- `packages/{package}/src/adapters/claude-code.test.ts` + +Also read `packages/{package}/src/adapters/base.ts` for the base class interface. + +## 3. Create the adapter file + +Create `packages/{package}/src/adapters/{tool-name}.ts`: + +**For hooks adapters** (complex — event mapping required): +- Define `EVENT_MAP: Record<UniversalEvent, string[]>` mapping all 15 universal events to tool-native events (empty array for unsupported) +- Define `REVERSE_MAP: Record<string, UniversalEvent[]>` as the inverse +- Extend `BaseAdapter`, implement: `id`, `name`, `version`, `capabilities`, `detect()`, `generate()`, `mapEvent()`, `mapNativeEvent()`, `uninstall()` +- Use `this.commandExists()` and `this.existsSync()` from base class in `detect()` + +**For mcp/agents/skills/rules adapters** (simpler — no event mapping): +- Extend the package's `BaseAdapter` +- Implement: `id`, `name`, `version`, `detect()`, `generate()`, `install()` +- For agents: consider using `createMarkdownAdapter()` factory from `markdown-adapter.ts` + +End the file with self-registration: +```typescript +const adapter = new MyToolAdapter(); +registry.register(adapter); +export { MyToolAdapter }; +export default adapter; +``` + +## 4. Register in all.ts + +Add `import "./{tool-name}.js";` to `packages/{package}/src/adapters/all.ts`. + +## 5. 
Write tests + +Create `packages/{package}/src/adapters/{tool-name}.test.ts` covering: +- Metadata: `id`, `name`, `version` +- Capabilities (hooks only): all flags, `supportedEvents`, `blockableEvents` +- `mapEvent()` / `mapNativeEvent()` (hooks only): every map entry + unknown events returning `[]` +- `generate()`: file paths, content format, deduplication, empty input, multi-event +- `detect()`: both found and not-found paths + +Target 100% coverage. Use the Claude Code adapter test as template. + +## 6. Verify + +Run `npm run check` — must pass with 0 errors and 0 warnings. diff --git a/.claude/commands/release-prep.md b/.claude/commands/release-prep.md new file mode 100644 index 0000000..2633606 --- /dev/null +++ b/.claude/commands/release-prep.md @@ -0,0 +1,21 @@ +# Prepare for release + +Verify the codebase is ready for semantic-release on merge to master. + +## Steps + +1. Run `npm run check` — all four stages must pass (lint, format, typecheck, test) + +2. Check recent commits follow conventional commit format (Angular preset): + - `feat:` → minor release + - `fix:`, `perf:`, `refactor:`, `revert:` → patch release + - `docs:`, `chore:`, `style:`, `test:`, `ci:` → no release + - `BREAKING CHANGE:` in footer → major release + +3. Run `npm run release:publish:dry` to preview what would be published + +4. Verify all workspace package.json files have consistent versions + +5. Check that `scripts/sync-versions.js` and `scripts/publish-workspaces.js` are intact and unmodified + +Report the results: which packages will be published, what version bump is expected, and any issues found. diff --git a/.claude/commands/run-check.md b/.claude/commands/run-check.md new file mode 100644 index 0000000..5024a6c --- /dev/null +++ b/.claude/commands/run-check.md @@ -0,0 +1,11 @@ +# Run full check pipeline + +Run `npm run check` which executes lint, format check, typecheck, and test in sequence. + +If any stage fails: +1. Read the error output carefully +2. Fix the issue +3. 
Re-run only the failing stage to verify the fix +4. Run the full `npm run check` again to confirm everything passes + +Do not stop until all four stages pass with zero errors and zero warnings. diff --git a/.claude/settings.json b/.claude/settings.json new file mode 100644 index 0000000..b59b2af --- /dev/null +++ b/.claude/settings.json @@ -0,0 +1,61 @@ +{ + "$schema": "https://json.schemastore.org/claude-code-settings.json", + "permissions": { + "allow": [ + "Bash(npm run *)", + "Bash(npm test*)", + "Bash(npx vitest *)", + "Bash(npx turbo *)", + "Bash(npx oxlint *)", + "Bash(npx oxfmt *)", + "Bash(npx tsc *)", + "Bash(git status*)", + "Bash(git diff*)", + "Bash(git log*)", + "Bash(node scripts/*)" + ], + "deny": [ + "Bash(npm publish*)", + "Bash(npx semantic-release*)", + "Bash(git push --force*)" + ] + }, + "hooks": { + "PostToolUse": [ + { + "matcher": "Write|Edit", + "hooks": [ + { + "type": "command", + "command": "file_path=$(echo \"$TOOL_INPUT\" | node -e \"process.stdout.write(JSON.parse(require('fs').readFileSync('/dev/stdin','utf8')).file_path||'')\" 2>/dev/null); [[ \"$file_path\" == *.ts ]] && [[ -f \"$file_path\" ]] && npx oxfmt \"$file_path\" 2>/dev/null; exit 0" + } + ] + }, + { + "hooks": [ + { + "type": "command", + "command": "node .claude/hooks/ai-hooks-runner.js", + "timeout": 10, + "description": "ai-hooks: PostToolUse" + } + ] + } + ], + "PreToolUse": [ + { + "hooks": [ + { + "type": "command", + "command": "node .claude/hooks/ai-hooks-runner.js", + "timeout": 10, + "description": "ai-hooks: PreToolUse" + } + ] + } + ] + }, + "env": { + "CLAUDE_BASH_MAINTAIN_PROJECT_WORKING_DIR": "1" + } +} diff --git a/.gitignore b/.gitignore index b48e789..17d1255 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,4 @@ dist/ !.env.example coverage/ .DS_Store +.claude/settings.local.json diff --git a/.releaserc.json b/.releaserc.json index 6deb817..da9554e 100644 --- a/.releaserc.json +++ b/.releaserc.json @@ -31,7 +31,7 @@ "@semantic-release/npm", { 
"npmPublish": true, - "pkgRoot": "packages/core", + "pkgRoot": "packages/hooks", "access": "public" } ], diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..bcb55a2 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,93 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Project Overview + +Universal hook engine and configuration management for AI coding tools. Monorepo (`@premierstudio/*`) providing a unified event/hook system that translates to 9+ AI tool-specific formats (Claude Code, Codex, Cursor, Gemini CLI, Kiro, OpenCode, Cline, Factory Droid, Amp). + +## Commands + +```bash +npm run check # Full verification: lint + format + typecheck + test (run before PRs) +npm run build # Build all packages (turbo) +npm test # Run all tests (vitest) +npx vitest run packages/hooks # Run tests for a single package +npx vitest run packages/hooks/src/adapters/claude-code.test.ts # Single test file +npm run test:watch # Watch mode +npm run lint # oxlint +npm run lint:fix # oxlint --fix +npm run fmt # oxfmt (auto-format) +npm run fmt:check # Check formatting +npm run typecheck # tsc across all packages (turbo) +``` + +## Architecture + +### Monorepo Structure + +Six packages managed by npm workspaces + Turborepo: + +| Package | npm name | Purpose | +|---------|----------|---------| +| `packages/hooks` | `@premierstudio/ai-hooks` | Core engine: hook engine, adapters, config loader, built-in hooks | +| `packages/mcp` | `@premierstudio/ai-mcp` | MCP server configuration management | +| `packages/agents` | `@premierstudio/ai-agents` | Agent configuration for AI tools | +| `packages/skills` | `@premierstudio/ai-skills` | Skills/prompts configuration | +| `packages/rules` | `@premierstudio/ai-rules` | Project rules configuration | +| `packages/cli` | `@premierstudio/ai-tools` | Unified CLI routing to all engines | + +The `cli` package depends on all others. 
The other five packages are independent of each other. + +### Repeated Package Pattern + +Each engine package (hooks, mcp, agents, skills, rules) follows the same internal layout: + +- `src/adapters/` — Tool-specific implementations (one per AI tool) + `base.ts` base class + `registry.ts` global registry +- `src/config/` — `defineConfig()` helper and `loadConfig()` dynamic importer +- `src/cli/` — Package-specific CLI commands with `bin.ts` entry point +- `src/types/` — TypeScript type definitions +- `src/index.ts` — Public API barrel export + +### Hook Engine (packages/hooks) — the most complex package + +Express.js-style middleware chain. For runtime internals, event mapping, and adapter implementation details, see `packages/hooks/CLAUDE.md`. + +- **HookEngine** (`runtime/engine.ts`): Central orchestrator. Registers hooks, dispatches events via `emit()`. +- **Chain execution** (`runtime/chain.ts`): Hooks sorted by priority (lower = first), `next()` middleware pattern. Stops on block. Enforces per-hook timeouts. +- **15 universal events** (`types/events.ts`): session:start/end, prompt:submit/response, tool:before/after, file:read/write/edit/delete, shell:before/after, mcp:before/after, notification. Before events are blockable; after events are observe-only. +- **Built-in hooks** (`hooks/builtin.ts`): block-dangerous-commands, scan-secrets, protect-sensitive-files, audit-shell. + +### Adapter System (shared across all engine packages) + +Each adapter extends its package's `BaseAdapter` and self-registers into a global registry on import. Importing `adapters/all.ts` registers all adapters for that engine. The hooks package has the most complex adapters (event mapping, blocking); other packages have simpler adapters (config file generation only). + +### Config System + +Each engine has a `defineConfig()` helper (exception: rules uses `defineRulesConfig()`). 
The hooks package additionally has a `hook()` fluent builder, `loadConfig()` dynamic importer, and preset composition via `extends`. Other engines have trivial config helpers that pass through the object. + +### Simpler Engine Packages (mcp, agents, skills, rules) + +All follow the same adapter-registry-CLI pattern as hooks but without the runtime engine. Each package's CLAUDE.md documents what's unique: + +- **mcp**: Transport types (stdio/SSE), server definitions, import/sync across tools +- **agents**: Reusable `createMarkdownAdapter()` factory, YAML frontmatter, model/tools fields +- **skills**: Simplest — just name + content as markdown, no metadata +- **rules**: Scoping system (always/glob/manual/agent), priority, YAML frontmatter + +## Code Conventions + +- **Strict TypeScript**: `noUnusedLocals`, `noUnusedParameters`, `noUncheckedIndexedAccess` all enabled +- **No `any`**: `typescript/no-explicit-any` is an error in oxlint +- **No `_variable` prefix** for unused params — use `void variable` instead +- **ESM only**: All packages are `"type": "module"`, target ES2023/Node 22 +- **Formatting**: oxfmt — double quotes, trailing commas, 100 char width, 2-space indent +- **Tests**: vitest with globals, co-located as `*.test.ts` next to source files, target 100% coverage + +## Build & Release + +- **tsup** builds each package to ESM with declaration maps +- **Turborepo** orchestrates build order (packages build in dependency order) +- **semantic-release** on master: analyzes conventional commits, bumps versions, publishes to npm +- `scripts/sync-versions.js` keeps all workspace package versions in lockstep +- `scripts/publish-workspaces.js` publishes non-primary packages after the core package diff --git a/package-lock.json b/package-lock.json index 0d66ff0..a21b9a0 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,11 +1,11 @@ { - "name": "@premierstudio/ai-hooks-monorepo", + "name": "@premierstudio/ai-tools-monorepo", "version": "1.1.8", 
"lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "@premierstudio/ai-hooks-monorepo", + "name": "@premierstudio/ai-tools-monorepo", "version": "1.1.8", "workspaces": [ "packages/*" @@ -83,6 +83,8 @@ }, "node_modules/@ampproject/remapping": { "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -117,6 +119,8 @@ }, "node_modules/@babel/helper-string-parser": { "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", "dev": true, "license": "MIT", "engines": { @@ -125,6 +129,8 @@ }, "node_modules/@babel/helper-validator-identifier": { "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", "dev": true, "license": "MIT", "engines": { @@ -133,6 +139,8 @@ }, "node_modules/@babel/parser": { "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz", + "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==", "dev": true, "license": "MIT", "dependencies": { @@ -147,6 +155,8 @@ }, "node_modules/@babel/types": { "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", "dev": true, "license": "MIT", "dependencies": { @@ -159,33 +169,14 @@ }, "node_modules/@bcoe/v8-coverage": { "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-1.0.2.tgz", + "integrity": "sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==", "dev": true, "license": "MIT", "engines": { "node": ">=18" } }, - "node_modules/@clack/core": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/@clack/core/-/core-0.4.2.tgz", - "integrity": "sha512-NYQfcEy8MWIxrT5Fj8nIVchfRFA26yYKJcvBS7WlUIlw2OmQOY9DhGGXMovyI5J5PpxrCPGkgUi207EBrjpBvg==", - "license": "MIT", - "dependencies": { - "picocolors": "^1.0.0", - "sisteransi": "^1.0.5" - } - }, - "node_modules/@clack/prompts": { - "version": "0.10.1", - "resolved": "https://registry.npmjs.org/@clack/prompts/-/prompts-0.10.1.tgz", - "integrity": "sha512-Q0T02vx8ZM9XSv9/Yde0jTmmBQufZhPJfYAg2XrrrxWWaZgq1rr8nU8Hv710BQ1dhoP8rtY7YUdpGej2Qza/cw==", - "license": "MIT", - "dependencies": { - "@clack/core": "0.4.2", - "picocolors": "^1.0.0", - "sisteransi": "^1.0.5" - } - }, "node_modules/@colors/colors": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", @@ -267,6 +258,8 @@ }, "node_modules/@esbuild/darwin-arm64": { "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz", + "integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==", "cpu": [ "arm64" ], @@ -639,6 +632,8 @@ }, "node_modules/@isaacs/cliui": { "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", "dev": true, "license": "ISC", "dependencies": { @@ -653,8 +648,51 @@ "node": ">=12" } }, + "node_modules/@isaacs/cliui/node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": 
"sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@isaacs/cliui/node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, "node_modules/@istanbuljs/schema": { "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", "dev": true, "license": "MIT", "engines": { @@ -663,6 +701,8 @@ }, "node_modules/@jridgewell/gen-mapping": { "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", "dev": true, "license": "MIT", "dependencies": { @@ -672,6 +712,8 @@ }, "node_modules/@jridgewell/resolve-uri": { "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": 
"sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", "dev": true, "license": "MIT", "engines": { @@ -680,11 +722,15 @@ }, "node_modules/@jridgewell/sourcemap-codec": { "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", "dev": true, "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", "dev": true, "license": "MIT", "dependencies": { @@ -722,9 +768,9 @@ } }, "node_modules/@octokit/endpoint": { - "version": "11.0.2", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-11.0.2.tgz", - "integrity": "sha512-4zCpzP1fWc7QlqunZ5bSEjxc6yLAlRTnDwKtgXfcI/FxxGoqedDG8V2+xJ60bV2kODqcGB+nATdtap/XYq2NZQ==", + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-11.0.3.tgz", + "integrity": "sha512-FWFlNxghg4HrXkD3ifYbS/IdL/mDHjh9QcsNyhQjN8dplUoZbejsdpmuqdA76nxj2xoWPs7p8uX2SNr9rYu0Ag==", "dev": true, "license": "MIT", "dependencies": { @@ -774,9 +820,9 @@ } }, "node_modules/@octokit/plugin-retry": { - "version": "8.0.3", - "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-8.0.3.tgz", - "integrity": "sha512-vKGx1i3MC0za53IzYBSBXcrhmd+daQDzuZfYDd52X5S0M2otf3kVZTVP8bLA3EkU0lTvd1WEC2OlNNa4G+dohA==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-8.1.0.tgz", + "integrity": "sha512-O1FZgXeiGb2sowEr/hYTr6YunGdSAFWnr2fyW39Ah85H8O33ELASQxcvOFF5LE6Tjekcyu2ms4qAzJVhSaJxTw==", "dev": true, "license": "MIT", "dependencies": { @@ -809,16 +855,17 @@ } }, "node_modules/@octokit/request": { - 
"version": "10.0.7", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-10.0.7.tgz", - "integrity": "sha512-v93h0i1yu4idj8qFPZwjehoJx4j3Ntn+JhXsdJrG9pYaX6j/XRz2RmasMUHtNgQD39nrv/VwTWSqK0RNXR8upA==", + "version": "10.0.8", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-10.0.8.tgz", + "integrity": "sha512-SJZNwY9pur9Agf7l87ywFi14W+Hd9Jg6Ifivsd33+/bGUQIjNujdFiXII2/qSlN2ybqUHfp5xpekMEjIBTjlSw==", "dev": true, "license": "MIT", "dependencies": { - "@octokit/endpoint": "^11.0.2", + "@octokit/endpoint": "^11.0.3", "@octokit/request-error": "^7.0.2", "@octokit/types": "^16.0.0", "fast-content-type-parse": "^3.0.0", + "json-with-bigint": "^3.5.3", "universal-user-agent": "^7.0.2" }, "engines": { @@ -884,6 +931,8 @@ }, "node_modules/@oxfmt/binding-darwin-arm64": { "version": "0.32.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-darwin-arm64/-/binding-darwin-arm64-0.32.0.tgz", + "integrity": "sha512-m6wQojz/hn94XdZugFPtdFbOvXbOSYEqPsR2gyLyID3BvcrC2QsJyT1o3gb4BZEGtZrG1NiKVGwDRLM0dHd2mg==", "cpu": [ "arm64" ], @@ -1170,9 +1219,9 @@ } }, "node_modules/@oxlint/binding-android-arm-eabi": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-android-arm-eabi/-/binding-android-arm-eabi-1.47.0.tgz", - "integrity": "sha512-UHqo3te9K/fh29brCuQdHjN+kfpIi9cnTPABuD5S9wb9ykXYRGTOOMVuSV/CK43sOhU4wwb2nT1RVjcbrrQjFw==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-android-arm-eabi/-/binding-android-arm-eabi-1.49.0.tgz", + "integrity": "sha512-2WPoh/2oK9r/i2R4o4J18AOrm3HVlWiHZ8TnuCaS4dX8m5ZzRmHW0I3eLxEurQLHWVruhQN7fHgZnah+ag5iQg==", "cpu": [ "arm" ], @@ -1187,9 +1236,9 @@ } }, "node_modules/@oxlint/binding-android-arm64": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-android-arm64/-/binding-android-arm64-1.47.0.tgz", - "integrity": "sha512-xh02lsTF1TAkR+SZrRMYHR/xCx8Wg2MAHxJNdHVpAKELh9/yE9h4LJeqAOBbIb3YYn8o/D97U9VmkvkfJfrHfw==", + "version": 
"1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-android-arm64/-/binding-android-arm64-1.49.0.tgz", + "integrity": "sha512-YqJAGvNB11EzoKm1euVhZntb79alhMvWW/j12bYqdvVxn6xzEQWrEDCJg9BPo3A3tBCSUBKH7bVkAiCBqK/L1w==", "cpu": [ "arm64" ], @@ -1204,7 +1253,9 @@ } }, "node_modules/@oxlint/binding-darwin-arm64": { - "version": "1.47.0", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-darwin-arm64/-/binding-darwin-arm64-1.49.0.tgz", + "integrity": "sha512-WFocCRlvVkMhChCJ2qpJfp1Gj/IjvyjuifH9Pex8m8yHonxxQa3d8DZYreuDQU3T4jvSY8rqhoRqnpc61Nlbxw==", "cpu": [ "arm64" ], @@ -1219,9 +1270,9 @@ } }, "node_modules/@oxlint/binding-darwin-x64": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-darwin-x64/-/binding-darwin-x64-1.47.0.tgz", - "integrity": "sha512-hP2bOI4IWNS+F6pVXWtRshSTuJ1qCRZgDgVUg6EBUqsRy+ExkEPJkx+YmIuxgdCduYK1LKptLNFuQLJP8voPbQ==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-darwin-x64/-/binding-darwin-x64-1.49.0.tgz", + "integrity": "sha512-BN0KniwvehbUfYztOMwEDkYoojGm/narf5oJf+/ap+6PnzMeWLezMaVARNIS0j3OdMkjHTEP8s3+GdPJ7WDywQ==", "cpu": [ "x64" ], @@ -1236,9 +1287,9 @@ } }, "node_modules/@oxlint/binding-freebsd-x64": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-freebsd-x64/-/binding-freebsd-x64-1.47.0.tgz", - "integrity": "sha512-F55jIEH5xmGu7S661Uho8vGiLFk0bY3A/g4J8CTKiLJnYu/PSMZ2WxFoy5Hji6qvFuujrrM9Q8XXbMO0fKOYPg==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-freebsd-x64/-/binding-freebsd-x64-1.49.0.tgz", + "integrity": "sha512-SnkAc/DPIY6joMCiP/+53Q+N2UOGMU6ULvbztpmvPJNF/jYPGhNbKtN982uj2Gs6fpbxYkmyj08QnpkD4fbHJA==", "cpu": [ "x64" ], @@ -1253,9 +1304,9 @@ } }, "node_modules/@oxlint/binding-linux-arm-gnueabihf": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.47.0.tgz", - "integrity": 
"sha512-wxmOn/wns/WKPXUC1fo5mu9pMZPVOu8hsynaVDrgmmXMdHKS7on6bA5cPauFFN9tJXNdsjW26AK9lpfu3IfHBQ==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.49.0.tgz", + "integrity": "sha512-6Z3EzRvpQVIpO7uFhdiGhdE8Mh3S2VWKLL9xuxVqD6fzPhyI3ugthpYXlCChXzO8FzcYIZ3t1+Kau+h2NY1hqA==", "cpu": [ "arm" ], @@ -1270,9 +1321,9 @@ } }, "node_modules/@oxlint/binding-linux-arm-musleabihf": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-1.47.0.tgz", - "integrity": "sha512-KJTmVIA/GqRlM2K+ZROH30VMdydEU7bDTY35fNg3tOPzQRIs2deLZlY/9JWwdWo1F/9mIYmpbdCmPqtKhWNOPg==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-1.49.0.tgz", + "integrity": "sha512-wdjXaQYAL/L25732mLlngfst4Jdmi/HLPVHb3yfCoP5mE3lO/pFFrmOJpqWodgv29suWY74Ij+RmJ/YIG5VuzQ==", "cpu": [ "arm" ], @@ -1287,9 +1338,9 @@ } }, "node_modules/@oxlint/binding-linux-arm64-gnu": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.47.0.tgz", - "integrity": "sha512-PF7ELcFg1GVlS0X0ZB6aWiXobjLrAKer3T8YEkwIoO8RwWiAMkL3n3gbleg895BuZkHVlJ2kPRUwfrhHrVkD1A==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.49.0.tgz", + "integrity": "sha512-oSHpm8zmSvAG1BWUumbDRSg7moJbnwoEXKAkwDf/xTQJOzvbUknq95NVQdw/AduZr5dePftalB8rzJNGBogUMg==", "cpu": [ "arm64" ], @@ -1304,9 +1355,9 @@ } }, "node_modules/@oxlint/binding-linux-arm64-musl": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.47.0.tgz", - "integrity": "sha512-4BezLRO5cu0asf0Jp1gkrnn2OHiXrPPPEfBTxq1k5/yJ2zdGGTmZxHD2KF2voR23wb8Elyu3iQawXo7wvIZq0Q==", + "version": "1.49.0", + "resolved": 
"https://registry.npmjs.org/@oxlint/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.49.0.tgz", + "integrity": "sha512-xeqkMOARgGBlEg9BQuPDf6ZW711X6BT5qjDyeM5XNowCJeTSdmMhpePJjTEiVbbr3t21sIlK8RE6X5bc04nWyQ==", "cpu": [ "arm64" ], @@ -1321,9 +1372,9 @@ } }, "node_modules/@oxlint/binding-linux-ppc64-gnu": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-1.47.0.tgz", - "integrity": "sha512-aI5ds9jq2CPDOvjeapiIj48T/vlWp+f4prkxs+FVzrmVN9BWIj0eqeJ/hV8WgXg79HVMIz9PU6deI2ki09bR1w==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-1.49.0.tgz", + "integrity": "sha512-uvcqRO6PnlJGbL7TeePhTK5+7/JXbxGbN+C6FVmfICDeeRomgQqrfVjf0lUrVpUU8ii8TSkIbNdft3M+oNlOsQ==", "cpu": [ "ppc64" ], @@ -1338,9 +1389,9 @@ } }, "node_modules/@oxlint/binding-linux-riscv64-gnu": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-1.47.0.tgz", - "integrity": "sha512-mO7ycp9Elvgt5EdGkQHCwJA6878xvo9tk+vlMfT1qg++UjvOMB8INsOCQIOH2IKErF/8/P21LULkdIrocMw9xA==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-1.49.0.tgz", + "integrity": "sha512-Dw1HkdXAwHNH+ZDserHP2RzXQmhHtpsYYI0hf8fuGAVCIVwvS6w1+InLxpPMY25P8ASRNiFN3hADtoh6lI+4lg==", "cpu": [ "riscv64" ], @@ -1355,9 +1406,9 @@ } }, "node_modules/@oxlint/binding-linux-riscv64-musl": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-1.47.0.tgz", - "integrity": "sha512-24D0wsYT/7hDFn3Ow32m3/+QT/1ZwrUhShx4/wRDAmz11GQHOZ1k+/HBuK/MflebdnalmXWITcPEy4BWTi7TCA==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-1.49.0.tgz", + "integrity": 
"sha512-EPlMYaA05tJ9km/0dI9K57iuMq3Tw+nHst7TNIegAJZrBPtsOtYaMFZEaWj02HA8FI5QvSnRHMt+CI+RIhXJBQ==", "cpu": [ "riscv64" ], @@ -1372,9 +1423,9 @@ } }, "node_modules/@oxlint/binding-linux-s390x-gnu": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-1.47.0.tgz", - "integrity": "sha512-8tPzPne882mtML/uy3mApvdCyuVOpthJ7xUv3b67gVfz63hOOM/bwO0cysSkPyYYFDFRn6/FnUb7Jhmsesntvg==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-1.49.0.tgz", + "integrity": "sha512-yZiQL9qEwse34aMbnMb5VqiAWfDY+fLFuoJbHOuzB1OaJZbN1MRF9Nk+W89PIpGr5DNPDipwjZb8+Q7wOywoUQ==", "cpu": [ "s390x" ], @@ -1389,9 +1440,9 @@ } }, "node_modules/@oxlint/binding-linux-x64-gnu": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.47.0.tgz", - "integrity": "sha512-q58pIyGIzeffEBhEgbRxLFHmHfV9m7g1RnkLiahQuEvyjKNiJcvdHOwKH2BdgZxdzc99Cs6hF5xTa86X40WzPw==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.49.0.tgz", + "integrity": "sha512-CcCDwMMXSchNkhdgvhVn3DLZ4EnBXAD8o8+gRzahg+IdSt/72y19xBgShJgadIRF0TsRcV/MhDUMwL5N/W54aQ==", "cpu": [ "x64" ], @@ -1406,9 +1457,9 @@ } }, "node_modules/@oxlint/binding-linux-x64-musl": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-x64-musl/-/binding-linux-x64-musl-1.47.0.tgz", - "integrity": "sha512-e7DiLZtETZUCwTa4EEHg9G+7g3pY+afCWXvSeMG7m0TQ29UHHxMARPaEQUE4mfKgSqIWnJaUk2iZzRPMRdga5g==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-x64-musl/-/binding-linux-x64-musl-1.49.0.tgz", + "integrity": "sha512-u3HfKV8BV6t6UCCbN0RRiyqcymhrnpunVmLFI8sEa5S/EBu+p/0bJ3D7LZ2KT6PsBbrB71SWq4DeFrskOVgIZg==", "cpu": [ "x64" ], @@ -1423,9 +1474,9 @@ } }, "node_modules/@oxlint/binding-openharmony-arm64": { - "version": "1.47.0", - 
"resolved": "https://registry.npmjs.org/@oxlint/binding-openharmony-arm64/-/binding-openharmony-arm64-1.47.0.tgz", - "integrity": "sha512-3AFPfQ0WKMleT/bKd7zsks3xoawtZA6E/wKf0DjwysH7wUiMMJkNKXOzYq1R/00G98JFgSU1AkrlOQrSdNNhlg==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-openharmony-arm64/-/binding-openharmony-arm64-1.49.0.tgz", + "integrity": "sha512-dRDpH9fw+oeUMpM4br0taYCFpW6jQtOuEIec89rOgDA1YhqwmeRcx0XYeCv7U48p57qJ1XZHeMGM9LdItIjfzA==", "cpu": [ "arm64" ], @@ -1440,9 +1491,9 @@ } }, "node_modules/@oxlint/binding-win32-arm64-msvc": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.47.0.tgz", - "integrity": "sha512-cLMVVM6TBxp+N7FldQJ2GQnkcLYEPGgiuEaXdvhgvSgODBk9ov3jed+khIXSAWtnFOW0wOnG3RjwqPh0rCuheA==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.49.0.tgz", + "integrity": "sha512-6rrKe/wL9tn0qnOy76i1/0f4Dc3dtQnibGlU4HqR/brVHlVjzLSoaH0gAFnLnznh9yQ6gcFTBFOPrcN/eKPDGA==", "cpu": [ "arm64" ], @@ -1457,9 +1508,9 @@ } }, "node_modules/@oxlint/binding-win32-ia32-msvc": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-1.47.0.tgz", - "integrity": "sha512-VpFOSzvTnld77/Edje3ZdHgZWnlTb5nVWXyTgjD3/DKF/6t5bRRbwn3z77zOdnGy44xAMvbyAwDNOSeOdVUmRA==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-1.49.0.tgz", + "integrity": "sha512-CXHLWAtLs2xG/aVy1OZiYJzrULlq0QkYpI6cd7VKMrab+qur4fXVE/B1Bp1m0h1qKTj5/FTGg6oU4qaXMjS/ug==", "cpu": [ "ia32" ], @@ -1474,9 +1525,9 @@ } }, "node_modules/@oxlint/binding-win32-x64-msvc": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/@oxlint/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.47.0.tgz", - "integrity": 
"sha512-+q8IWptxXx2HMTM6JluR67284t0h8X/oHJgqpxH1siowxPMqZeIpAcWCUq+tY+Rv2iQK8TUugjZnSBQAVV5CmA==", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.49.0.tgz", + "integrity": "sha512-VteIelt78kwzSglOozaQcs6BCS4Lk0j+QA+hGV0W8UeyaqQ3XpbZRhDU55NW1PPvCy1tg4VXsTlEaPovqto7nQ==", "cpu": [ "x64" ], @@ -1492,6 +1543,8 @@ }, "node_modules/@pkgjs/parseargs": { "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", "dev": true, "license": "MIT", "optional": true, @@ -1544,18 +1597,34 @@ "node": ">=12" } }, + "node_modules/@premierstudio/ai-agents": { + "resolved": "packages/agents", + "link": true + }, "node_modules/@premierstudio/ai-hooks": { - "resolved": "packages/core", + "resolved": "packages/hooks", + "link": true + }, + "node_modules/@premierstudio/ai-mcp": { + "resolved": "packages/mcp", + "link": true + }, + "node_modules/@premierstudio/ai-rules": { + "resolved": "packages/rules", + "link": true + }, + "node_modules/@premierstudio/ai-skills": { + "resolved": "packages/skills", "link": true }, - "node_modules/@premierstudio/plannable": { - "resolved": "packages/plannable", + "node_modules/@premierstudio/ai-tools": { + "resolved": "packages/cli", "link": true }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.57.1.tgz", - "integrity": "sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.58.0.tgz", + "integrity": "sha512-mr0tmS/4FoVk1cnaeN244A/wjvGDNItZKR8hRhnmCzygyRXYtKF5jVDSIILR1U97CTzAYmbgIj/Dukg62ggG5w==", "cpu": [ "arm" ], @@ -1567,9 +1636,9 
@@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.57.1.tgz", - "integrity": "sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.58.0.tgz", + "integrity": "sha512-+s++dbp+/RTte62mQD9wLSbiMTV+xr/PeRJEc/sFZFSBRlHPNPVaf5FXlzAL77Mr8FtSfQqCN+I598M8U41ccQ==", "cpu": [ "arm64" ], @@ -1581,7 +1650,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.57.1", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.58.0.tgz", + "integrity": "sha512-MFWBwTcYs0jZbINQBXHfSrpSQJq3IUOakcKPzfeSznONop14Pxuqa0Kg19GD0rNBMPQI2tFtu3UzapZpH0Uc1Q==", "cpu": [ "arm64" ], @@ -1593,9 +1664,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.57.1.tgz", - "integrity": "sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.58.0.tgz", + "integrity": "sha512-yiKJY7pj9c9JwzuKYLFaDZw5gma3fI9bkPEIyofvVfsPqjCWPglSHdpdwXpKGvDeYDms3Qal8qGMEHZ1M/4Udg==", "cpu": [ "x64" ], @@ -1607,9 +1678,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.57.1.tgz", - "integrity": "sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.58.0.tgz", + "integrity": 
"sha512-x97kCoBh5MOevpn/CNK9W1x8BEzO238541BGWBc315uOlN0AD/ifZ1msg+ZQB05Ux+VF6EcYqpiagfLJ8U3LvQ==", "cpu": [ "arm64" ], @@ -1621,9 +1692,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.57.1.tgz", - "integrity": "sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.58.0.tgz", + "integrity": "sha512-Aa8jPoZ6IQAG2eIrcXPpjRcMjROMFxCt1UYPZZtCxRV68WkuSigYtQ/7Zwrcr2IvtNJo7T2JfDXyMLxq5L4Jlg==", "cpu": [ "x64" ], @@ -1635,9 +1706,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.57.1.tgz", - "integrity": "sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.58.0.tgz", + "integrity": "sha512-Ob8YgT5kD/lSIYW2Rcngs5kNB/44Q2RzBSPz9brf2WEtcGR7/f/E9HeHn1wYaAwKBni+bdXEwgHvUd0x12lQSA==", "cpu": [ "arm" ], @@ -1649,9 +1720,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.57.1.tgz", - "integrity": "sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.58.0.tgz", + "integrity": "sha512-K+RI5oP1ceqoadvNt1FecL17Qtw/n9BgRSzxif3rTL2QlIu88ccvY+Y9nnHe/cmT5zbH9+bpiJuG1mGHRVwF4Q==", "cpu": [ "arm" ], @@ -1663,9 +1734,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - 
"version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.57.1.tgz", - "integrity": "sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.58.0.tgz", + "integrity": "sha512-T+17JAsCKUjmbopcKepJjHWHXSjeW7O5PL7lEFaeQmiVyw4kkc5/lyYKzrv6ElWRX/MrEWfPiJWqbTvfIvjM1Q==", "cpu": [ "arm64" ], @@ -1677,9 +1748,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.57.1.tgz", - "integrity": "sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.58.0.tgz", + "integrity": "sha512-cCePktb9+6R9itIJdeCFF9txPU7pQeEHB5AbHu/MKsfH/k70ZtOeq1k4YAtBv9Z7mmKI5/wOLYjQ+B9QdxR6LA==", "cpu": [ "arm64" ], @@ -1691,9 +1762,9 @@ ] }, "node_modules/@rollup/rollup-linux-loong64-gnu": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.57.1.tgz", - "integrity": "sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.58.0.tgz", + "integrity": "sha512-iekUaLkfliAsDl4/xSdoCJ1gnnIXvoNz85C8U8+ZxknM5pBStfZjeXgB8lXobDQvvPRCN8FPmmuTtH+z95HTmg==", "cpu": [ "loong64" ], @@ -1705,9 +1776,9 @@ ] }, "node_modules/@rollup/rollup-linux-loong64-musl": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.57.1.tgz", - "integrity": 
"sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.58.0.tgz", + "integrity": "sha512-68ofRgJNl/jYJbxFjCKE7IwhbfxOl1muPN4KbIqAIe32lm22KmU7E8OPvyy68HTNkI2iV/c8y2kSPSm2mW/Q9Q==", "cpu": [ "loong64" ], @@ -1719,9 +1790,9 @@ ] }, "node_modules/@rollup/rollup-linux-ppc64-gnu": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.57.1.tgz", - "integrity": "sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.58.0.tgz", + "integrity": "sha512-dpz8vT0i+JqUKuSNPCP5SYyIV2Lh0sNL1+FhM7eLC457d5B9/BC3kDPp5BBftMmTNsBarcPcoz5UGSsnCiw4XQ==", "cpu": [ "ppc64" ], @@ -1733,9 +1804,9 @@ ] }, "node_modules/@rollup/rollup-linux-ppc64-musl": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.57.1.tgz", - "integrity": "sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.58.0.tgz", + "integrity": "sha512-4gdkkf9UJ7tafnweBCR/mk4jf3Jfl0cKX9Np80t5i78kjIH0ZdezUv/JDI2VtruE5lunfACqftJ8dIMGN4oHew==", "cpu": [ "ppc64" ], @@ -1747,9 +1818,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.57.1.tgz", - "integrity": "sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A==", + "version": "4.58.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.58.0.tgz", + "integrity": "sha512-YFS4vPnOkDTD/JriUeeZurFYoJhPf9GQQEF/v4lltp3mVcBmnsAdjEWhr2cjUCZzZNzxCG0HZOvJU44UGHSdzw==", "cpu": [ "riscv64" ], @@ -1761,9 +1832,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.57.1.tgz", - "integrity": "sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.58.0.tgz", + "integrity": "sha512-x2xgZlFne+QVNKV8b4wwaCS8pwq3y14zedZ5DqLzjdRITvreBk//4Knbcvm7+lWmms9V9qFp60MtUd0/t/PXPw==", "cpu": [ "riscv64" ], @@ -1775,9 +1846,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.57.1.tgz", - "integrity": "sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.58.0.tgz", + "integrity": "sha512-jIhrujyn4UnWF8S+DHSkAkDEO3hLX0cjzxJZPLF80xFyzyUIYgSMRcYQ3+uqEoyDD2beGq7Dj7edi8OnJcS/hg==", "cpu": [ "s390x" ], @@ -1789,9 +1860,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.57.1.tgz", - "integrity": "sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.58.0.tgz", + "integrity": "sha512-+410Srdoh78MKSJxTQ+hZ/Mx+ajd6RjjPwBPNd0R3J9FtL6ZA0GqiiyNjCO9In0IzZkCNrpGymSfn+kgyPQocg==", 
"cpu": [ "x64" ], @@ -1803,9 +1874,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.57.1.tgz", - "integrity": "sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.58.0.tgz", + "integrity": "sha512-ZjMyby5SICi227y1MTR3VYBpFTdZs823Rs/hpakufleBoufoOIB6jtm9FEoxn/cgO7l6PM2rCEl5Kre5vX0QrQ==", "cpu": [ "x64" ], @@ -1817,9 +1888,9 @@ ] }, "node_modules/@rollup/rollup-openbsd-x64": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.57.1.tgz", - "integrity": "sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.58.0.tgz", + "integrity": "sha512-ds4iwfYkSQ0k1nb8LTcyXw//ToHOnNTJtceySpL3fa7tc/AsE+UpUFphW126A6fKBGJD5dhRvg8zw1rvoGFxmw==", "cpu": [ "x64" ], @@ -1831,9 +1902,9 @@ ] }, "node_modules/@rollup/rollup-openharmony-arm64": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.57.1.tgz", - "integrity": "sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.58.0.tgz", + "integrity": "sha512-fd/zpJniln4ICdPkjWFhZYeY/bpnaN9pGa6ko+5WD38I0tTqk9lXMgXZg09MNdhpARngmxiCg0B0XUamNw/5BQ==", "cpu": [ "arm64" ], @@ -1845,9 +1916,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.57.1.tgz", - 
"integrity": "sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.58.0.tgz", + "integrity": "sha512-YpG8dUOip7DCz3nr/JUfPbIUo+2d/dy++5bFzgi4ugOGBIox+qMbbqt/JoORwvI/C9Kn2tz6+Bieoqd5+B1CjA==", "cpu": [ "arm64" ], @@ -1859,9 +1930,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.57.1.tgz", - "integrity": "sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.58.0.tgz", + "integrity": "sha512-b9DI8jpFQVh4hIXFr0/+N/TzLdpBIoPzjt0Rt4xJbW3mzguV3mduR9cNgiuFcuL/TeORejJhCWiAXe3E/6PxWA==", "cpu": [ "ia32" ], @@ -1873,9 +1944,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-gnu": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.57.1.tgz", - "integrity": "sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ==", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.58.0.tgz", + "integrity": "sha512-CSrVpmoRJFN06LL9xhkitkwUcTZtIotYAF5p6XOR2zW0Zz5mzb3IPpcoPhB02frzMHFNo1reQ9xSF5fFm3hUsQ==", "cpu": [ "x64" ], @@ -1887,9 +1958,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.57.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.57.1.tgz", - "integrity": "sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA==", + "version": "4.58.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.58.0.tgz", + "integrity": "sha512-QFsBgQNTnh5K0t/sBsjJLq24YVqEIVkGpfN2VHsnN90soZyhaiA9UUHufcctVNL4ypJY0wrwad0wslx2KJQ1/w==", "cpu": [ "x64" ], @@ -2330,6 +2401,8 @@ }, "node_modules/@types/chai": { "version": "5.2.3", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz", + "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==", "dev": true, "license": "MIT", "dependencies": { @@ -2339,16 +2412,22 @@ }, "node_modules/@types/deep-eql": { "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", "dev": true, "license": "MIT" }, "node_modules/@types/estree": { "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", "dev": true, "license": "MIT" }, "node_modules/@types/node": { "version": "22.19.11", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.11.tgz", + "integrity": "sha512-BH7YwL6rA93ReqeQS1c4bsPpcfOmJasG+Fkr6Y59q83f9M1WcBRHR2vM+P9eOisYRcN3ujQoiZY8uk5W+1WL8w==", "dev": true, "license": "MIT", "dependencies": { @@ -2364,6 +2443,8 @@ }, "node_modules/@vitest/coverage-v8": { "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.2.4.tgz", + "integrity": "sha512-EyF9SXU6kS5Ku/U82E259WSnvg6c8KTjppUncuNdm5QHpe17mwREHnjDzozC8x9MZ0xfBUFSaLkRv4TMA75ALQ==", "dev": true, "license": "MIT", "dependencies": { @@ -2396,6 +2477,8 @@ }, "node_modules/@vitest/expect": { "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", + "integrity": 
"sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", "dev": true, "license": "MIT", "dependencies": { @@ -2411,6 +2494,8 @@ }, "node_modules/@vitest/mocker": { "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", + "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", "dev": true, "license": "MIT", "dependencies": { @@ -2436,6 +2521,8 @@ }, "node_modules/@vitest/pretty-format": { "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", + "integrity": "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", "dev": true, "license": "MIT", "dependencies": { @@ -2447,6 +2534,8 @@ }, "node_modules/@vitest/runner": { "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", + "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", "dev": true, "license": "MIT", "dependencies": { @@ -2460,6 +2549,8 @@ }, "node_modules/@vitest/snapshot": { "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", + "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", "dev": true, "license": "MIT", "dependencies": { @@ -2473,6 +2564,8 @@ }, "node_modules/@vitest/spy": { "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", + "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", "dev": true, "license": "MIT", "dependencies": { @@ -2484,6 +2577,8 @@ }, "node_modules/@vitest/utils": { "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", + "integrity": 
"sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", "dev": true, "license": "MIT", "dependencies": { @@ -2496,7 +2591,9 @@ } }, "node_modules/acorn": { - "version": "8.15.0", + "version": "8.16.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz", + "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", "dev": true, "license": "MIT", "bin": { @@ -2548,6 +2645,8 @@ }, "node_modules/ansi-regex": { "version": "6.2.2", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", "dev": true, "license": "MIT", "engines": { @@ -2559,6 +2658,8 @@ }, "node_modules/ansi-styles": { "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", "dev": true, "license": "MIT", "engines": { @@ -2570,6 +2671,8 @@ }, "node_modules/any-promise": { "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", "dev": true, "license": "MIT" }, @@ -2596,6 +2699,8 @@ }, "node_modules/assertion-error": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", "dev": true, "license": "MIT", "engines": { @@ -2604,6 +2709,8 @@ }, "node_modules/ast-v8-to-istanbul": { "version": "0.3.11", + "resolved": "https://registry.npmjs.org/ast-v8-to-istanbul/-/ast-v8-to-istanbul-0.3.11.tgz", + "integrity": 
"sha512-Qya9fkoofMjCBNVdWINMjB5KZvkYfaO9/anwkWnjxibpWUxo5iHl2sOdP7/uAqaRuUYuoo8rDwnbaaKVFxoUvw==", "dev": true, "license": "MIT", "dependencies": { @@ -2612,13 +2719,10 @@ "js-tokens": "^10.0.0" } }, - "node_modules/ast-v8-to-istanbul/node_modules/js-tokens": { - "version": "10.0.0", - "dev": true, - "license": "MIT" - }, "node_modules/balanced-match": { "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true, "license": "MIT" }, @@ -2638,6 +2742,8 @@ }, "node_modules/brace-expansion": { "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { @@ -2659,6 +2765,8 @@ }, "node_modules/bundle-require": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/bundle-require/-/bundle-require-5.1.0.tgz", + "integrity": "sha512-3WrrOuZiyaaZPWiEt4G3+IffISVC9HYlWueJEBWED4ZH4aIAC2PnkdnuRrR94M+w6yGWn4AglWtJtBI8YqvgoA==", "dev": true, "license": "MIT", "dependencies": { @@ -2673,6 +2781,8 @@ }, "node_modules/cac": { "version": "6.7.14", + "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", + "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", "dev": true, "license": "MIT", "engines": { @@ -2691,6 +2801,8 @@ }, "node_modules/chai": { "version": "5.3.3", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.3.3.tgz", + "integrity": "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==", "dev": true, "license": "MIT", "dependencies": { @@ -2729,6 +2841,8 @@ }, "node_modules/check-error": { "version": "2.1.3", + "resolved": 
"https://registry.npmjs.org/check-error/-/check-error-2.1.3.tgz", + "integrity": "sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA==", "dev": true, "license": "MIT", "engines": { @@ -2737,6 +2851,8 @@ }, "node_modules/chokidar": { "version": "4.0.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", "dev": true, "license": "MIT", "dependencies": { @@ -2781,16 +2897,6 @@ "npm": ">=5.0.0" } }, - "node_modules/cli-highlight/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/cli-highlight/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -2836,41 +2942,6 @@ "wrap-ansi": "^7.0.0" } }, - "node_modules/cli-highlight/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true, - "license": "MIT" - }, - "node_modules/cli-highlight/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cli-highlight/node_modules/strip-ansi": { - "version": "6.0.1", - 
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/cli-highlight/node_modules/wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -2934,51 +3005,6 @@ "@colors/colors": "1.5.0" } }, - "node_modules/cli-table3/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/cli-table3/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true, - "license": "MIT" - }, - "node_modules/cli-table3/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cli-table3/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - 
"engines": { - "node": ">=8" - } - }, "node_modules/cliui": { "version": "9.0.1", "resolved": "https://registry.npmjs.org/cliui/-/cliui-9.0.1.tgz", @@ -3019,6 +3045,22 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/cliui/node_modules/strip-ansi": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, "node_modules/cliui/node_modules/wrap-ansi": { "version": "9.0.2", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz", @@ -3039,6 +3081,8 @@ }, "node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "license": "MIT", "dependencies": { @@ -3050,11 +3094,15 @@ }, "node_modules/color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true, "license": "MIT" }, "node_modules/commander": { "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", "dev": true, "license": "MIT", "engines": { @@ -3074,6 +3122,8 @@ }, "node_modules/confbox": { "version": "0.1.8", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", + "integrity": "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==", 
"dev": true, "license": "MIT" }, @@ -3090,6 +3140,8 @@ }, "node_modules/consola": { "version": "3.4.2", + "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", + "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", "dev": true, "license": "MIT", "engines": { @@ -3222,6 +3274,8 @@ }, "node_modules/cross-spawn": { "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", "dev": true, "license": "MIT", "dependencies": { @@ -3264,6 +3318,8 @@ }, "node_modules/debug": { "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", "dev": true, "license": "MIT", "dependencies": { @@ -3280,6 +3336,8 @@ }, "node_modules/deep-eql": { "version": "5.0.2", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", + "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", "dev": true, "license": "MIT", "engines": { @@ -3334,11 +3392,15 @@ }, "node_modules/eastasianwidth": { "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", "dev": true, "license": "MIT" }, "node_modules/emoji-regex": { - "version": "9.2.2", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", "dev": true, "license": "MIT" }, @@ -3529,11 +3591,15 @@ }, "node_modules/es-module-lexer": { "version": "1.7.0", + "resolved": 
"https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", "dev": true, "license": "MIT" }, "node_modules/esbuild": { "version": "0.27.3", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz", + "integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -3597,6 +3663,8 @@ }, "node_modules/estree-walker": { "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", "dev": true, "license": "MIT", "dependencies": { @@ -3649,6 +3717,8 @@ }, "node_modules/expect-type": { "version": "1.3.0", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz", + "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -3672,22 +3742,6 @@ ], "license": "MIT" }, - "node_modules/fdir": { - "version": "6.5.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12.0.0" - }, - "peerDependencies": { - "picomatch": "^3 || ^4" - }, - "peerDependenciesMeta": { - "picomatch": { - "optional": true - } - } - }, "node_modules/figures": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/figures/-/figures-6.1.0.tgz", @@ -3762,6 +3816,8 @@ }, "node_modules/fix-dts-default-cjs-exports": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/fix-dts-default-cjs-exports/-/fix-dts-default-cjs-exports-1.0.1.tgz", + "integrity": "sha512-pVIECanWFC61Hzl2+oOCtoJ3F17kglZC/6N94eRWycFgBH35hHx0Li604ZIzhseh97mf2p0cv7vVrOZGoqhlEg==", "dev": true, "license": "MIT", "dependencies": { @@ -3772,6 +3828,8 @@ }, "node_modules/foreground-child": { 
"version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", "dev": true, "license": "ISC", "dependencies": { @@ -3813,7 +3871,10 @@ }, "node_modules/fsevents": { "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", "dev": true, + "hasInstallScript": true, "license": "MIT", "optional": true, "os": [ @@ -3847,9 +3908,9 @@ } }, "node_modules/get-east-asian-width": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz", - "integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.5.0.tgz", + "integrity": "sha512-CQ+bEO+Tva/qlmw24dCejulK5pMzVnUOFOijVogd3KQs07HnRIgp8TGipvCCRT06xeYEbpbgwaCxglFyiuIcmA==", "dev": true, "license": "MIT", "engines": { @@ -3889,6 +3950,9 @@ }, "node_modules/glob": { "version": "10.5.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz", + "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==", + "deprecated": "Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", "dev": true, "license": "ISC", "dependencies": { @@ -3935,18 +3999,10 @@ "uglify-js": "^3.1.4" } }, - "node_modules/handlebars/node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, "license": "MIT", "engines": { @@ -3989,18 +4045,10 @@ "node": "^20.17.0 || >=22.9.0" } }, - "node_modules/hosted-git-info/node_modules/lru-cache": { - "version": "11.2.6", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.6.tgz", - "integrity": "sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==", - "dev": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": "20 || >=22" - } - }, "node_modules/html-escaper": { "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", "dev": true, "license": "MIT" }, @@ -4157,6 +4205,8 @@ }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true, "license": "MIT", "engines": { @@ -4231,6 +4281,8 @@ }, "node_modules/isexe": { "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "dev": true, "license": "ISC" }, @@ -4253,6 +4305,8 @@ }, "node_modules/istanbul-lib-coverage": { "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", "dev": true, "license": "BSD-3-Clause", "engines": { @@ -4261,6 +4315,8 @@ }, "node_modules/istanbul-lib-report": { "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", "dev": true, "license": "BSD-3-Clause", "dependencies": { @@ -4274,6 +4330,8 @@ }, "node_modules/istanbul-lib-source-maps": { "version": "5.0.6", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.6.tgz", + "integrity": "sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==", "dev": true, "license": "BSD-3-Clause", "dependencies": { @@ -4287,6 +4345,8 @@ }, "node_modules/istanbul-reports": { "version": "3.2.0", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.2.0.tgz", + "integrity": "sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==", "dev": true, "license": "BSD-3-Clause", "dependencies": { @@ -4299,6 +4359,8 @@ }, "node_modules/jackspeak": { "version": "3.4.3", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", "dev": true, "license": "BlueOak-1.0.0", "dependencies": { @@ -4323,6 +4385,8 @@ }, "node_modules/joycon": { "version": 
"3.1.1", + "resolved": "https://registry.npmjs.org/joycon/-/joycon-3.1.1.tgz", + "integrity": "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==", "dev": true, "license": "MIT", "engines": { @@ -4330,7 +4394,9 @@ } }, "node_modules/js-tokens": { - "version": "9.0.1", + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-10.0.0.tgz", + "integrity": "sha512-lM/UBzQmfJRo9ABXbPWemivdCW8V2G8FHaHdypQaIy523snUjog0W71ayWXTjiR+ixeMyVHN2XcpnTd/liPg/Q==", "dev": true, "license": "MIT" }, @@ -4361,6 +4427,13 @@ "dev": true, "license": "MIT" }, + "node_modules/json-with-bigint": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/json-with-bigint/-/json-with-bigint-3.5.3.tgz", + "integrity": "sha512-QObKu6nxy7NsxqR0VK4rkXnsNr5L9ElJaGEg+ucJ6J7/suoKZ0n+p76cu9aCqowytxEbwYNzvrMerfMkXneF5A==", + "dev": true, + "license": "MIT" + }, "node_modules/jsonfile": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", @@ -4376,6 +4449,8 @@ }, "node_modules/lilconfig": { "version": "3.1.3", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", + "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", "dev": true, "license": "MIT", "engines": { @@ -4387,6 +4462,8 @@ }, "node_modules/lines-and-columns": { "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", "dev": true, "license": "MIT" }, @@ -4422,6 +4499,8 @@ }, "node_modules/load-tsconfig": { "version": "0.2.5", + "resolved": "https://registry.npmjs.org/load-tsconfig/-/load-tsconfig-0.2.5.tgz", + "integrity": "sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==", "dev": true, "license": "MIT", "engines": { @@ -4493,16 
+4572,25 @@ }, "node_modules/loupe": { "version": "3.2.1", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.2.1.tgz", + "integrity": "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==", "dev": true, "license": "MIT" }, "node_modules/lru-cache": { - "version": "10.4.3", + "version": "11.2.6", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.6.tgz", + "integrity": "sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==", "dev": true, - "license": "ISC" + "license": "BlueOak-1.0.0", + "engines": { + "node": "20 || >=22" + } }, "node_modules/magic-string": { "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", "dev": true, "license": "MIT", "dependencies": { @@ -4511,6 +4599,8 @@ }, "node_modules/magicast": { "version": "0.3.5", + "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.3.5.tgz", + "integrity": "sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ==", "dev": true, "license": "MIT", "dependencies": { @@ -4539,6 +4629,8 @@ }, "node_modules/make-dir": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", "dev": true, "license": "MIT", "dependencies": { @@ -4620,19 +4712,6 @@ "node": ">=8.6" } }, - "node_modules/micromatch/node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": 
"https://github.com/sponsors/jonschlinkert" - } - }, "node_modules/mime": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/mime/-/mime-4.1.0.tgz", @@ -4661,6 +4740,8 @@ }, "node_modules/minimatch": { "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", "dev": true, "license": "ISC", "dependencies": { @@ -4684,15 +4765,19 @@ } }, "node_modules/minipass": { - "version": "7.1.2", + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", + "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", "dev": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "engines": { "node": ">=16 || 14 >=14.17" } }, "node_modules/mlly": { "version": "1.8.0", + "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.8.0.tgz", + "integrity": "sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==", "dev": true, "license": "MIT", "dependencies": { @@ -4704,11 +4789,15 @@ }, "node_modules/ms": { "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "dev": true, "license": "MIT" }, "node_modules/mz": { "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", "dev": true, "license": "MIT", "dependencies": { @@ -4719,6 +4808,8 @@ }, "node_modules/nanoid": { "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", "dev": true, "funding": [ { @@ -4793,9 +4884,9 @@ } }, 
"node_modules/npm": { - "version": "11.10.0", - "resolved": "https://registry.npmjs.org/npm/-/npm-11.10.0.tgz", - "integrity": "sha512-i8hE43iSIAMFuYVi8TxsEISdELM4fIza600aLjJ0ankGPLqd0oTPKMJqAcO/QWm307MbSlWGzJcNZ0lGMQgHPA==", + "version": "11.10.1", + "resolved": "https://registry.npmjs.org/npm/-/npm-11.10.1.tgz", + "integrity": "sha512-woavuY2OgDFQ1K/tB9QHsUuW989nKfvsKTN/h5qGyS+3+BhvXN/DA2TNzx569JaFfTqrET5bEQNHwVhFk+U1gg==", "bundleDependencies": [ "@isaacs/string-locale-compare", "@npmcli/arborist", @@ -4813,7 +4904,6 @@ "cacache", "chalk", "ci-info", - "cli-columns", "fastest-levenshtein", "fs-minipass", "glob", @@ -4875,12 +4965,12 @@ ], "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", - "@npmcli/arborist": "^9.3.0", - "@npmcli/config": "^10.7.0", + "@npmcli/arborist": "^9.3.1", + "@npmcli/config": "^10.7.1", "@npmcli/fs": "^5.0.0", "@npmcli/map-workspaces": "^5.0.3", "@npmcli/metavuln-calculator": "^9.0.3", - "@npmcli/package-json": "^7.0.4", + "@npmcli/package-json": "^7.0.5", "@npmcli/promise-spawn": "^9.0.1", "@npmcli/redact": "^4.0.0", "@npmcli/run-script": "^10.0.3", @@ -4890,29 +4980,28 @@ "cacache": "^20.0.3", "chalk": "^5.6.2", "ci-info": "^4.4.0", - "cli-columns": "^4.0.0", "fastest-levenshtein": "^1.0.16", "fs-minipass": "^3.0.3", - "glob": "^13.0.2", + "glob": "^13.0.6", "graceful-fs": "^4.2.11", "hosted-git-info": "^9.0.2", "ini": "^6.0.0", - "init-package-json": "^8.2.4", + "init-package-json": "^8.2.5", "is-cidr": "^6.0.3", "json-parse-even-better-errors": "^5.0.0", "libnpmaccess": "^10.0.3", - "libnpmdiff": "^8.1.1", - "libnpmexec": "^10.2.1", - "libnpmfund": "^7.0.15", + "libnpmdiff": "^8.1.2", + "libnpmexec": "^10.2.2", + "libnpmfund": "^7.0.16", "libnpmorg": "^8.0.1", - "libnpmpack": "^9.1.1", + "libnpmpack": "^9.1.2", "libnpmpublish": "^11.1.3", "libnpmsearch": "^9.0.1", "libnpmteam": "^8.0.2", "libnpmversion": "^8.0.3", "make-fetch-happen": "^15.0.3", - "minimatch": "^10.1.1", - "minipass": "^7.1.1", + "minimatch": "^10.2.2", + 
"minipass": "^7.1.3", "minipass-pipeline": "^1.2.4", "ms": "^2.1.2", "node-gyp": "^12.2.0", @@ -4934,7 +5023,7 @@ "spdx-expression-parse": "^4.0.0", "ssri": "^13.0.1", "supports-color": "^10.2.2", - "tar": "^7.5.7", + "tar": "^7.5.9", "text-table": "~0.2.0", "tiny-relative-date": "^2.0.2", "treeverse": "^3.0.0", @@ -4979,27 +5068,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/npm/node_modules/@isaacs/balanced-match": { - "version": "4.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "engines": { - "node": "20 || >=22" - } - }, - "node_modules/npm/node_modules/@isaacs/brace-expansion": { - "version": "5.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "@isaacs/balanced-match": "^4.0.1" - }, - "engines": { - "node": "20 || >=22" - } - }, "node_modules/npm/node_modules/@isaacs/fs-minipass": { "version": "4.0.1", "dev": true, @@ -5035,7 +5103,7 @@ } }, "node_modules/npm/node_modules/@npmcli/arborist": { - "version": "9.3.0", + "version": "9.3.1", "dev": true, "inBundle": true, "license": "ISC", @@ -5082,7 +5150,7 @@ } }, "node_modules/npm/node_modules/@npmcli/config": { - "version": "10.7.0", + "version": "10.7.1", "dev": true, "inBundle": true, "license": "ISC", @@ -5197,7 +5265,7 @@ } }, "node_modules/npm/node_modules/@npmcli/package-json": { - "version": "7.0.4", + "version": "7.0.5", "dev": true, "inBundle": true, "license": "ISC", @@ -5208,7 +5276,7 @@ "json-parse-even-better-errors": "^5.0.0", "proc-log": "^6.0.0", "semver": "^7.5.3", - "validate-npm-package-license": "^3.0.4" + "spdx-expression-parse": "^4.0.0" }, "engines": { "node": "^20.17.0 || >=22.9.0" @@ -5378,15 +5446,6 @@ "node": ">= 14" } }, - "node_modules/npm/node_modules/ansi-regex": { - "version": "5.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/npm/node_modules/aproba": { "version": "2.1.0", "dev": true, @@ -5399,6 +5458,15 @@ "inBundle": true, "license": 
"MIT" }, + "node_modules/npm/node_modules/balanced-match": { + "version": "4.0.3", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": "20 || >=22" + } + }, "node_modules/npm/node_modules/bin-links": { "version": "6.0.0", "dev": true, @@ -5427,6 +5495,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/npm/node_modules/brace-expansion": { + "version": "5.0.2", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^4.0.2" + }, + "engines": { + "node": "20 || >=22" + } + }, "node_modules/npm/node_modules/cacache": { "version": "20.0.3", "dev": true, @@ -5486,30 +5566,14 @@ } }, "node_modules/npm/node_modules/cidr-regex": { - "version": "5.0.2", + "version": "5.0.3", "dev": true, "inBundle": true, "license": "BSD-2-Clause", - "dependencies": { - "ip-regex": "5.0.0" - }, "engines": { "node": ">=20" } }, - "node_modules/npm/node_modules/cli-columns": { - "version": "4.0.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">= 10" - } - }, "node_modules/npm/node_modules/cmd-shim": { "version": "8.0.0", "dev": true, @@ -5566,12 +5630,6 @@ "node": ">=0.3.1" } }, - "node_modules/npm/node_modules/emoji-regex": { - "version": "8.0.0", - "dev": true, - "inBundle": true, - "license": "MIT" - }, "node_modules/npm/node_modules/encoding": { "version": "0.1.13", "dev": true, @@ -5625,17 +5683,17 @@ } }, "node_modules/npm/node_modules/glob": { - "version": "13.0.2", + "version": "13.0.6", "dev": true, "inBundle": true, "license": "BlueOak-1.0.0", "dependencies": { - "minimatch": "^10.1.2", - "minipass": "^7.1.2", - "path-scurry": "^2.0.0" + "minimatch": "^10.2.2", + "minipass": "^7.1.3", + "path-scurry": "^2.0.2" }, "engines": { - "node": "20 || >=22" + "node": "18 || 20 || >=22" }, "funding": { "url": "https://github.com/sponsors/isaacs" @@ -5735,7 +5793,7 @@ } }, 
"node_modules/npm/node_modules/init-package-json": { - "version": "8.2.4", + "version": "8.2.5", "dev": true, "inBundle": true, "license": "ISC", @@ -5745,7 +5803,6 @@ "promzard": "^3.0.1", "read": "^5.0.1", "semver": "^7.7.2", - "validate-npm-package-license": "^3.0.4", "validate-npm-package-name": "^7.0.0" }, "engines": { @@ -5761,18 +5818,6 @@ "node": ">= 12" } }, - "node_modules/npm/node_modules/ip-regex": { - "version": "5.0.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/npm/node_modules/is-cidr": { "version": "6.0.3", "dev": true, @@ -5785,15 +5830,6 @@ "node": ">=20" } }, - "node_modules/npm/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/npm/node_modules/isexe": { "version": "4.0.0", "dev": true, @@ -5856,12 +5892,12 @@ } }, "node_modules/npm/node_modules/libnpmdiff": { - "version": "8.1.1", + "version": "8.1.2", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/arborist": "^9.3.0", + "@npmcli/arborist": "^9.3.1", "@npmcli/installed-package-contents": "^4.0.0", "binary-extensions": "^3.0.0", "diff": "^8.0.2", @@ -5875,12 +5911,12 @@ } }, "node_modules/npm/node_modules/libnpmexec": { - "version": "10.2.1", + "version": "10.2.2", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/arborist": "^9.3.0", + "@npmcli/arborist": "^9.3.1", "@npmcli/package-json": "^7.0.0", "@npmcli/run-script": "^10.0.0", "ci-info": "^4.0.0", @@ -5898,12 +5934,12 @@ } }, "node_modules/npm/node_modules/libnpmfund": { - "version": "7.0.15", + "version": "7.0.16", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/arborist": "^9.3.0" + "@npmcli/arborist": "^9.3.1" }, "engines": { "node": "^20.17.0 || >=22.9.0" @@ 
-5923,12 +5959,12 @@ } }, "node_modules/npm/node_modules/libnpmpack": { - "version": "9.1.1", + "version": "9.1.2", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/arborist": "^9.3.0", + "@npmcli/arborist": "^9.3.1", "@npmcli/run-script": "^10.0.0", "npm-package-arg": "^13.0.0", "pacote": "^21.0.2" @@ -6029,25 +6065,25 @@ } }, "node_modules/npm/node_modules/minimatch": { - "version": "10.1.2", + "version": "10.2.2", "dev": true, "inBundle": true, "license": "BlueOak-1.0.0", "dependencies": { - "@isaacs/brace-expansion": "^5.0.1" + "brace-expansion": "^5.0.2" }, "engines": { - "node": "20 || >=22" + "node": "18 || 20 || >=22" }, "funding": { "url": "https://github.com/sponsors/isaacs" } }, "node_modules/npm/node_modules/minipass": { - "version": "7.1.2", + "version": "7.1.3", "dev": true, "inBundle": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "engines": { "node": ">=16 || 14 >=14.17" } @@ -6105,6 +6141,12 @@ "node": ">=8" } }, + "node_modules/npm/node_modules/minipass-flush/node_modules/yallist": { + "version": "4.0.0", + "dev": true, + "inBundle": true, + "license": "ISC" + }, "node_modules/npm/node_modules/minipass-pipeline": { "version": "1.2.4", "dev": true, @@ -6129,6 +6171,12 @@ "node": ">=8" } }, + "node_modules/npm/node_modules/minipass-pipeline/node_modules/yallist": { + "version": "4.0.0", + "dev": true, + "inBundle": true, + "license": "ISC" + }, "node_modules/npm/node_modules/minipass-sized": { "version": "2.0.0", "dev": true, @@ -6400,7 +6448,7 @@ } }, "node_modules/npm/node_modules/path-scurry": { - "version": "2.0.1", + "version": "2.0.2", "dev": true, "inBundle": true, "license": "BlueOak-1.0.0", @@ -6409,7 +6457,7 @@ "minipass": "^7.1.2" }, "engines": { - "node": "20 || >=22" + "node": "18 || 20 || >=22" }, "funding": { "url": "https://github.com/sponsors/isaacs" @@ -6613,26 +6661,6 @@ "node": ">= 14" } }, - "node_modules/npm/node_modules/spdx-correct": { - "version": "3.2.0", - "dev": true, - "inBundle": 
true, - "license": "Apache-2.0", - "dependencies": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/npm/node_modules/spdx-correct/node_modules/spdx-expression-parse": { - "version": "3.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, "node_modules/npm/node_modules/spdx-exceptions": { "version": "2.5.0", "dev": true, @@ -6667,32 +6695,6 @@ "node": "^20.17.0 || >=22.9.0" } }, - "node_modules/npm/node_modules/string-width": { - "version": "4.2.3", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/npm/node_modules/strip-ansi": { - "version": "6.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/npm/node_modules/supports-color": { "version": "10.2.2", "dev": true, @@ -6706,7 +6708,7 @@ } }, "node_modules/npm/node_modules/tar": { - "version": "7.5.7", + "version": "7.5.9", "dev": true, "inBundle": true, "license": "BlueOak-1.0.0", @@ -6721,15 +6723,6 @@ "node": ">=18" } }, - "node_modules/npm/node_modules/tar/node_modules/yallist": { - "version": "5.0.0", - "dev": true, - "inBundle": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" - } - }, "node_modules/npm/node_modules/text-table": { "version": "0.2.0", "dev": true, @@ -6840,26 +6833,6 @@ "inBundle": true, "license": "MIT" }, - "node_modules/npm/node_modules/validate-npm-package-license": { - "version": "3.0.4", - "dev": true, - "inBundle": true, - "license": "Apache-2.0", - "dependencies": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, - "node_modules/npm/node_modules/validate-npm-package-license/node_modules/spdx-expression-parse": { - 
"version": "3.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, "node_modules/npm/node_modules/validate-npm-package-name": { "version": "7.0.2", "dev": true, @@ -6907,13 +6880,18 @@ } }, "node_modules/npm/node_modules/yallist": { - "version": "4.0.0", + "version": "5.0.0", "dev": true, "inBundle": true, - "license": "ISC" + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + } }, "node_modules/object-assign": { "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", "dev": true, "license": "MIT", "engines": { @@ -6938,6 +6916,8 @@ }, "node_modules/oxfmt": { "version": "0.32.0", + "resolved": "https://registry.npmjs.org/oxfmt/-/oxfmt-0.32.0.tgz", + "integrity": "sha512-KArQhGzt/Y8M1eSAX98Y8DLtGYYDQhkR55THUPY5VNcpFQ+9nRZkL3ULXhagHMD2hIvjy8JSeEQEP5/yYJSrLA==", "dev": true, "license": "MIT", "dependencies": { @@ -6974,16 +6954,10 @@ "@oxfmt/binding-win32-x64-msvc": "0.32.0" } }, - "node_modules/oxfmt/node_modules/tinypool": { - "version": "2.1.0", - "dev": true, - "license": "MIT", - "engines": { - "node": "^20.0.0 || >=22.0.0" - } - }, "node_modules/oxlint": { - "version": "1.47.0", + "version": "1.49.0", + "resolved": "https://registry.npmjs.org/oxlint/-/oxlint-1.49.0.tgz", + "integrity": "sha512-YZffp0gM+63CJoRhHjtjRnwKtAgUnXM6j63YQ++aigji2NVvLGsUlrXo9gJUXZOdcbfShLYtA6RuTu8GZ4lzOQ==", "dev": true, "license": "MIT", "bin": { @@ -6996,28 +6970,28 @@ "url": "https://github.com/sponsors/Boshen" }, "optionalDependencies": { - "@oxlint/binding-android-arm-eabi": "1.47.0", - "@oxlint/binding-android-arm64": "1.47.0", - "@oxlint/binding-darwin-arm64": "1.47.0", - "@oxlint/binding-darwin-x64": "1.47.0", - "@oxlint/binding-freebsd-x64": "1.47.0", - "@oxlint/binding-linux-arm-gnueabihf": "1.47.0", - 
"@oxlint/binding-linux-arm-musleabihf": "1.47.0", - "@oxlint/binding-linux-arm64-gnu": "1.47.0", - "@oxlint/binding-linux-arm64-musl": "1.47.0", - "@oxlint/binding-linux-ppc64-gnu": "1.47.0", - "@oxlint/binding-linux-riscv64-gnu": "1.47.0", - "@oxlint/binding-linux-riscv64-musl": "1.47.0", - "@oxlint/binding-linux-s390x-gnu": "1.47.0", - "@oxlint/binding-linux-x64-gnu": "1.47.0", - "@oxlint/binding-linux-x64-musl": "1.47.0", - "@oxlint/binding-openharmony-arm64": "1.47.0", - "@oxlint/binding-win32-arm64-msvc": "1.47.0", - "@oxlint/binding-win32-ia32-msvc": "1.47.0", - "@oxlint/binding-win32-x64-msvc": "1.47.0" + "@oxlint/binding-android-arm-eabi": "1.49.0", + "@oxlint/binding-android-arm64": "1.49.0", + "@oxlint/binding-darwin-arm64": "1.49.0", + "@oxlint/binding-darwin-x64": "1.49.0", + "@oxlint/binding-freebsd-x64": "1.49.0", + "@oxlint/binding-linux-arm-gnueabihf": "1.49.0", + "@oxlint/binding-linux-arm-musleabihf": "1.49.0", + "@oxlint/binding-linux-arm64-gnu": "1.49.0", + "@oxlint/binding-linux-arm64-musl": "1.49.0", + "@oxlint/binding-linux-ppc64-gnu": "1.49.0", + "@oxlint/binding-linux-riscv64-gnu": "1.49.0", + "@oxlint/binding-linux-riscv64-musl": "1.49.0", + "@oxlint/binding-linux-s390x-gnu": "1.49.0", + "@oxlint/binding-linux-x64-gnu": "1.49.0", + "@oxlint/binding-linux-x64-musl": "1.49.0", + "@oxlint/binding-openharmony-arm64": "1.49.0", + "@oxlint/binding-win32-arm64-msvc": "1.49.0", + "@oxlint/binding-win32-ia32-msvc": "1.49.0", + "@oxlint/binding-win32-x64-msvc": "1.49.0" }, "peerDependencies": { - "oxlint-tsgolint": ">=0.11.2" + "oxlint-tsgolint": ">=0.14.1" }, "peerDependenciesMeta": { "oxlint-tsgolint": { @@ -7154,6 +7128,8 @@ }, "node_modules/package-json-from-dist": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", "dev": true, "license": "BlueOak-1.0.0" }, @@ 
-7237,6 +7213,8 @@ }, "node_modules/path-key": { "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", "dev": true, "license": "MIT", "engines": { @@ -7245,6 +7223,8 @@ }, "node_modules/path-scurry": { "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", "dev": true, "license": "BlueOak-1.0.0", "dependencies": { @@ -7258,6 +7238,13 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true, + "license": "ISC" + }, "node_modules/path-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", @@ -7270,11 +7257,15 @@ }, "node_modules/pathe": { "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", "dev": true, "license": "MIT" }, "node_modules/pathval": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.1.tgz", + "integrity": "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==", "dev": true, "license": "MIT", "engines": { @@ -7283,14 +7274,19 @@ }, "node_modules/picocolors": { "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, "license": "ISC" }, 
"node_modules/picomatch": { - "version": "4.0.3", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true, "license": "MIT", "engines": { - "node": ">=12" + "node": ">=8.6" }, "funding": { "url": "https://github.com/sponsors/jonschlinkert" @@ -7308,6 +7304,8 @@ }, "node_modules/pirates": { "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", "dev": true, "license": "MIT", "engines": { @@ -7330,6 +7328,8 @@ }, "node_modules/pkg-types": { "version": "1.3.1", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.1.tgz", + "integrity": "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==", "dev": true, "license": "MIT", "dependencies": { @@ -7340,6 +7340,8 @@ }, "node_modules/postcss": { "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", "dev": true, "funding": [ { @@ -7367,6 +7369,8 @@ }, "node_modules/postcss-load-config": { "version": "6.0.1", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-6.0.1.tgz", + "integrity": "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==", "dev": true, "funding": [ { @@ -7483,6 +7487,13 @@ "node": "^16.14.0 || >=18.0.0" } }, + "node_modules/read-package-up/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true, + "license": "ISC" + }, 
"node_modules/read-package-up/node_modules/normalize-package-data": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.2.tgz", @@ -7598,6 +7609,8 @@ }, "node_modules/readdirp": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", "dev": true, "license": "MIT", "engines": { @@ -7633,6 +7646,8 @@ }, "node_modules/resolve-from": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", "dev": true, "license": "MIT", "engines": { @@ -7640,7 +7655,9 @@ } }, "node_modules/rollup": { - "version": "4.57.1", + "version": "4.58.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.58.0.tgz", + "integrity": "sha512-wbT0mBmWbIvvq8NeEYWWvevvxnOyhKChir47S66WCxw1SXqhw7ssIYejnQEVt7XYQpsj2y8F9PM+Cr3SNEa0gw==", "dev": true, "license": "MIT", "dependencies": { @@ -7654,31 +7671,31 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.57.1", - "@rollup/rollup-android-arm64": "4.57.1", - "@rollup/rollup-darwin-arm64": "4.57.1", - "@rollup/rollup-darwin-x64": "4.57.1", - "@rollup/rollup-freebsd-arm64": "4.57.1", - "@rollup/rollup-freebsd-x64": "4.57.1", - "@rollup/rollup-linux-arm-gnueabihf": "4.57.1", - "@rollup/rollup-linux-arm-musleabihf": "4.57.1", - "@rollup/rollup-linux-arm64-gnu": "4.57.1", - "@rollup/rollup-linux-arm64-musl": "4.57.1", - "@rollup/rollup-linux-loong64-gnu": "4.57.1", - "@rollup/rollup-linux-loong64-musl": "4.57.1", - "@rollup/rollup-linux-ppc64-gnu": "4.57.1", - "@rollup/rollup-linux-ppc64-musl": "4.57.1", - "@rollup/rollup-linux-riscv64-gnu": "4.57.1", - "@rollup/rollup-linux-riscv64-musl": "4.57.1", - "@rollup/rollup-linux-s390x-gnu": "4.57.1", 
- "@rollup/rollup-linux-x64-gnu": "4.57.1", - "@rollup/rollup-linux-x64-musl": "4.57.1", - "@rollup/rollup-openbsd-x64": "4.57.1", - "@rollup/rollup-openharmony-arm64": "4.57.1", - "@rollup/rollup-win32-arm64-msvc": "4.57.1", - "@rollup/rollup-win32-ia32-msvc": "4.57.1", - "@rollup/rollup-win32-x64-gnu": "4.57.1", - "@rollup/rollup-win32-x64-msvc": "4.57.1", + "@rollup/rollup-android-arm-eabi": "4.58.0", + "@rollup/rollup-android-arm64": "4.58.0", + "@rollup/rollup-darwin-arm64": "4.58.0", + "@rollup/rollup-darwin-x64": "4.58.0", + "@rollup/rollup-freebsd-arm64": "4.58.0", + "@rollup/rollup-freebsd-x64": "4.58.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.58.0", + "@rollup/rollup-linux-arm-musleabihf": "4.58.0", + "@rollup/rollup-linux-arm64-gnu": "4.58.0", + "@rollup/rollup-linux-arm64-musl": "4.58.0", + "@rollup/rollup-linux-loong64-gnu": "4.58.0", + "@rollup/rollup-linux-loong64-musl": "4.58.0", + "@rollup/rollup-linux-ppc64-gnu": "4.58.0", + "@rollup/rollup-linux-ppc64-musl": "4.58.0", + "@rollup/rollup-linux-riscv64-gnu": "4.58.0", + "@rollup/rollup-linux-riscv64-musl": "4.58.0", + "@rollup/rollup-linux-s390x-gnu": "4.58.0", + "@rollup/rollup-linux-x64-gnu": "4.58.0", + "@rollup/rollup-linux-x64-musl": "4.58.0", + "@rollup/rollup-openbsd-x64": "4.58.0", + "@rollup/rollup-openharmony-arm64": "4.58.0", + "@rollup/rollup-win32-arm64-msvc": "4.58.0", + "@rollup/rollup-win32-ia32-msvc": "4.58.0", + "@rollup/rollup-win32-x64-gnu": "4.58.0", + "@rollup/rollup-win32-x64-msvc": "4.58.0", "fsevents": "~2.3.2" } }, @@ -7850,6 +7867,8 @@ }, "node_modules/semver": { "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "dev": true, "license": "ISC", "bin": { @@ -7874,6 +7893,8 @@ }, "node_modules/shebang-command": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + 
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "dev": true, "license": "MIT", "dependencies": { @@ -7885,6 +7906,8 @@ }, "node_modules/shebang-regex": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", "dev": true, "license": "MIT", "engines": { @@ -7893,11 +7916,15 @@ }, "node_modules/siginfo": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", "dev": true, "license": "ISC" }, "node_modules/signal-exit": { "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", "dev": true, "license": "ISC", "engines": { @@ -8013,12 +8040,6 @@ "node": ">=4" } }, - "node_modules/sisteransi": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", - "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", - "license": "MIT" - }, "node_modules/skin-tone": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/skin-tone/-/skin-tone-2.0.0.tgz", @@ -8033,15 +8054,19 @@ } }, "node_modules/source-map": { - "version": "0.7.6", + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "dev": true, "license": "BSD-3-Clause", "engines": { - "node": ">= 12" + "node": ">=0.10.0" } }, "node_modules/source-map-js": { "version": "1.2.1", + "resolved": 
"https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", "dev": true, "license": "BSD-3-Clause", "engines": { @@ -8085,9 +8110,9 @@ } }, "node_modules/spdx-license-ids": { - "version": "3.0.22", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.22.tgz", - "integrity": "sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==", + "version": "3.0.23", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.23.tgz", + "integrity": "sha512-CWLcCCH7VLu13TgOH+r8p1O/Znwhqv/dbb6lqWy67G+pT1kHmeD/+V36AVb/vq8QMIQwVShJ6Ssl5FPh0fuSdw==", "dev": true, "license": "CC0-1.0" }, @@ -8103,11 +8128,15 @@ }, "node_modules/stackback": { "version": "0.0.2", + "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", "dev": true, "license": "MIT" }, "node_modules/std-env": { "version": "3.10.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz", + "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==", "dev": true, "license": "MIT" }, @@ -8133,24 +8162,25 @@ } }, "node_modules/string-width": { - "version": "5.1.2", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, "license": "MIT", "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" }, "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=8" 
} }, "node_modules/string-width-cjs": { "name": "string-width", "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, "license": "MIT", "dependencies": { @@ -8162,21 +8192,10 @@ "node": ">=8" } }, - "node_modules/string-width-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/string-width-cjs/node_modules/strip-ansi": { + "node_modules/strip-ansi": { "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dev": true, "license": "MIT", "dependencies": { @@ -8186,23 +8205,11 @@ "node": ">=8" } }, - "node_modules/strip-ansi": { - "version": "7.1.2", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, "node_modules/strip-ansi-cjs": { "name": "strip-ansi", "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dev": true, "license": "MIT", "dependencies": { @@ -8214,6 +8221,18 @@ }, "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + 
"node_modules/strip-ansi/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "dev": true, "license": "MIT", "engines": { @@ -8255,6 +8274,8 @@ }, "node_modules/strip-literal": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-3.1.0.tgz", + "integrity": "sha512-8r3mkIM/2+PpjHoOtiAW8Rg3jJLHaV7xPwG+YRGrv6FP0wwk/toTpATxWYOW0BKdWwl82VT2tFYi5DlROa0Mxg==", "dev": true, "license": "MIT", "dependencies": { @@ -8264,8 +8285,17 @@ "url": "https://github.com/sponsors/antfu" } }, + "node_modules/strip-literal/node_modules/js-tokens": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz", + "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==", + "dev": true, + "license": "MIT" + }, "node_modules/sucrase": { "version": "3.35.1", + "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.1.tgz", + "integrity": "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw==", "dev": true, "license": "MIT", "dependencies": { @@ -8305,6 +8335,8 @@ }, "node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "license": "MIT", "dependencies": { @@ -8401,6 +8433,8 @@ }, "node_modules/test-exclude": { "version": "7.0.1", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-7.0.1.tgz", + "integrity": "sha512-pFYqmTw68LXVjeWJMST4+borgQP2AyMNbg1BpZh9LbyhUeNkeaPF9gzfPGUAnSMV3qPYdWUwDIjjCLiSDOl7vg==", "dev": true, "license": "ISC", "dependencies": { @@ -8414,6 +8448,8 @@ }, "node_modules/thenify": { 
"version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", "dev": true, "license": "MIT", "dependencies": { @@ -8422,6 +8458,8 @@ }, "node_modules/thenify-all": { "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", "dev": true, "license": "MIT", "dependencies": { @@ -8460,16 +8498,22 @@ }, "node_modules/tinybench": { "version": "2.9.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", "dev": true, "license": "MIT" }, "node_modules/tinyexec": { "version": "0.3.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", + "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", "dev": true, "license": "MIT" }, "node_modules/tinyglobby": { "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", "dev": true, "license": "MIT", "dependencies": { @@ -8483,16 +8527,51 @@ "url": "https://github.com/sponsors/SuperchupuDev" } }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + 
"node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/tinypool": { - "version": "1.1.1", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-2.1.0.tgz", + "integrity": "sha512-Pugqs6M0m7Lv1I7FtxN4aoyToKg1C4tu+/381vH35y8oENM/Ai7f7C4StcoK4/+BSw9ebcS8jRiVrORFKCALLw==", "dev": true, "license": "MIT", "engines": { - "node": "^18.0.0 || >=20.0.0" + "node": "^20.0.0 || >=22.0.0" } }, "node_modules/tinyrainbow": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", + "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", "dev": true, "license": "MIT", "engines": { @@ -8501,6 +8580,8 @@ }, "node_modules/tinyspy": { "version": "4.0.4", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.4.tgz", + "integrity": "sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q==", "dev": true, "license": "MIT", "engines": { @@ -8535,6 +8616,8 @@ }, "node_modules/tree-kill": { "version": "1.2.2", + "resolved": "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz", + "integrity": "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==", "dev": true, "license": "MIT", "bin": { @@ -8543,11 +8626,15 @@ }, "node_modules/ts-interface-checker": { "version": "0.1.13", + "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", + "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", "dev": true, 
"license": "Apache-2.0" }, "node_modules/tsup": { "version": "8.5.1", + "resolved": "https://registry.npmjs.org/tsup/-/tsup-8.5.1.tgz", + "integrity": "sha512-xtgkqwdhpKWr3tKPmCkvYmS9xnQK3m3XgxZHwSUjvfTjp7YfXe5tT3GgWi0F2N+ZSMsOeWeZFh7ZZFg5iPhing==", "dev": true, "license": "MIT", "dependencies": { @@ -8597,6 +8684,16 @@ } } }, + "node_modules/tsup/node_modules/source-map": { + "version": "0.7.6", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.6.tgz", + "integrity": "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">= 12" + } + }, "node_modules/tunnel": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", @@ -8608,25 +8705,27 @@ } }, "node_modules/turbo": { - "version": "2.8.7", + "version": "2.8.10", + "resolved": "https://registry.npmjs.org/turbo/-/turbo-2.8.10.tgz", + "integrity": "sha512-OxbzDES66+x7nnKGg2MwBA1ypVsZoDTLHpeaP4giyiHSixbsiTaMyeJqbEyvBdp5Cm28fc+8GG6RdQtic0ijwQ==", "dev": true, "license": "MIT", "bin": { "turbo": "bin/turbo" }, "optionalDependencies": { - "turbo-darwin-64": "2.8.7", - "turbo-darwin-arm64": "2.8.7", - "turbo-linux-64": "2.8.7", - "turbo-linux-arm64": "2.8.7", - "turbo-windows-64": "2.8.7", - "turbo-windows-arm64": "2.8.7" + "turbo-darwin-64": "2.8.10", + "turbo-darwin-arm64": "2.8.10", + "turbo-linux-64": "2.8.10", + "turbo-linux-arm64": "2.8.10", + "turbo-windows-64": "2.8.10", + "turbo-windows-arm64": "2.8.10" } }, "node_modules/turbo-darwin-64": { - "version": "2.8.7", - "resolved": "https://registry.npmjs.org/turbo-darwin-64/-/turbo-darwin-64-2.8.7.tgz", - "integrity": "sha512-Xr4TO/oDDwoozbDtBvunb66g//WK8uHRygl72vUthuwzmiw48pil4IuoG/QbMHd9RE8aBnVmzC0WZEWk/WWt3A==", + "version": "2.8.10", + "resolved": "https://registry.npmjs.org/turbo-darwin-64/-/turbo-darwin-64-2.8.10.tgz", + "integrity": 
"sha512-A03fXh+B7S8mL3PbdhTd+0UsaGrhfyPkODvzBDpKRY7bbeac4MDFpJ7I+Slf2oSkCEeSvHKR7Z4U71uKRUfX7g==", "cpu": [ "x64" ], @@ -8638,7 +8737,9 @@ ] }, "node_modules/turbo-darwin-arm64": { - "version": "2.8.7", + "version": "2.8.10", + "resolved": "https://registry.npmjs.org/turbo-darwin-arm64/-/turbo-darwin-arm64-2.8.10.tgz", + "integrity": "sha512-sidzowgWL3s5xCHLeqwC9M3s9M0i16W1nuQF3Mc7fPHpZ+YPohvcbVFBB2uoRRHYZg6yBnwD4gyUHKTeXfwtXA==", "cpu": [ "arm64" ], @@ -8650,9 +8751,9 @@ ] }, "node_modules/turbo-linux-64": { - "version": "2.8.7", - "resolved": "https://registry.npmjs.org/turbo-linux-64/-/turbo-linux-64-2.8.7.tgz", - "integrity": "sha512-nwfEPAH3m5y/nJeYly3j1YJNYU2EG5+2ysZUxvBNM+VBV2LjQaLxB9CsEIpIOKuWKCjnFHKIADTSDPZ3D12J5Q==", + "version": "2.8.10", + "resolved": "https://registry.npmjs.org/turbo-linux-64/-/turbo-linux-64-2.8.10.tgz", + "integrity": "sha512-YK9vcpL3TVtqonB021XwgaQhY9hJJbKKUhLv16osxV0HkcQASQWUqR56yMge7puh6nxU67rQlTq1b7ksR1T3KA==", "cpu": [ "x64" ], @@ -8664,9 +8765,9 @@ ] }, "node_modules/turbo-linux-arm64": { - "version": "2.8.7", - "resolved": "https://registry.npmjs.org/turbo-linux-arm64/-/turbo-linux-arm64-2.8.7.tgz", - "integrity": "sha512-mgA/M6xiJzyxtXV70TtWGDPh+I6acOKmeQGtOzbFQZYEf794pu5jax26bCk5skAp1gqZu3vacPr6jhYHoHU9IQ==", + "version": "2.8.10", + "resolved": "https://registry.npmjs.org/turbo-linux-arm64/-/turbo-linux-arm64-2.8.10.tgz", + "integrity": "sha512-3+j2tL0sG95iBJTm+6J8/45JsETQABPqtFyYjVjBbi6eVGdtNTiBmHNKrbvXRlQ3ZbUG75bKLaSSDHSEEN+btQ==", "cpu": [ "arm64" ], @@ -8678,9 +8779,9 @@ ] }, "node_modules/turbo-windows-64": { - "version": "2.8.7", - "resolved": "https://registry.npmjs.org/turbo-windows-64/-/turbo-windows-64-2.8.7.tgz", - "integrity": "sha512-sHTYMaXuCcyHnGUQgfUUt7S8407TWoP14zc/4N2tsM0wZNK6V9h4H2t5jQPtqKEb6Fg8313kygdDgEwuM4vsHg==", + "version": "2.8.10", + "resolved": "https://registry.npmjs.org/turbo-windows-64/-/turbo-windows-64-2.8.10.tgz", + "integrity": 
"sha512-hdeF5qmVY/NFgiucf8FW0CWJWtyT2QPm5mIsX0W1DXAVzqKVXGq+Zf+dg4EUngAFKjDzoBeN6ec2Fhajwfztkw==", "cpu": [ "x64" ], @@ -8692,9 +8793,9 @@ ] }, "node_modules/turbo-windows-arm64": { - "version": "2.8.7", - "resolved": "https://registry.npmjs.org/turbo-windows-arm64/-/turbo-windows-arm64-2.8.7.tgz", - "integrity": "sha512-WyGiOI2Zp3AhuzVagzQN+T+iq0fWx0oGxDfAWT3ZiLEd4U0cDUkwUZDKVGb3rKqPjDL6lWnuxKKu73ge5xtovQ==", + "version": "2.8.10", + "resolved": "https://registry.npmjs.org/turbo-windows-arm64/-/turbo-windows-arm64-2.8.10.tgz", + "integrity": "sha512-QGdr/Q8LWmj+ITMkSvfiz2glf0d7JG0oXVzGL3jxkGqiBI1zXFj20oqVY0qWi+112LO9SVrYdpHS0E/oGFrMbQ==", "cpu": [ "arm64" ], @@ -8720,6 +8821,8 @@ }, "node_modules/typescript": { "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", "dev": true, "license": "Apache-2.0", "bin": { @@ -8732,6 +8835,8 @@ }, "node_modules/ufo": { "version": "1.6.3", + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.3.tgz", + "integrity": "sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q==", "dev": true, "license": "MIT" }, @@ -8750,9 +8855,9 @@ } }, "node_modules/undici": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/undici/-/undici-7.21.0.tgz", - "integrity": "sha512-Hn2tCQpoDt1wv23a68Ctc8Cr/BHpUSfaPYrkajTXOS9IKpxVRx/X5m1K2YkbK2ipgZgxXSgsUinl3x+2YdSSfg==", + "version": "7.22.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-7.22.0.tgz", + "integrity": "sha512-RqslV2Us5BrllB+JeiZnK4peryVTndy9Dnqq62S3yYRRTj0tFQCwEniUy2167skdGOy3vqRzEvl1Dm4sV2ReDg==", "dev": true, "license": "MIT", "engines": { @@ -8761,6 +8866,8 @@ }, "node_modules/undici-types": { "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": 
"sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", "dev": true, "license": "MIT" }, @@ -8850,6 +8957,8 @@ }, "node_modules/vite": { "version": "7.3.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.1.tgz", + "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", "dev": true, "license": "MIT", "dependencies": { @@ -8923,6 +9032,8 @@ }, "node_modules/vite-node": { "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", + "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", "dev": true, "license": "MIT", "dependencies": { @@ -8942,8 +9053,41 @@ "url": "https://opencollective.com/vitest" } }, + "node_modules/vite/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/vite/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/vitest": { "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", + "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", "dev": true, "license": "MIT", "dependencies": { @@ -9013,6 +9157,29 @@ } } }, + 
"node_modules/vitest/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/vitest/node_modules/tinypool": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz", + "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.0.0 || >=20.0.0" + } + }, "node_modules/web-worker": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/web-worker/-/web-worker-1.2.0.tgz", @@ -9022,6 +9189,8 @@ }, "node_modules/which": { "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", "dev": true, "license": "ISC", "dependencies": { @@ -9036,6 +9205,8 @@ }, "node_modules/why-is-node-running": { "version": "2.3.0", + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", "dev": true, "license": "MIT", "dependencies": { @@ -9058,6 +9229,8 @@ }, "node_modules/wrap-ansi": { "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", "dev": true, "license": "MIT", "dependencies": { @@ -9075,6 +9248,8 @@ "node_modules/wrap-ansi-cjs": { "name": "wrap-ansi", "version": "7.0.0", + "resolved": 
"https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "dev": true, "license": "MIT", "dependencies": { @@ -9089,16 +9264,10 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "license": "MIT", "dependencies": { @@ -9111,33 +9280,45 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { - "version": "8.0.0", + "node_modules/wrap-ansi/node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", "dev": true, "license": "MIT" }, - "node_modules/wrap-ansi-cjs/node_modules/string-width": { - "version": "4.2.3", + "node_modules/wrap-ansi/node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", "dev": true, "license": "MIT", "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" }, "engines": { - "node": ">=8" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - 
"node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { - "version": "6.0.1", + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", "dev": true, "license": "MIT", "dependencies": { - "ansi-regex": "^5.0.1" + "ansi-regex": "^6.0.1" }, "engines": { - "node": ">=8" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, "node_modules/xtend": { @@ -9213,6 +9394,22 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/yargs/node_modules/strip-ansi": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, "node_modules/yoctocolors": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/yoctocolors/-/yoctocolors-2.1.2.tgz", @@ -9226,154 +9423,36 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "packages/adapter-aider": { - "name": "@premierstudio/adapter-aider", - "version": "1.0.0", - "extraneous": true, - "license": "MIT", - "dependencies": { - "@premierstudio/ai-hooks": "*" - } - }, - "packages/adapter-amp": { - "name": "@premierstudio/adapter-amp", - "version": "1.0.6", - "extraneous": true, - "license": "MIT", - "dependencies": { - "@premierstudio/ai-hooks": "*" - } - }, - "packages/adapter-claude-code": { - "name": "@premierstudio/adapter-claude-code", - "version": "1.0.6", - "extraneous": true, - "license": "MIT", - "dependencies": { - "@premierstudio/ai-hooks": "*" - } - }, - "packages/adapter-cline": { - "name": 
"@premierstudio/adapter-cline", - "version": "1.0.6", - "extraneous": true, - "license": "MIT", - "dependencies": { - "@premierstudio/ai-hooks": "*" - } - }, - "packages/adapter-codex": { - "name": "@premierstudio/adapter-codex", - "version": "1.0.6", - "extraneous": true, - "license": "MIT", - "dependencies": { - "@premierstudio/ai-hooks": "*" - } - }, - "packages/adapter-continue": { - "name": "@premierstudio/adapter-continue", - "version": "1.0.0", - "extraneous": true, - "license": "MIT", - "dependencies": { - "@premierstudio/ai-hooks": "*" - } - }, - "packages/adapter-cursor": { - "name": "@premierstudio/adapter-cursor", - "version": "1.0.6", - "extraneous": true, - "license": "MIT", - "dependencies": { - "@premierstudio/ai-hooks": "*" - } - }, - "packages/adapter-droid": { - "name": "@premierstudio/adapter-droid", - "version": "1.0.6", - "extraneous": true, - "license": "MIT", - "dependencies": { - "@premierstudio/ai-hooks": "*" - } - }, - "packages/adapter-gemini-cli": { - "name": "@premierstudio/adapter-gemini-cli", - "version": "1.0.6", - "extraneous": true, - "license": "MIT", - "dependencies": { - "@premierstudio/ai-hooks": "*" - } - }, - "packages/adapter-goose": { - "name": "@premierstudio/adapter-goose", - "version": "1.0.0", - "extraneous": true, - "license": "MIT", - "dependencies": { - "@premierstudio/ai-hooks": "*" - } - }, - "packages/adapter-kiro": { - "name": "@premierstudio/adapter-kiro", - "version": "1.0.6", - "extraneous": true, - "license": "MIT", - "dependencies": { - "@premierstudio/ai-hooks": "*" - } - }, - "packages/adapter-opencode": { - "name": "@premierstudio/adapter-opencode", - "version": "1.0.6", - "extraneous": true, - "license": "MIT", - "dependencies": { - "@premierstudio/ai-hooks": "*" - } - }, - "packages/adapter-roo-code": { - "name": "@premierstudio/adapter-roo-code", - "version": "1.0.0", - "extraneous": true, + "packages/agents": { + "name": "@premierstudio/ai-agents", + "version": "1.1.8", "license": "MIT", - 
"dependencies": { - "@premierstudio/ai-hooks": "*" + "bin": { + "ai-agents": "dist/cli/bin.js" + }, + "engines": { + "node": ">=22.0.0" } }, - "packages/adapter-warp": { - "name": "@premierstudio/adapter-warp", - "version": "1.0.0", - "extraneous": true, + "packages/cli": { + "name": "@premierstudio/ai-tools", + "version": "1.1.8", "license": "MIT", "dependencies": { - "@premierstudio/ai-hooks": "*" - } - }, - "packages/cli": { - "name": "@premierstudio/cli", - "version": "1.0.6", - "extraneous": true, - "license": "MIT", - "dependencies": { - "@premierstudio/adapter-amp": "*", - "@premierstudio/adapter-claude-code": "*", - "@premierstudio/adapter-cline": "*", - "@premierstudio/adapter-codex": "*", - "@premierstudio/adapter-cursor": "*", - "@premierstudio/adapter-droid": "*", - "@premierstudio/adapter-gemini-cli": "*", - "@premierstudio/adapter-kiro": "*", - "@premierstudio/adapter-opencode": "*", - "@premierstudio/ai-hooks": "*" + "@premierstudio/ai-agents": "1.1.8", + "@premierstudio/ai-hooks": "1.1.8", + "@premierstudio/ai-mcp": "1.1.8", + "@premierstudio/ai-rules": "1.1.8", + "@premierstudio/ai-skills": "1.1.8" }, "bin": { - "ai-hooks": "dist/bin.js" + "ai-tools": "dist/cli/bin.js" + }, + "engines": { + "node": ">=22.0.0" } }, - "packages/core": { + "packages/hooks": { "name": "@premierstudio/ai-hooks", "version": "1.1.8", "license": "MIT", @@ -9384,37 +9463,37 @@ "node": ">=22.0.0" } }, - "packages/mcp-server": { - "name": "@premierstudio/mcp-server", - "version": "1.0.0", - "extraneous": true, + "packages/mcp": { + "name": "@premierstudio/ai-mcp", + "version": "1.1.8", "license": "MIT", - "dependencies": { - "@premierstudio/ai-hooks": "*" - }, "bin": { - "ai-hooks-mcp": "dist/serve.js" + "ai-mcp": "dist/cli/bin.js" + }, + "engines": { + "node": ">=22.0.0" } }, - "packages/plannable": { - "name": "@premierstudio/plannable", + "packages/rules": { + "name": "@premierstudio/ai-rules", "version": "1.1.8", "license": "MIT", - "dependencies": { - "@clack/prompts": 
"^0.10.0", - "@premierstudio/ai-hooks": "^1.1.8" - }, "bin": { - "plannable": "dist/bin.js" + "ai-rules": "dist/cli/bin.js" + }, + "engines": { + "node": ">=22.0.0" } }, - "packages/preset-plannable": { - "name": "@premierstudio/preset-plannable", - "version": "1.0.6", - "extraneous": true, + "packages/skills": { + "name": "@premierstudio/ai-skills", + "version": "1.1.8", "license": "MIT", - "dependencies": { - "@premierstudio/ai-hooks": "*" + "bin": { + "ai-skills": "dist/cli/bin.js" + }, + "engines": { + "node": ">=22.0.0" } } } diff --git a/package.json b/package.json index aad84c9..e7afade 100644 --- a/package.json +++ b/package.json @@ -1,5 +1,5 @@ { - "name": "@premierstudio/ai-hooks-monorepo", + "name": "@premierstudio/ai-tools-monorepo", "private": true, "type": "module", "workspaces": [ diff --git a/packages/agents/CLAUDE.md b/packages/agents/CLAUDE.md new file mode 100644 index 0000000..afd72e0 --- /dev/null +++ b/packages/agents/CLAUDE.md @@ -0,0 +1,19 @@ +# packages/agents + +Agent configuration management. Notable for its reusable markdown adapter factory. + +## Key Types (`types/definition.ts`) + +`AgentDefinition`: `id`, `name`, `description?`, `instructions` (main content), `model?`, `tools?: string[]`, `tags?`, `enabled?` + +Agents differ from skills by having `model` and `tools` fields — they represent configurable AI agent personas, not just prompt content. + +## Markdown Adapter Factory (`adapters/markdown-adapter.ts`) + +`createMarkdownAdapter(options)` is a reusable factory that generates markdown files with YAML frontmatter from agent definitions. Used by the Claude Code adapter and extensible for other tools. + +The Claude Code adapter generates `.claude/agents/*.md` files — each agent becomes a markdown file with YAML frontmatter (name, description, model, tools) and instructions as the body. + +## CLI Commands + +`detect`, `generate`, `install`, `import`, `sync`, `export`, `help` (no `init` command — unlike hooks/mcp). 
diff --git a/packages/agents/package.json b/packages/agents/package.json new file mode 100644 index 0000000..e926893 --- /dev/null +++ b/packages/agents/package.json @@ -0,0 +1,52 @@ +{ + "name": "@premierstudio/ai-agents", + "version": "1.1.8", + "description": "Universal agent configuration for AI coding tools", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/PremierStudio/ai-tools" + }, + "bin": { + "ai-agents": "dist/cli/bin.js" + }, + "files": [ + "dist", + "README.md", + "LICENSE" + ], + "type": "module", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.js" + }, + "./adapters": { + "types": "./dist/adapters/index.d.ts", + "import": "./dist/adapters/index.js" + }, + "./adapters/all": { + "types": "./dist/adapters/all.d.ts", + "import": "./dist/adapters/all.js" + }, + "./cli": { + "types": "./dist/cli/index.d.ts", + "import": "./dist/cli/index.js" + } + }, + "publishConfig": { + "access": "public" + }, + "scripts": { + "build": "tsup", + "dev": "tsup --watch", + "typecheck": "tsc --noEmit", + "test": "vitest run", + "clean": "rm -rf dist" + }, + "engines": { + "node": ">=22.0.0" + } +} diff --git a/packages/agents/src/adapters/all.ts b/packages/agents/src/adapters/all.ts new file mode 100644 index 0000000..24fc8c4 --- /dev/null +++ b/packages/agents/src/adapters/all.ts @@ -0,0 +1,10 @@ +// Side-effect imports to register all built-in adapters with the registry. 
+import "./claude-code.js"; +import "./cursor.js"; +import "./cline.js"; +import "./gemini-cli.js"; +import "./amp.js"; +import "./kiro.js"; +import "./droid.js"; +import "./copilot.js"; +import "./roo-code.js"; diff --git a/packages/agents/src/adapters/amp.test.ts b/packages/agents/src/adapters/amp.test.ts new file mode 100644 index 0000000..cecba0e --- /dev/null +++ b/packages/agents/src/adapters/amp.test.ts @@ -0,0 +1,110 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; +import type { AgentDefinition } from "../types/index.js"; + +vi.mock("./index.js", () => { + const registry = { register: vi.fn() }; + abstract class BaseAgentAdapter { + abstract readonly id: string; + abstract readonly name: string; + abstract readonly nativeSupport: boolean; + abstract readonly configDir: string; + abstract generate(agents: AgentDefinition[]): Promise; + abstract import(cwd?: string): Promise; + async detect() { + return false; + } + async install() {} + async uninstall() {} + } + return { BaseAgentAdapter, registry }; +}); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { AmpAgentAdapter } from "./amp.js"; + +describe("AmpAgentAdapter", () => { + let adapter: AmpAgentAdapter; + + const testAgent: AgentDefinition = { + id: "reviewer", + name: "Code Reviewer", + description: "Reviews code", + instructions: "Review carefully.", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new AmpAgentAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => { + expect(adapter.id).toBe("amp"); + }); + + it("has correct name", () => { + expect(adapter.name).toBe("Amp"); + }); + + it("has native support", () => { + expect(adapter.nativeSupport).toBe(true); + }); + + it("has correct configDir", () => { + 
expect(adapter.configDir).toBe(".amp/agents"); + }); + }); + + describe("generate", () => { + it("returns empty array for no agents", async () => { + const files = await adapter.generate([]); + expect(files).toEqual([]); + }); + + it("generates file at correct path", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.path).toBe(".amp/agents/reviewer.md"); + }); + + it("generates file with md format", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.format).toBe("md"); + }); + }); + + describe("import", () => { + it("returns empty array when config dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const agents = await adapter.import("/project"); + expect(agents).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const agents = await adapter.import(); + expect(agents).toEqual([]); + }); + + it("parses agent from markdown file", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["reviewer.md"] as unknown as never); + vi.mocked(readFile).mockResolvedValue( + "---\ndescription: Reviews code\n---\n\n# Code Reviewer\n\nReview.\n", + ); + + const agents = await adapter.import("/project"); + expect(agents).toHaveLength(1); + expect(agents[0]!.id).toBe("reviewer"); + }); + }); +}); diff --git a/packages/agents/src/adapters/amp.ts b/packages/agents/src/adapters/amp.ts new file mode 100644 index 0000000..6ad5b0e --- /dev/null +++ b/packages/agents/src/adapters/amp.ts @@ -0,0 +1,11 @@ +import { createMarkdownAdapter } from "./markdown-adapter.js"; + +const { Adapter: AmpAgentAdapter, adapter } = createMarkdownAdapter({ + id: "amp", + name: "Amp", + configDir: ".amp/agents", + command: "amp", +}); + +export { AmpAgentAdapter }; +export default adapter; diff --git a/packages/agents/src/adapters/base.test.ts b/packages/agents/src/adapters/base.test.ts new 
file mode 100644 index 0000000..d0b1f0c --- /dev/null +++ b/packages/agents/src/adapters/base.test.ts @@ -0,0 +1,172 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { BaseAgentAdapter } from "./base.js"; +import type { AgentDefinition, GeneratedFile } from "../types/index.js"; + +// Concrete subclass for testing the abstract BaseAgentAdapter +class TestAgentAdapter extends BaseAgentAdapter { + readonly id = "test-agents"; + readonly name = "Test Agents"; + readonly nativeSupport = true; + readonly configDir = ".test/agents"; + + async generate(agents: AgentDefinition[]): Promise { + return agents.map((a) => ({ + path: `.test/agents/${a.id}.md`, + content: `# ${a.name}\n\n${a.instructions}\n`, + format: "md" as const, + })); + } + + async import(_cwd?: string): Promise { + return []; + } +} + +// Mock node:fs and node:fs/promises +vi.mock("node:fs", () => ({ + existsSync: vi.fn(), +})); + +vi.mock("node:fs/promises", () => ({ + writeFile: vi.fn(), + mkdir: vi.fn(), +})); + +// Import mocked modules so we can control them +import { existsSync } from "node:fs"; +import { writeFile, mkdir } from "node:fs/promises"; + +const mockedExistsSync = vi.mocked(existsSync); +const mockedWriteFile = vi.mocked(writeFile); +const mockedMkdir = vi.mocked(mkdir); + +describe("BaseAgentAdapter", () => { + let adapter: TestAgentAdapter; + + beforeEach(() => { + adapter = new TestAgentAdapter(); + vi.clearAllMocks(); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + describe("abstract properties", () => { + it("exposes id, name, nativeSupport, and configDir", () => { + expect(adapter.id).toBe("test-agents"); + expect(adapter.name).toBe("Test Agents"); + expect(adapter.nativeSupport).toBe(true); + expect(adapter.configDir).toBe(".test/agents"); + }); + }); + + describe("detect()", () => { + it("returns true when configDir exists", async () => { + mockedExistsSync.mockReturnValue(true); + const result = await adapter.detect(); + 
expect(result).toBe(true); + }); + + it("returns false when configDir does not exist", async () => { + mockedExistsSync.mockReturnValue(false); + const result = await adapter.detect(); + expect(result).toBe(false); + }); + + it("uses provided cwd to resolve configDir", async () => { + mockedExistsSync.mockReturnValue(true); + await adapter.detect("/custom/dir"); + expect(mockedExistsSync).toHaveBeenCalledWith(expect.stringContaining("custom/dir")); + }); + + it("uses process.cwd() when no cwd is provided", async () => { + mockedExistsSync.mockReturnValue(false); + await adapter.detect(); + expect(mockedExistsSync).toHaveBeenCalledWith(expect.stringContaining(adapter.configDir)); + }); + }); + + describe("generate()", () => { + it("calls the subclass implementation with agents", async () => { + const agents: AgentDefinition[] = [ + { id: "coder", name: "Coder Agent", instructions: "Write clean code" }, + ]; + const files = await adapter.generate(agents); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".test/agents/coder.md"); + expect(files[0]?.content).toContain("Coder Agent"); + }); + + it("handles empty agents array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + }); + + describe("install()", () => { + it("writes all files to disk", async () => { + mockedMkdir.mockResolvedValue(undefined); + mockedWriteFile.mockResolvedValue(undefined); + + const files: GeneratedFile[] = [ + { path: ".agents/agent1.md", content: "# Agent 1", format: "md" }, + { path: ".agents/agent2.md", content: "# Agent 2", format: "md" }, + ]; + + await adapter.install(files); + + expect(mockedMkdir).toHaveBeenCalledTimes(2); + expect(mockedWriteFile).toHaveBeenCalledTimes(2); + expect(mockedWriteFile).toHaveBeenCalledWith( + expect.stringContaining("agent1.md"), + "# Agent 1", + "utf-8", + ); + }); + + it("creates parent directories recursively", async () => { + mockedMkdir.mockResolvedValue(undefined); + 
mockedWriteFile.mockResolvedValue(undefined); + + const files: GeneratedFile[] = [ + { path: "deep/nested/dir/agent.md", content: "test", format: "md" }, + ]; + + await adapter.install(files); + + expect(mockedMkdir).toHaveBeenCalledWith(expect.stringContaining("deep/nested/dir"), { + recursive: true, + }); + }); + + it("handles empty file array", async () => { + await adapter.install([]); + expect(mockedMkdir).not.toHaveBeenCalled(); + expect(mockedWriteFile).not.toHaveBeenCalled(); + }); + + it("uses provided cwd to resolve file paths", async () => { + mockedMkdir.mockResolvedValue(undefined); + mockedWriteFile.mockResolvedValue(undefined); + + const files: GeneratedFile[] = [{ path: "agent.md", content: "test", format: "md" }]; + + await adapter.install(files, "/custom/project"); + + expect(mockedWriteFile).toHaveBeenCalledWith( + expect.stringContaining("/custom/project"), + "test", + "utf-8", + ); + }); + }); + + describe("uninstall()", () => { + it("is a no-op by default", async () => { + // Default implementation does nothing; subclasses override + await adapter.uninstall(); + // No error thrown means it worked + }); + }); +}); diff --git a/packages/agents/src/adapters/base.ts b/packages/agents/src/adapters/base.ts new file mode 100644 index 0000000..0eca6c6 --- /dev/null +++ b/packages/agents/src/adapters/base.ts @@ -0,0 +1,48 @@ +import type { AgentDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { writeFile, mkdir } from "node:fs/promises"; +import { dirname, resolve } from "node:path"; + +export abstract class BaseAgentAdapter { + abstract readonly id: string; + abstract readonly name: string; + abstract readonly nativeSupport: boolean; + abstract readonly configDir: string; + + /** CLI binary name for detection (e.g., "claude", "cursor"). Override in subclass. 
*/ + readonly command?: string; + + abstract generate(agents: AgentDefinition[]): Promise; + abstract import(cwd?: string): Promise; + + async detect(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const hasDir = existsSync(resolve(dir, this.configDir)); + if (hasDir) return true; + if (this.command) return this.commandExists(this.command); + return false; + } + + protected async commandExists(command: string): Promise { + const { exec } = await import("node:child_process"); + return new Promise((ok) => { + exec(`which ${command}`, (error: Error | null) => { + ok(!error); + }); + }); + } + + async install(files: GeneratedFile[], cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + for (const file of files) { + const fullPath = resolve(dir, file.path); + await mkdir(dirname(fullPath), { recursive: true }); + await writeFile(fullPath, file.content, "utf-8"); + } + } + + async uninstall(cwd?: string): Promise { + // Override in subclass + void cwd; + } +} diff --git a/packages/agents/src/adapters/claude-code.test.ts b/packages/agents/src/adapters/claude-code.test.ts new file mode 100644 index 0000000..ee12e27 --- /dev/null +++ b/packages/agents/src/adapters/claude-code.test.ts @@ -0,0 +1,251 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; +import type { AgentDefinition } from "../types/index.js"; + +vi.mock("./index.js", () => { + const registry = { register: vi.fn() }; + abstract class BaseAgentAdapter { + abstract readonly id: string; + abstract readonly name: string; + abstract readonly nativeSupport: boolean; + abstract readonly configDir: string; + abstract generate(agents: AgentDefinition[]): Promise; + abstract import(cwd?: string): Promise; + async detect() { + return false; + } + async install() {} + async uninstall() {} + } + return { BaseAgentAdapter, registry }; +}); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: 
vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { ClaudeCodeAgentAdapter } from "./claude-code.js"; + +describe("ClaudeCodeAgentAdapter", () => { + let adapter: ClaudeCodeAgentAdapter; + + const testAgent: AgentDefinition = { + id: "reviewer", + name: "Code Reviewer", + description: "Reviews code for quality", + instructions: "Review all code changes carefully.", + model: "claude-sonnet-4-20250514", + tools: ["Read", "Grep"], + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new ClaudeCodeAgentAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => { + expect(adapter.id).toBe("claude-code"); + }); + + it("has correct name", () => { + expect(adapter.name).toBe("Claude Code"); + }); + + it("has native support", () => { + expect(adapter.nativeSupport).toBe(true); + }); + + it("has correct configDir", () => { + expect(adapter.configDir).toBe(".claude/agents"); + }); + }); + + describe("generate", () => { + it("returns empty array for no agents", async () => { + const files = await adapter.generate([]); + expect(files).toEqual([]); + }); + + it("generates one file per agent", async () => { + const files = await adapter.generate([testAgent]); + expect(files).toHaveLength(1); + }); + + it("generates file at correct path", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.path).toBe(".claude/agents/reviewer.md"); + }); + + it("generates file with md format", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.format).toBe("md"); + }); + + it("includes frontmatter with description", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.content).toContain("description: Reviews code for quality"); + }); + + it("includes frontmatter with model", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.content).toContain("model: 
claude-sonnet-4-20250514"); + }); + + it("includes frontmatter with tools", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.content).toContain("tools:"); + expect(files[0]!.content).toContain(" - Read"); + expect(files[0]!.content).toContain(" - Grep"); + }); + + it("includes agent name as heading", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.content).toContain("# Code Reviewer"); + }); + + it("includes instructions in body", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.content).toContain("Review all code changes carefully."); + }); + + it("wraps content in frontmatter delimiters", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.content.startsWith("---\n")).toBe(true); + expect(files[0]!.content).toContain("\n---\n"); + }); + + it("generates multiple agent files", async () => { + const agents: AgentDefinition[] = [ + testAgent, + { id: "fixer", name: "Bug Fixer", instructions: "Fix bugs." }, + ]; + const files = await adapter.generate(agents); + expect(files).toHaveLength(2); + expect(files[0]!.path).toBe(".claude/agents/reviewer.md"); + expect(files[1]!.path).toBe(".claude/agents/fixer.md"); + }); + + it("omits description from frontmatter when not provided", async () => { + const agent: AgentDefinition = { id: "simple", name: "Simple", instructions: "Do stuff." }; + const files = await adapter.generate([agent]); + expect(files[0]!.content).not.toContain("description:"); + }); + + it("omits model from frontmatter when not provided", async () => { + const agent: AgentDefinition = { id: "simple", name: "Simple", instructions: "Do stuff." }; + const files = await adapter.generate([agent]); + expect(files[0]!.content).not.toContain("model:"); + }); + + it("omits tools from frontmatter when not provided", async () => { + const agent: AgentDefinition = { id: "simple", name: "Simple", instructions: "Do stuff." 
}; + const files = await adapter.generate([agent]); + expect(files[0]!.content).not.toContain("tools:"); + }); + }); + + describe("import", () => { + it("returns empty array when config dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const agents = await adapter.import("/project"); + expect(agents).toEqual([]); + }); + + it("returns empty array when directory has no md files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue([] as unknown as never); + const agents = await adapter.import("/project"); + expect(agents).toEqual([]); + }); + + it("parses agent from markdown file with frontmatter", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["reviewer.md"] as unknown as never); + vi.mocked(readFile).mockResolvedValue( + "---\ndescription: Reviews code\nmodel: claude-sonnet-4-20250514\ntools:\n - Read\n - Grep\n---\n\n# Code Reviewer\n\nReview all code carefully.\n", + ); + + const agents = await adapter.import("/project"); + expect(agents).toHaveLength(1); + expect(agents[0]!.id).toBe("reviewer"); + expect(agents[0]!.name).toBe("Code Reviewer"); + expect(agents[0]!.description).toBe("Reviews code"); + expect(agents[0]!.model).toBe("claude-sonnet-4-20250514"); + expect(agents[0]!.tools).toEqual(["Read", "Grep"]); + expect(agents[0]!.instructions).toBe("Review all code carefully."); + }); + + it("skips non-md files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["README.txt", "agent.md"] as unknown as never); + vi.mocked(readFile).mockResolvedValue( + "---\ndescription: Test\n---\n\n# Agent\n\nInstructions.\n", + ); + + const agents = await adapter.import("/project"); + expect(agents).toHaveLength(1); + }); + + it("uses filename as id", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["my-custom-agent.md"] as unknown as 
never); + vi.mocked(readFile).mockResolvedValue("---\n---\n\n# Custom Agent\n\nDo things.\n"); + + const agents = await adapter.import("/project"); + expect(agents[0]!.id).toBe("my-custom-agent"); + }); + + it("handles file without frontmatter", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown as never); + vi.mocked(readFile).mockResolvedValue("# Plain Agent\n\nJust instructions.\n"); + + const agents = await adapter.import("/project"); + expect(agents).toHaveLength(1); + expect(agents[0]!.id).toBe("plain"); + expect(agents[0]!.name).toBe("Plain Agent"); + expect(agents[0]!.instructions).toBe("Just instructions."); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const agents = await adapter.import(); + expect(agents).toEqual([]); + }); + + it("handles file without frontmatter and without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["raw.md"] as unknown as never); + vi.mocked(readFile).mockResolvedValue("Just raw content, no heading no frontmatter.\n"); + + const agents = await adapter.import("/project"); + expect(agents).toHaveLength(1); + expect(agents[0]!.id).toBe("raw"); + expect(agents[0]!.name).toBe("raw"); + expect(agents[0]!.instructions).toBe("Just raw content, no heading no frontmatter."); + }); + + it("ignores unknown array keys in frontmatter", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["agent.md"] as unknown as never); + vi.mocked(readFile).mockResolvedValue( + "---\ndescription: A test agent\nunknown_key:\n - item1\n - item2\ntools:\n - read\n - edit\nmodel: gpt-4\n---\n\n# Test Agent\n\nInstructions here.\n", + ); + + const agents = await adapter.import("/project"); + expect(agents).toHaveLength(1); + expect(agents[0]!.description).toBe("A test agent"); + expect(agents[0]!.tools).toEqual(["read", 
"edit"]); + expect(agents[0]!.model).toBe("gpt-4"); + expect(agents[0]!.name).toBe("Test Agent"); + expect(agents[0]!.instructions).toBe("Instructions here."); + }); + }); +}); diff --git a/packages/agents/src/adapters/claude-code.ts b/packages/agents/src/adapters/claude-code.ts new file mode 100644 index 0000000..114726a --- /dev/null +++ b/packages/agents/src/adapters/claude-code.ts @@ -0,0 +1,11 @@ +import { createMarkdownAdapter } from "./markdown-adapter.js"; + +const { Adapter: ClaudeCodeAgentAdapter, adapter } = createMarkdownAdapter({ + id: "claude-code", + name: "Claude Code", + configDir: ".claude/agents", + command: "claude", +}); + +export { ClaudeCodeAgentAdapter }; +export default adapter; diff --git a/packages/agents/src/adapters/cline.test.ts b/packages/agents/src/adapters/cline.test.ts new file mode 100644 index 0000000..8b17c6c --- /dev/null +++ b/packages/agents/src/adapters/cline.test.ts @@ -0,0 +1,116 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; +import type { AgentDefinition } from "../types/index.js"; + +vi.mock("./index.js", () => { + const registry = { register: vi.fn() }; + abstract class BaseAgentAdapter { + abstract readonly id: string; + abstract readonly name: string; + abstract readonly nativeSupport: boolean; + abstract readonly configDir: string; + abstract generate(agents: AgentDefinition[]): Promise; + abstract import(cwd?: string): Promise; + async detect() { + return false; + } + async install() {} + async uninstall() {} + } + return { BaseAgentAdapter, registry }; +}); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { ClineAgentAdapter } from "./cline.js"; + +describe("ClineAgentAdapter", () => { + let adapter: ClineAgentAdapter; + + const testAgent: AgentDefinition = { + id: "reviewer", + name: 
"Code Reviewer", + description: "Reviews code for quality", + instructions: "Review all code changes carefully.", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new ClineAgentAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => { + expect(adapter.id).toBe("cline"); + }); + + it("has correct name", () => { + expect(adapter.name).toBe("Cline"); + }); + + it("has native support", () => { + expect(adapter.nativeSupport).toBe(true); + }); + + it("has correct configDir", () => { + expect(adapter.configDir).toBe(".cline/agents"); + }); + }); + + describe("generate", () => { + it("returns empty array for no agents", async () => { + const files = await adapter.generate([]); + expect(files).toEqual([]); + }); + + it("generates file at correct path", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.path).toBe(".cline/agents/reviewer.md"); + }); + + it("generates file with md format", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.format).toBe("md"); + }); + + it("includes frontmatter and body", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.content).toContain("---"); + expect(files[0]!.content).toContain("# Code Reviewer"); + }); + }); + + describe("import", () => { + it("returns empty array when config dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const agents = await adapter.import("/project"); + expect(agents).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const agents = await adapter.import(); + expect(agents).toEqual([]); + }); + + it("parses agent from markdown file", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["reviewer.md"] as unknown as never); + vi.mocked(readFile).mockResolvedValue( + "---\ndescription: Reviews code\n---\n\n# Code 
Reviewer\n\nReview carefully.\n", + ); + + const agents = await adapter.import("/project"); + expect(agents).toHaveLength(1); + expect(agents[0]!.id).toBe("reviewer"); + }); + }); +}); diff --git a/packages/agents/src/adapters/cline.ts b/packages/agents/src/adapters/cline.ts new file mode 100644 index 0000000..d3e3217 --- /dev/null +++ b/packages/agents/src/adapters/cline.ts @@ -0,0 +1,11 @@ +import { createMarkdownAdapter } from "./markdown-adapter.js"; + +const { Adapter: ClineAgentAdapter, adapter } = createMarkdownAdapter({ + id: "cline", + name: "Cline", + configDir: ".cline/agents", + command: "cline", +}); + +export { ClineAgentAdapter }; +export default adapter; diff --git a/packages/agents/src/adapters/copilot.test.ts b/packages/agents/src/adapters/copilot.test.ts new file mode 100644 index 0000000..a4debf0 --- /dev/null +++ b/packages/agents/src/adapters/copilot.test.ts @@ -0,0 +1,110 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; +import type { AgentDefinition } from "../types/index.js"; + +vi.mock("./index.js", () => { + const registry = { register: vi.fn() }; + abstract class BaseAgentAdapter { + abstract readonly id: string; + abstract readonly name: string; + abstract readonly nativeSupport: boolean; + abstract readonly configDir: string; + abstract generate(agents: AgentDefinition[]): Promise; + abstract import(cwd?: string): Promise; + async detect() { + return false; + } + async install() {} + async uninstall() {} + } + return { BaseAgentAdapter, registry }; +}); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { CopilotAgentAdapter } from "./copilot.js"; + +describe("CopilotAgentAdapter", () => { + let adapter: CopilotAgentAdapter; + + const testAgent: AgentDefinition = { + id: "reviewer", + name: "Code Reviewer", + 
description: "Reviews code", + instructions: "Review carefully.", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new CopilotAgentAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => { + expect(adapter.id).toBe("copilot"); + }); + + it("has correct name", () => { + expect(adapter.name).toBe("Copilot"); + }); + + it("has native support", () => { + expect(adapter.nativeSupport).toBe(true); + }); + + it("has correct configDir", () => { + expect(adapter.configDir).toBe(".github/agents"); + }); + }); + + describe("generate", () => { + it("returns empty array for no agents", async () => { + const files = await adapter.generate([]); + expect(files).toEqual([]); + }); + + it("generates file at correct path", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.path).toBe(".github/agents/reviewer.md"); + }); + + it("generates file with md format", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.format).toBe("md"); + }); + }); + + describe("import", () => { + it("returns empty array when config dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const agents = await adapter.import("/project"); + expect(agents).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const agents = await adapter.import(); + expect(agents).toEqual([]); + }); + + it("parses agent from markdown file", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["reviewer.md"] as unknown as never); + vi.mocked(readFile).mockResolvedValue( + "---\ndescription: Reviews code\n---\n\n# Code Reviewer\n\nReview.\n", + ); + + const agents = await adapter.import("/project"); + expect(agents).toHaveLength(1); + expect(agents[0]!.id).toBe("reviewer"); + }); + }); +}); diff --git a/packages/agents/src/adapters/copilot.ts b/packages/agents/src/adapters/copilot.ts 
new file mode 100644 index 0000000..9a1004f --- /dev/null +++ b/packages/agents/src/adapters/copilot.ts @@ -0,0 +1,10 @@ +import { createMarkdownAdapter } from "./markdown-adapter.js"; + +const { Adapter: CopilotAgentAdapter, adapter } = createMarkdownAdapter({ + id: "copilot", + name: "Copilot", + configDir: ".github/agents", +}); + +export { CopilotAgentAdapter }; +export default adapter; diff --git a/packages/agents/src/adapters/cursor.test.ts b/packages/agents/src/adapters/cursor.test.ts new file mode 100644 index 0000000..14bfeb1 --- /dev/null +++ b/packages/agents/src/adapters/cursor.test.ts @@ -0,0 +1,129 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; +import type { AgentDefinition } from "../types/index.js"; + +vi.mock("./index.js", () => { + const registry = { register: vi.fn() }; + abstract class BaseAgentAdapter { + abstract readonly id: string; + abstract readonly name: string; + abstract readonly nativeSupport: boolean; + abstract readonly configDir: string; + abstract generate(agents: AgentDefinition[]): Promise; + abstract import(cwd?: string): Promise; + async detect() { + return false; + } + async install() {} + async uninstall() {} + } + return { BaseAgentAdapter, registry }; +}); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { CursorAgentAdapter } from "./cursor.js"; + +describe("CursorAgentAdapter", () => { + let adapter: CursorAgentAdapter; + + const testAgent: AgentDefinition = { + id: "reviewer", + name: "Code Reviewer", + description: "Reviews code for quality", + instructions: "Review all code changes carefully.", + tools: ["Read", "Grep"], + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new CursorAgentAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => { + 
expect(adapter.id).toBe("cursor"); + }); + + it("has correct name", () => { + expect(adapter.name).toBe("Cursor"); + }); + + it("has native support", () => { + expect(adapter.nativeSupport).toBe(true); + }); + + it("has correct configDir", () => { + expect(adapter.configDir).toBe(".cursor/agents"); + }); + }); + + describe("generate", () => { + it("returns empty array for no agents", async () => { + const files = await adapter.generate([]); + expect(files).toEqual([]); + }); + + it("generates one file per agent at correct path", async () => { + const files = await adapter.generate([testAgent]); + expect(files).toHaveLength(1); + expect(files[0]!.path).toBe(".cursor/agents/reviewer.md"); + }); + + it("generates file with md format", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.format).toBe("md"); + }); + + it("includes frontmatter and heading", async () => { + const files = await adapter.generate([testAgent]); + expect(files[0]!.content).toContain("---"); + expect(files[0]!.content).toContain("# Code Reviewer"); + expect(files[0]!.content).toContain("Review all code changes carefully."); + }); + + it("generates multiple agent files", async () => { + const agents: AgentDefinition[] = [ + testAgent, + { id: "fixer", name: "Bug Fixer", instructions: "Fix bugs." 
}, + ]; + const files = await adapter.generate(agents); + expect(files).toHaveLength(2); + }); + }); + + describe("import", () => { + it("returns empty array when config dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const agents = await adapter.import("/project"); + expect(agents).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const agents = await adapter.import(); + expect(agents).toEqual([]); + }); + + it("parses agent from markdown file", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["reviewer.md"] as unknown as never); + vi.mocked(readFile).mockResolvedValue( + "---\ndescription: Reviews code\n---\n\n# Code Reviewer\n\nReview carefully.\n", + ); + + const agents = await adapter.import("/project"); + expect(agents).toHaveLength(1); + expect(agents[0]!.id).toBe("reviewer"); + expect(agents[0]!.name).toBe("Code Reviewer"); + }); + }); +}); diff --git a/packages/agents/src/adapters/cursor.ts b/packages/agents/src/adapters/cursor.ts new file mode 100644 index 0000000..41c2498 --- /dev/null +++ b/packages/agents/src/adapters/cursor.ts @@ -0,0 +1,11 @@ +import { createMarkdownAdapter } from "./markdown-adapter.js"; + +const { Adapter: CursorAgentAdapter, adapter } = createMarkdownAdapter({ + id: "cursor", + name: "Cursor", + configDir: ".cursor/agents", + command: "cursor", +}); + +export { CursorAgentAdapter }; +export default adapter; diff --git a/packages/agents/src/adapters/droid.test.ts b/packages/agents/src/adapters/droid.test.ts new file mode 100644 index 0000000..d2de080 --- /dev/null +++ b/packages/agents/src/adapters/droid.test.ts @@ -0,0 +1,110 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; +import type { AgentDefinition } from "../types/index.js"; + +vi.mock("./index.js", () => { + const registry = { register: vi.fn() }; + abstract class BaseAgentAdapter { + abstract 
// ── droid.test.ts (continuation: rest of the mocked BaseAgentAdapter,
// fs mocks, and the suite proper) ──
readonly id: string;
    abstract readonly name: string;
    abstract readonly nativeSupport: boolean;
    abstract readonly configDir: string;
    // NOTE(review): return-type arguments (e.g. Promise<...>) appear lost in
    // transit throughout this mock — restore from adapters/base.ts.
    abstract generate(agents: AgentDefinition[]): Promise;
    abstract import(cwd?: string): Promise;
    async detect() {
      return false;
    }
    async install() {}
    async uninstall() {}
  }
  return { BaseAgentAdapter, registry };
});

// Filesystem is fully mocked so import() never touches the real disk.
vi.mock("node:fs", () => ({
  existsSync: vi.fn(() => false),
}));

vi.mock("node:fs/promises", () => ({
  readFile: vi.fn(),
  readdir: vi.fn(),
}));

import { existsSync } from "node:fs";
import { readFile, readdir } from "node:fs/promises";
import { DroidAgentAdapter } from "./droid.js";

describe("DroidAgentAdapter", () => {
  let adapter: DroidAgentAdapter;

  const testAgent: AgentDefinition = {
    id: "reviewer",
    name: "Code Reviewer",
    description: "Reviews code",
    instructions: "Review carefully.",
  };

  beforeEach(() => {
    vi.clearAllMocks();
    adapter = new DroidAgentAdapter();
  });

  describe("metadata", () => {
    it("has correct id", () => {
      expect(adapter.id).toBe("droid");
    });

    it("has correct name", () => {
      expect(adapter.name).toBe("Droid");
    });

    it("has native support", () => {
      expect(adapter.nativeSupport).toBe(true);
    });

    it("has correct configDir", () => {
      expect(adapter.configDir).toBe(".factory/agents");
    });
  });

  describe("generate", () => {
    it("returns empty array for no agents", async () => {
      const files = await adapter.generate([]);
      expect(files).toEqual([]);
    });

    it("generates file at correct path", async () => {
      const files = await adapter.generate([testAgent]);
      expect(files[0]!.path).toBe(".factory/agents/reviewer.md");
    });

    it("generates file with md format", async () => {
      const files = await adapter.generate([testAgent]);
      expect(files[0]!.format).toBe("md");
    });
  });

  describe("import", () => {
    it("returns empty array when config dir does not exist", async () => {
      vi.mocked(existsSync).mockReturnValue(false);
      const agents = await adapter.import("/project");
      expect(agents).toEqual([]);
    });

    it("imports without cwd argument", async () => {
      vi.mocked(existsSync).mockReturnValue(false);
      const agents = await adapter.import();
      expect(agents).toEqual([]);
    });

    it("parses agent from markdown file", async () => {
      vi.mocked(existsSync).mockReturnValue(true);
      vi.mocked(readdir).mockResolvedValue(["reviewer.md"] as unknown as never);
      vi.mocked(readFile).mockResolvedValue(
        "---\ndescription: Reviews code\n---\n\n# Code Reviewer\n\nReview.\n",
      );

      const agents = await adapter.import("/project");
      expect(agents).toHaveLength(1);
      expect(agents[0]!.id).toBe("reviewer");
    });
  });
});

// ── packages/agents/src/adapters/droid.ts (new file) ──
// Factory ("Droid") adapter built on the shared markdown-adapter factory.
import { createMarkdownAdapter } from "./markdown-adapter.js";

const { Adapter: DroidAgentAdapter, adapter } = createMarkdownAdapter({
  id: "droid",
  name: "Droid",
  configDir: ".factory/agents",
  command: "droid",
});

export { DroidAgentAdapter };
export default adapter;

// ── packages/agents/src/adapters/gemini-cli.test.ts (new file; continues) ──
import { describe, it, expect, vi, beforeEach } from "vitest";
import type { AgentDefinition } from "../types/index.js";

vi.mock("./index.js", () => {
  const registry = { register: vi.fn() };
  abstract class BaseAgentAdapter {
    abstract readonly id: string;
    abstract readonly name: string;
    abstract readonly nativeSupport: boolean;
    abstract readonly configDir: string;
    abstract generate(agents: AgentDefinition[]): Promise;
    abstract import(cwd?: string): Promise;
    async detect() {
// ── gemini-cli.test.ts (continuation) ──
return false;
    }
    async install() {}
    async uninstall() {}
  }
  return { BaseAgentAdapter, registry };
});

vi.mock("node:fs", () => ({
  existsSync: vi.fn(() => false),
}));

vi.mock("node:fs/promises", () => ({
  readFile: vi.fn(),
  readdir: vi.fn(),
}));

import { existsSync } from "node:fs";
import { readFile, readdir } from "node:fs/promises";
import { GeminiCliAgentAdapter } from "./gemini-cli.js";

describe("GeminiCliAgentAdapter", () => {
  let adapter: GeminiCliAgentAdapter;

  const testAgent: AgentDefinition = {
    id: "reviewer",
    name: "Code Reviewer",
    description: "Reviews code",
    instructions: "Review carefully.",
  };

  beforeEach(() => {
    vi.clearAllMocks();
    adapter = new GeminiCliAgentAdapter();
  });

  describe("metadata", () => {
    it("has correct id", () => {
      expect(adapter.id).toBe("gemini-cli");
    });

    it("has correct name", () => {
      expect(adapter.name).toBe("Gemini CLI");
    });

    it("has native support", () => {
      expect(adapter.nativeSupport).toBe(true);
    });

    it("has correct configDir", () => {
      expect(adapter.configDir).toBe(".gemini/agents");
    });
  });

  describe("generate", () => {
    it("returns empty array for no agents", async () => {
      const files = await adapter.generate([]);
      expect(files).toEqual([]);
    });

    it("generates file at correct path", async () => {
      const files = await adapter.generate([testAgent]);
      expect(files[0]!.path).toBe(".gemini/agents/reviewer.md");
    });

    it("generates file with md format", async () => {
      const files = await adapter.generate([testAgent]);
      expect(files[0]!.format).toBe("md");
    });
  });

  describe("import", () => {
    it("returns empty array when config dir does not exist", async () => {
      vi.mocked(existsSync).mockReturnValue(false);
      const agents = await adapter.import("/project");
      expect(agents).toEqual([]);
    });

    it("imports without cwd argument", async () => {
      vi.mocked(existsSync).mockReturnValue(false);
      const agents = await adapter.import();
      expect(agents).toEqual([]);
    });

    it("parses agent from markdown file", async () => {
      vi.mocked(existsSync).mockReturnValue(true);
      vi.mocked(readdir).mockResolvedValue(["reviewer.md"] as unknown as never);
      vi.mocked(readFile).mockResolvedValue(
        "---\ndescription: Reviews code\n---\n\n# Code Reviewer\n\nReview.\n",
      );

      const agents = await adapter.import("/project");
      expect(agents).toHaveLength(1);
      expect(agents[0]!.id).toBe("reviewer");
    });
  });
});

// ── packages/agents/src/adapters/gemini-cli.ts (new file) ──
import { createMarkdownAdapter } from "./markdown-adapter.js";

const { Adapter: GeminiCliAgentAdapter, adapter } = createMarkdownAdapter({
  id: "gemini-cli",
  name: "Gemini CLI",
  configDir: ".gemini/agents",
  command: "gemini",
});

export { GeminiCliAgentAdapter };
export default adapter;

// ── packages/agents/src/adapters/index.ts (new file) ──
// Public barrel for the adapter infrastructure.
export { registry } from "./registry.js";
export { BaseAgentAdapter } from "./base.js";

// ── packages/agents/src/adapters/kiro.test.ts (new file; continues) ──
import { describe, it, expect, vi, beforeEach } from "vitest";
import type { AgentDefinition } from "../types/index.js";

vi.mock("./index.js", () => {
  const registry = { register: vi.fn() };
  abstract class BaseAgentAdapter {
    abstract readonly id: string;
    abstract readonly name: string;
    abstract readonly nativeSupport: boolean;
    abstract readonly configDir: string;
    abstract generate(agents: AgentDefinition[]): Promise;
// ── kiro.test.ts (continuation) ──
abstract import(cwd?: string): Promise;
    async detect() {
      return false;
    }
    async install() {}
    async uninstall() {}
  }
  return { BaseAgentAdapter, registry };
});

vi.mock("node:fs", () => ({
  existsSync: vi.fn(() => false),
}));

vi.mock("node:fs/promises", () => ({
  readFile: vi.fn(),
  readdir: vi.fn(),
}));

import { existsSync } from "node:fs";
import { readFile, readdir } from "node:fs/promises";
import { KiroAgentAdapter } from "./kiro.js";

describe("KiroAgentAdapter", () => {
  let adapter: KiroAgentAdapter;

  const testAgent: AgentDefinition = {
    id: "reviewer",
    name: "Code Reviewer",
    description: "Reviews code",
    instructions: "Review carefully.",
  };

  beforeEach(() => {
    vi.clearAllMocks();
    adapter = new KiroAgentAdapter();
  });

  describe("metadata", () => {
    it("has correct id", () => {
      expect(adapter.id).toBe("kiro");
    });

    it("has correct name", () => {
      expect(adapter.name).toBe("Kiro");
    });

    it("has native support", () => {
      expect(adapter.nativeSupport).toBe(true);
    });

    it("has correct configDir", () => {
      expect(adapter.configDir).toBe(".kiro/agents");
    });
  });

  describe("generate", () => {
    it("returns empty array for no agents", async () => {
      const files = await adapter.generate([]);
      expect(files).toEqual([]);
    });

    it("generates file at correct path", async () => {
      const files = await adapter.generate([testAgent]);
      expect(files[0]!.path).toBe(".kiro/agents/reviewer.md");
    });

    it("generates file with md format", async () => {
      const files = await adapter.generate([testAgent]);
      expect(files[0]!.format).toBe("md");
    });
  });

  describe("import", () => {
    it("returns empty array when config dir does not exist", async () => {
      vi.mocked(existsSync).mockReturnValue(false);
      const agents = await adapter.import("/project");
      expect(agents).toEqual([]);
    });

    it("imports without cwd argument", async () => {
      vi.mocked(existsSync).mockReturnValue(false);
      const agents = await adapter.import();
      expect(agents).toEqual([]);
    });

    it("parses agent from markdown file", async () => {
      vi.mocked(existsSync).mockReturnValue(true);
      vi.mocked(readdir).mockResolvedValue(["reviewer.md"] as unknown as never);
      vi.mocked(readFile).mockResolvedValue(
        "---\ndescription: Reviews code\n---\n\n# Code Reviewer\n\nReview.\n",
      );

      const agents = await adapter.import("/project");
      expect(agents).toHaveLength(1);
      expect(agents[0]!.id).toBe("reviewer");
    });
  });
});

// ── packages/agents/src/adapters/kiro.ts (new file) ──
import { createMarkdownAdapter } from "./markdown-adapter.js";

const { Adapter: KiroAgentAdapter, adapter } = createMarkdownAdapter({
  id: "kiro",
  name: "Kiro",
  configDir: ".kiro/agents",
  command: "kiro",
});

export { KiroAgentAdapter };
export default adapter;

// ── packages/agents/src/adapters/markdown-adapter.ts (new file; continues) ──
// Shared factory for "directory of markdown files" agent adapters
// (cursor, droid, gemini-cli, kiro).
import { BaseAgentAdapter } from "./base.js";
import { registry } from "./registry.js";
import type { AgentDefinition, GeneratedFile } from "../types/index.js";
import { existsSync } from "node:fs";
import { readFile, readdir } from "node:fs/promises";
import { resolve, basename } from "node:path";

export type MarkdownAdapterConfig = {
  id: string;
  name: string;
  configDir: string;
  command?: string;
};

export function createMarkdownAdapter(config: MarkdownAdapterConfig) {
  class MarkdownAgentAdapter extends BaseAgentAdapter {
    readonly id = config.id;
    readonly name = config.name;
    readonly nativeSupport = true;
    readonly configDir = config.configDir;
    override
readonly command = config.command; + + async generate(agents: AgentDefinition[]): Promise { + return agents.map((agent) => ({ + path: `${this.configDir}/${agent.id}.md`, + content: formatAgent(agent), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const agentsDir = resolve(dir, this.configDir); + if (!existsSync(agentsDir)) return []; + + const files = await readdir(agentsDir); + const agents: AgentDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(agentsDir, file), "utf-8"); + const id = basename(file, ".md"); + agents.push(parseAgent(id, content)); + } + + return agents; + } + } + + const adapter = new MarkdownAgentAdapter(); + registry.register(adapter); + return { + Adapter: MarkdownAgentAdapter as unknown as typeof BaseAgentAdapter, + adapter: adapter as BaseAgentAdapter, + }; +} + +function formatAgent(agent: AgentDefinition): string { + const frontmatter: Record = {}; + if (agent.description) frontmatter.description = agent.description; + if (agent.tools?.length) frontmatter.tools = agent.tools; + if (agent.model) frontmatter.model = agent.model; + + let md = "---\n"; + for (const [key, value] of Object.entries(frontmatter)) { + if (Array.isArray(value)) { + md += `${key}:\n`; + for (const item of value) { + md += ` - ${item}\n`; + } + } else { + md += `${key}: ${value}\n`; + } + } + md += "---\n\n"; + md += `# ${agent.name}\n\n`; + md += agent.instructions + "\n"; + return md; +} + +function parseAgent(id: string, raw: string): AgentDefinition { + const agent: AgentDefinition = { id, name: id, instructions: "" }; + + if (raw.startsWith("---")) { + const endIdx = raw.indexOf("---", 3); + if (endIdx !== -1) { + const fm = raw.slice(3, endIdx).trim(); + const body = raw.slice(endIdx + 3).trim(); + + // Simple YAML-like parsing + const lines = fm.split("\n"); + let currentKey = ""; + let currentArray: string[] = 
[]; + + for (const line of lines) { + if (line.startsWith(" - ")) { + currentArray.push(line.slice(4).trim()); + } else { + if (currentKey && currentArray.length > 0) { + if (currentKey === "tools") agent.tools = currentArray; + currentArray = []; + } + const colonIdx = line.indexOf(":"); + if (colonIdx !== -1) { + currentKey = line.slice(0, colonIdx).trim(); + const value = line.slice(colonIdx + 1).trim(); + if (value) { + if (currentKey === "description") agent.description = value; + if (currentKey === "model") agent.model = value; + } + } + } + } + if (currentKey === "tools" && currentArray.length > 0) { + agent.tools = currentArray; + } + + // Parse body + const bodyLines = body.split("\n"); + let contentStart = 0; + if (bodyLines[0]?.startsWith("# ")) { + agent.name = bodyLines[0].slice(2).trim(); + contentStart = 1; + if (bodyLines[contentStart]?.trim() === "") contentStart++; + } + agent.instructions = bodyLines.slice(contentStart).join("\n").trim(); + } + } else { + // No frontmatter, try to parse just body + const bodyLines = raw.split("\n"); + let contentStart = 0; + if (bodyLines[0]?.startsWith("# ")) { + agent.name = bodyLines[0].slice(2).trim(); + contentStart = 1; + if (bodyLines[contentStart]?.trim() === "") contentStart++; + } + agent.instructions = bodyLines.slice(contentStart).join("\n").trim(); + } + + return agent; +} diff --git a/packages/agents/src/adapters/registry.test.ts b/packages/agents/src/adapters/registry.test.ts new file mode 100644 index 0000000..d0faf41 --- /dev/null +++ b/packages/agents/src/adapters/registry.test.ts @@ -0,0 +1,154 @@ +import { describe, it, expect, beforeEach, vi } from "vitest"; +import { registry } from "./registry.js"; +import type { BaseAgentAdapter } from "./base.js"; +import type { AgentDefinition, GeneratedFile } from "../types/index.js"; + +function makeFakeAdapter(id: string, detects: boolean = true): BaseAgentAdapter { + return { + id, + name: `${id} Adapter`, + nativeSupport: true, + configDir: 
// ── registry.test.ts (continuation of makeFakeAdapter and the suite) ──
`.${id}/agents`,
    detect: async () => detects,
    generate: async (_agents: AgentDefinition[]) => [] as GeneratedFile[],
    import: async () => [] as AgentDefinition[],
    install: async () => {},
    uninstall: async () => {},
  } as unknown as BaseAgentAdapter;
}

describe("Real AgentAdapterRegistry singleton", () => {
  // The registry is process-wide — reset between tests.
  beforeEach(() => {
    registry.clear();
  });

  describe("register / get", () => {
    it("registers and retrieves an adapter by id", () => {
      const adapter = makeFakeAdapter("claude-code");
      registry.register(adapter);
      expect(registry.get("claude-code")).toBe(adapter);
    });

    it("returns undefined for unknown adapter id", () => {
      expect(registry.get("nonexistent")).toBeUndefined();
    });

    it("overwrites a previously registered adapter with the same id", () => {
      const first = makeFakeAdapter("dupe");
      const second = makeFakeAdapter("dupe");
      registry.register(first);
      registry.register(second);
      expect(registry.get("dupe")).toBe(second);
    });
  });

  describe("list", () => {
    it("returns empty array when nothing registered", () => {
      expect(registry.list()).toEqual([]);
    });

    it("lists all registered adapter IDs", () => {
      registry.register(makeFakeAdapter("a"));
      registry.register(makeFakeAdapter("b"));
      // toSorted(): insertion order is not asserted, only membership.
      expect(registry.list().toSorted()).toEqual(["a", "b"]);
    });
  });

  describe("getAll", () => {
    it("returns empty array when nothing registered", () => {
      expect(registry.getAll()).toEqual([]);
    });

    it("returns all registered adapters", () => {
      const a = makeFakeAdapter("a");
      const b = makeFakeAdapter("b");
      registry.register(a);
      registry.register(b);
      const all = registry.getAll();
      expect(all).toHaveLength(2);
      expect(all).toContain(a);
      expect(all).toContain(b);
    });
  });

  describe("detectAll", () => {
    it("returns adapters that detect successfully", async () => {
      registry.register(makeFakeAdapter("found", true));
      registry.register(makeFakeAdapter("missing", false));

      const detected = await registry.detectAll();
      expect(detected).toHaveLength(1);
      expect(detected[0]?.id).toBe("found");
    });

    it("skips adapters that throw during detection", async () => {
      const throwingAdapter = makeFakeAdapter("broken");
      throwingAdapter.detect = async () => {
        throw new Error("detection crashed");
      };
      registry.register(throwingAdapter);
      registry.register(makeFakeAdapter("stable", true));

      const detected = await registry.detectAll();
      expect(detected).toHaveLength(1);
      expect(detected[0]?.id).toBe("stable");
    });

    it("returns empty array when no adapters detect", async () => {
      registry.register(makeFakeAdapter("a", false));
      registry.register(makeFakeAdapter("b", false));

      const detected = await registry.detectAll();
      expect(detected).toEqual([]);
    });

    it("passes cwd to adapter.detect", async () => {
      const detectFn = vi.fn().mockResolvedValue(true);
      const adapter = makeFakeAdapter("with-cwd");
      adapter.detect = detectFn;
      registry.register(adapter);

      await registry.detectAll("/custom/dir");
      expect(detectFn).toHaveBeenCalledWith("/custom/dir");
    });

    it("handles multiple throwing adapters gracefully", async () => {
      const spy = vi.fn();
      for (let i = 0; i < 3; i++) {
        const adapter = makeFakeAdapter(`throw-${i}`);
        adapter.detect = async () => {
          spy();
          throw new Error(`fail-${i}`);
        };
        registry.register(adapter);
      }

      const detected = await registry.detectAll();
      expect(detected).toEqual([]);
      expect(spy).toHaveBeenCalledTimes(3);
    });
  });

  describe("clear", () => {
    it("removes all adapters", () => {
      registry.register(makeFakeAdapter("a"));
      registry.register(makeFakeAdapter("b"));

      registry.clear();

      expect(registry.list()).toEqual([]);
      expect(registry.get("a")).toBeUndefined();
      expect(registry.get("b")).toBeUndefined();
    });

    it("allows re-registration after clear", () => {
      registry.register(makeFakeAdapter("x"));
      registry.clear();
// ── registry.test.ts (tail of "allows re-registration after clear") ──
registry.register(makeFakeAdapter("y"));

      expect(registry.list()).toEqual(["y"]);
      expect(registry.get("x")).toBeUndefined();
      expect(registry.get("y")?.id).toBe("y");
    });
  });
});

// ── packages/agents/src/adapters/registry.ts (new file) ──
// id -> adapter lookup with best-effort detection across all adapters.
// NOTE(review): several type arguments (Map<...>, Promise<...>) appear lost
// in transit in this file — restore from the repository before compiling.
import type { BaseAgentAdapter } from "./base.js";

class AgentAdapterRegistry {
  private adapters: Map = new Map();

  // Last registration wins for duplicate ids.
  register(adapter: BaseAgentAdapter): void {
    this.adapters.set(adapter.id, adapter);
  }

  get(id: string): BaseAgentAdapter | undefined {
    return this.adapters.get(id);
  }

  list(): string[] {
    return [...this.adapters.keys()];
  }

  getAll(): BaseAgentAdapter[] {
    return [...this.adapters.values()];
  }

  // Runs detect() on every adapter; a throwing adapter counts as
  // "not detected" rather than failing the whole scan.
  async detectAll(cwd?: string): Promise {
    const detected: BaseAgentAdapter[] = [];
    for (const adapter of this.adapters.values()) {
      try {
        if (await adapter.detect(cwd)) {
          detected.push(adapter);
        }
      } catch {
        // Detection failed, skip
      }
    }
    return detected;
  }

  clear(): void {
    this.adapters.clear();
  }
}

declare global {
  // eslint-disable-next-line no-var
  var __premierstudio_agents_registry: AgentAdapterRegistry | undefined;
}

// Stored on globalThis so duplicate module instances (e.g. dual CJS/ESM load
// or test-runner isolation) share one registry.
export const registry = (globalThis.__premierstudio_agents_registry ??= new AgentAdapterRegistry());

// ── packages/agents/src/adapters/roo-code.test.ts (new file; continues) ──
import { describe, it, expect, vi, beforeEach } from "vitest";
import type { AgentDefinition } from "../types/index.js";

vi.mock("./index.js", () => {
  const registry = { register: vi.fn() };
  abstract class BaseAgentAdapter {
    abstract readonly id: string;
    abstract readonly name: string;
    abstract readonly nativeSupport:
// ── roo-code.test.ts (continuation) ──
boolean;
    abstract readonly configDir: string;
    abstract generate(agents: AgentDefinition[]): Promise;
    abstract import(cwd?: string): Promise;
    async detect() {
      return false;
    }
    async install() {}
    async uninstall() {}
  }
  return { BaseAgentAdapter, registry };
});

vi.mock("node:fs", () => ({
  existsSync: vi.fn(() => false),
}));

vi.mock("node:fs/promises", () => ({
  readFile: vi.fn(),
  readdir: vi.fn(),
}));

import { existsSync } from "node:fs";
import { readFile } from "node:fs/promises";
import { RooCodeAgentAdapter } from "./roo-code.js";

describe("RooCodeAgentAdapter", () => {
  let adapter: RooCodeAgentAdapter;

  const testAgent: AgentDefinition = {
    id: "reviewer",
    name: "Code Reviewer",
    instructions: "Review all code changes carefully.",
    tools: ["read", "edit", "command"],
  };

  beforeEach(() => {
    vi.clearAllMocks();
    adapter = new RooCodeAgentAdapter();
  });

  describe("metadata", () => {
    it("has correct id", () => {
      expect(adapter.id).toBe("roo-code");
    });

    it("has correct name", () => {
      expect(adapter.name).toBe("Roo Code");
    });

    it("has native support", () => {
      expect(adapter.nativeSupport).toBe(true);
    });

    it("has correct configDir", () => {
      expect(adapter.configDir).toBe(".roo");
    });
  });

  // Roo Code bundles all agents into a single .roomodes JSON file.
  describe("generate", () => {
    it("returns empty array for no agents", async () => {
      const files = await adapter.generate([]);
      expect(files).toEqual([]);
    });

    it("generates single .roomodes file", async () => {
      const files = await adapter.generate([testAgent]);
      expect(files).toHaveLength(1);
      expect(files[0]!.path).toBe(".roomodes");
    });

    it("generates file with json format", async () => {
      const files = await adapter.generate([testAgent]);
      expect(files[0]!.format).toBe("json");
    });

    it("generates valid JSON with customModes", async () => {
      const files = await adapter.generate([testAgent]);
      const parsed = JSON.parse(files[0]!.content) as {
        customModes: Array<{
          slug: string;
          name: string;
          roleDefinition: string;
          groups: string[];
        }>;
      };
      expect(parsed.customModes).toBeDefined();
      expect(parsed.customModes).toHaveLength(1);
    });

    it("maps agent id to slug", async () => {
      const files = await adapter.generate([testAgent]);
      const parsed = JSON.parse(files[0]!.content) as {
        customModes: Array<{ slug: string }>;
      };
      expect(parsed.customModes[0]!.slug).toBe("reviewer");
    });

    it("maps agent name to name", async () => {
      const files = await adapter.generate([testAgent]);
      const parsed = JSON.parse(files[0]!.content) as {
        customModes: Array<{ name: string }>;
      };
      expect(parsed.customModes[0]!.name).toBe("Code Reviewer");
    });

    it("maps agent instructions to roleDefinition", async () => {
      const files = await adapter.generate([testAgent]);
      const parsed = JSON.parse(files[0]!.content) as {
        customModes: Array<{ roleDefinition: string }>;
      };
      expect(parsed.customModes[0]!.roleDefinition).toBe("Review all code changes carefully.");
    });

    it("maps agent tools to groups", async () => {
      const files = await adapter.generate([testAgent]);
      const parsed = JSON.parse(files[0]!.content) as {
        customModes: Array<{ groups: string[] }>;
      };
      expect(parsed.customModes[0]!.groups).toEqual(["read", "edit", "command"]);
    });

    it("uses default groups when no tools provided", async () => {
      const agent: AgentDefinition = {
        id: "simple",
        name: "Simple",
        instructions: "Do stuff.",
      };
      const files = await adapter.generate([agent]);
      const parsed = JSON.parse(files[0]!.content) as {
        customModes: Array<{ groups: string[] }>;
      };
      expect(parsed.customModes[0]!.groups).toEqual(["read", "edit", "command"]);
    });

    it("generates multiple agents in single file", async () => {
      const agents: AgentDefinition[] = [
        testAgent,
        { id: "fixer", name: "Bug Fixer", instructions: "Fix bugs." },
      ];
      const files = await adapter.generate(agents);
      expect(files).toHaveLength(1);
      const parsed = JSON.parse(files[0]!.content) as {
        customModes: Array<{ slug: string }>;
      };
      expect(parsed.customModes).toHaveLength(2);
      expect(parsed.customModes[0]!.slug).toBe("reviewer");
      expect(parsed.customModes[1]!.slug).toBe("fixer");
    });

    it("content ends with newline", async () => {
      const files = await adapter.generate([testAgent]);
      expect(files[0]!.content.endsWith("\n")).toBe(true);
    });
  });

  describe("import", () => {
    it("returns empty array when .roomodes does not exist", async () => {
      vi.mocked(existsSync).mockReturnValue(false);
      const agents = await adapter.import("/project");
      expect(agents).toEqual([]);
    });

    it("parses agents from .roomodes JSON", async () => {
      vi.mocked(existsSync).mockReturnValue(true);
      vi.mocked(readFile).mockResolvedValue(
        JSON.stringify({
          customModes: [
            {
              slug: "reviewer",
              name: "Code Reviewer",
              roleDefinition: "Review all code.",
              groups: ["read", "edit"],
            },
          ],
        }),
      );

      const agents = await adapter.import("/project");
      expect(agents).toHaveLength(1);
      expect(agents[0]!.id).toBe("reviewer");
      expect(agents[0]!.name).toBe("Code Reviewer");
      expect(agents[0]!.instructions).toBe("Review all code.");
      expect(agents[0]!.tools).toEqual(["read", "edit"]);
    });

    it("parses multiple agents from .roomodes", async () => {
      vi.mocked(existsSync).mockReturnValue(true);
      vi.mocked(readFile).mockResolvedValue(
        JSON.stringify({
          customModes: [
            { slug: "agent1", name: "Agent 1", roleDefinition: "Do A.", groups: ["read"] },
            { slug: "agent2", name: "Agent 2", roleDefinition: "Do B.", groups: ["edit"] },
          ],
        }),
      );

      const agents = await adapter.import("/project");
      expect(agents).toHaveLength(2);
    });

    it("handles empty customModes array", async () => {
      vi.mocked(existsSync).mockReturnValue(true);
      vi.mocked(readFile).mockResolvedValue(JSON.stringify({
// ── roo-code.test.ts (tail) ──
customModes: [] }));

      const agents = await adapter.import("/project");
      expect(agents).toEqual([]);
    });

    it("handles missing customModes key", async () => {
      vi.mocked(existsSync).mockReturnValue(true);
      vi.mocked(readFile).mockResolvedValue(JSON.stringify({}));

      const agents = await adapter.import("/project");
      expect(agents).toEqual([]);
    });

    it("imports without cwd argument", async () => {
      vi.mocked(existsSync).mockReturnValue(false);
      const result = await adapter.import();
      expect(result).toEqual([]);
    });
  });

  describe("detect", () => {
    it("detects based on .roomodes file", async () => {
      vi.mocked(existsSync).mockReturnValue(true);
      const result = await adapter.detect("/project");
      expect(result).toBe(true);
    });

    it("returns false when .roomodes does not exist", async () => {
      vi.mocked(existsSync).mockReturnValue(false);
      const result = await adapter.detect("/project");
      expect(result).toBe(false);
    });

    it("detects without cwd argument", async () => {
      vi.mocked(existsSync).mockReturnValue(false);
      const result = await adapter.detect();
      expect(result).toBe(false);
    });
  });
});

// ── packages/agents/src/adapters/roo-code.ts (new file) ──
// Roo Code stores all custom modes in a single .roomodes JSON file rather
// than one markdown file per agent, so it does not use the markdown factory.
// NOTE(review): return-type arguments (Promise<...>) appear lost in transit.
import { BaseAgentAdapter } from "./base.js";
import { registry } from "./registry.js";
import type { AgentDefinition, GeneratedFile } from "../types/index.js";
import { existsSync } from "node:fs";
import { readFile } from "node:fs/promises";
import { resolve } from "node:path";

class RooCodeAgentAdapter extends BaseAgentAdapter {
  readonly id = "roo-code";
  readonly name = "Roo Code";
  readonly nativeSupport = true;
  readonly configDir = ".roo";

  // Maps agents to Roo "custom modes": id->slug, instructions->roleDefinition,
  // tools->groups (defaulting to read/edit/command).
  async generate(agents: AgentDefinition[]): Promise {
    if (agents.length === 0) return [];

    const customModes = agents.map((agent) => ({
      slug: agent.id,
      name: agent.name,
      roleDefinition: agent.instructions,
      groups: agent.tools ?? ["read", "edit", "command"],
    }));

    return [
      {
        path: ".roomodes",
        content: JSON.stringify({ customModes }, null, 2) + "\n",
        format: "json" as const,
      },
    ];
  }

  // Inverse of generate(); returns [] when .roomodes is absent.
  // NOTE(review): JSON.parse throws on a malformed .roomodes — decide whether
  // import() should be best-effort like registry.detectAll().
  async import(cwd?: string): Promise {
    const dir = cwd ?? process.cwd();
    const filePath = resolve(dir, ".roomodes");
    if (!existsSync(filePath)) return [];

    const raw = await readFile(filePath, "utf-8");
    const data = JSON.parse(raw) as {
      customModes?: Array<{
        slug: string;
        name: string;
        roleDefinition: string;
        groups?: string[];
      }>;
    };

    return (data.customModes ?? []).map((mode) => ({
      id: mode.slug,
      name: mode.name,
      instructions: mode.roleDefinition,
      tools: mode.groups,
    }));
  }

  // Presence of the .roomodes file (not the .roo directory) is the signal.
  override async detect(cwd?: string): Promise {
    const dir = cwd ?? process.cwd();
    return existsSync(resolve(dir, ".roomodes"));
  }
}

const adapter = new RooCodeAgentAdapter();
registry.register(adapter);

export { RooCodeAgentAdapter };
export default adapter;

// ── packages/core/src/cli/bin.ts → packages/agents/src/cli/bin.ts
//    (100% similarity rename; no content changes) ──

// ── packages/agents/src/cli/index.test.ts (new file; continues past this chunk) ──
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
import type { AgentDefinition, GeneratedFile } from "../types/index.js";
import type { BaseAgentAdapter } from "../adapters/base.js";

// vi.hoisted so the mock fns exist before the hoisted vi.mock factories run.
const {
  mockRegistryDetectAll,
  mockRegistryList,
  mockRegistryGet,
  mockRegistryGetAll,
  mockWriteFile,
  mockMkdir,
  mockReadFile,
} = vi.hoisted(() => ({
  mockRegistryDetectAll: vi.fn(),
  mockRegistryList: vi.fn(),
  mockRegistryGet: vi.fn(),
  mockRegistryGetAll: vi.fn(),
  mockWriteFile: vi.fn(),
  mockMkdir:
// ── cli/index.test.ts (continuation of the vi.hoisted mock bundle) ──
vi.fn(),
  mockReadFile: vi.fn(),
}));

// The adapters' side-effect registration module is stubbed out entirely.
vi.mock("../adapters/all.js", () => ({}));

vi.mock("../adapters/registry.js", () => ({
  registry: {
    detectAll: (...args: unknown[]) => mockRegistryDetectAll(...args),
    list: () => mockRegistryList(),
    get: (id: string) => mockRegistryGet(id),
    getAll: () => mockRegistryGetAll(),
  },
}));

vi.mock("node:fs/promises", () => ({
  writeFile: (...args: unknown[]) => mockWriteFile(...args),
  mkdir: (...args: unknown[]) => mockMkdir(...args),
  readFile: (...args: unknown[]) => mockReadFile(...args),
}));

import { run } from "./index.js";

// Structurally-typed fake adapter for driving the CLI.
// NOTE(review): some vi.fn type arguments (e.g. "Promise" without its
// argument, bare "Partial") appear lost in transit — restore from repo.
function makeAdapter(overrides: Partial = {}): BaseAgentAdapter {
  return {
    id: overrides.id ?? "test-tool",
    name: overrides.name ?? "Test Tool",
    nativeSupport: true,
    configDir: ".test",
    detect: overrides.detect ?? vi.fn<() => Promise>().mockResolvedValue(true),
    generate:
      overrides.generate ??
      vi
        .fn<(agents: AgentDefinition[]) => Promise>()
        .mockResolvedValue([{ path: ".test/agents/a.md", content: "test", format: "md" }]),
    import: overrides.import ?? vi.fn<() => Promise>().mockResolvedValue([]),
    install:
      overrides.install ??
      vi.fn<(files: GeneratedFile[]) => Promise>().mockResolvedValue(undefined),
    uninstall: overrides.uninstall ?? vi.fn<() => Promise>().mockResolvedValue(undefined),
  } as BaseAgentAdapter;
}

// Captured console output and exit code per test.
let logOutput: string[];
let errorOutput: string[];
let warnOutput: string[];
let exitCode: number | undefined;

const originalLog = console.log;
const originalError = console.error;
const originalWarn = console.warn;
const originalExit = process.exit;

beforeEach(() => {
  logOutput = [];
  errorOutput = [];
  warnOutput = [];
  exitCode = undefined;

  console.log = vi.fn((...args: unknown[]) => {
    logOutput.push(args.map(String).join(" "));
  });
  console.error = vi.fn((...args: unknown[]) => {
    errorOutput.push(args.map(String).join(" "));
  });
  console.warn = vi.fn((...args: unknown[]) => {
    warnOutput.push(args.map(String).join(" "));
  });
  // process.exit is replaced with a throwing stub so run() unwinds like the
  // real call; tests assert on the recorded exitCode.
  process.exit = vi.fn((code?: number) => {
    exitCode = code ?? 0;
    throw new Error(`process.exit(${code})`);
  }) as never;

  vi.clearAllMocks();
  mockWriteFile.mockResolvedValue(undefined);
  mockMkdir.mockResolvedValue(undefined);
});

afterEach(() => {
  console.log = originalLog;
  console.error = originalError;
  console.warn = originalWarn;
  process.exit = originalExit;
});

function allLog(): string {
  return logOutput.join("\n");
}

function allError(): string {
  return errorOutput.join("\n");
}

function allWarn(): string {
  return warnOutput.join("\n");
}

describe("run() - help output", () => {
  it("prints help text for help command", async () => {
    await run(["help"]);
    expect(allLog()).toContain("ai-agents");
    expect(allLog()).toContain("USAGE:");
    expect(allLog()).toContain("COMMANDS:");
  });

  it("prints help text for --help flag", async () => {
    await run(["--help"]);
    expect(allLog()).toContain("ai-agents");
  });

  it("prints help text for -h flag", async () => {
    await run(["-h"]);
    expect(allLog()).toContain("ai-agents");
  });

  it("prints help text when no arguments provided", async () => {
    await run([]);
    expect(allLog()).toContain("ai-agents");
  });

  it("includes all documented commands in help text", async () => {
    await run(["help"]);
    const output = allLog();
    for (const cmd of ["detect", "generate", "install", "import", "sync", "export", "help"]) {
      expect(output).toContain(cmd);
    }
  });

  it("includes all documented options in help text", async () => {
    await run(["help"]);
    const output = allLog();
    for (const opt of ["--tools", "--config", "--verbose", "--dry-run"]) {
      expect(output).toContain(opt);
    }
  });
});

describe("run() - unknown command", () => {
  it("prints error and help, then exits with code 1", async () => {
    await expect(run(["foobar"])).rejects.toThrow("process.exit(1)");
    expect(allError()).toContain("Unknown command: foobar");
    expect(allLog()).toContain("USAGE:");
    expect(exitCode).toBe(1);
  });

  it("prints the actual command name in the error message", async () => {
    await expect(run(["deploy-everything"])).rejects.toThrow("process.exit(1)");
    expect(allError()).toContain("Unknown command: deploy-everything");
  });
});

describe("run() - detect command", () => {
  it("shows detection header", async () => {
    mockRegistryDetectAll.mockResolvedValue([]);
    mockRegistryGetAll.mockReturnValue([]);
    await run(["detect"]);
    expect(allLog()).toContain("Detecting AI coding tools");
  });

  it("lists detected adapters", async () => {
    const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" });
    mockRegistryDetectAll.mockResolvedValue([adapter]);
    mockRegistryGetAll.mockReturnValue([adapter]);
    await run(["detect"]);
    expect(allLog()).toContain("Claude Code");
    expect(allLog()).toContain("\u2713");
  });

  it("lists undetected adapters with cross mark", async () => {
    const detected = makeAdapter({ id: "claude-code", name: "Claude Code" });
    const missing = makeAdapter({ id: "cursor", name: "Cursor" });

    mockRegistryDetectAll.mockResolvedValue([detected]);
    mockRegistryGetAll.mockReturnValue([detected, missing]);

    await
// (file continues beyond this chunk)
run(["detect"]); + const output = allLog(); + expect(output).toContain("\u2713"); + expect(output).toContain("Claude Code"); + expect(output).toContain("\u2717"); + expect(output).toContain("Cursor"); + }); + + it("shows detection summary with counts", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryDetectAll.mockResolvedValue([adapter]); + mockRegistryGetAll.mockReturnValue([adapter, makeAdapter({ id: "cursor", name: "Cursor" })]); + + await run(["detect"]); + expect(allLog()).toContain("Detected 1/2 tools"); + }); + + it("shows configDir for each adapter", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryDetectAll.mockResolvedValue([adapter]); + mockRegistryGetAll.mockReturnValue([adapter]); + + await run(["detect"]); + expect(allLog()).toContain(".test"); + }); +}); + +describe("run() - generate command", () => { + it("prints message when no tools detected and no --tools flag", async () => { + mockRegistryDetectAll.mockResolvedValue([]); + await run(["generate"]); + expect(allLog()).toContain("No AI tools detected"); + }); + + it("generates configs for detected adapters", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryDetectAll.mockResolvedValue([adapter]); + await run(["generate"]); + expect(allLog()).toContain("Generating"); + expect(allLog()).toContain("Done!"); + }); + + it("uses --tools flag to resolve specific adapters", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + await run(["generate", "--tools=claude-code"]); + expect(allLog()).toContain("Generating"); + expect(mockRegistryDetectAll).not.toHaveBeenCalled(); + }); + + it("warns for unknown adapter IDs in --tools flag", async () => { + 
mockRegistryGet.mockReturnValue(undefined); + await run(["generate", "--tools=nonexistent"]); + expect(allWarn()).toContain("Unknown adapter"); + expect(allLog()).toContain("No AI tools detected"); + }); + + it("respects --dry-run and does not write files", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await run(["generate", "--dry-run"]); + expect(allLog()).toContain("[dry-run] Would write:"); + }); + + it("generates for multiple adapters", async () => { + const adapter1 = makeAdapter({ + id: "claude-code", + name: "Claude Code", + generate: vi + .fn<(agents: AgentDefinition[]) => Promise>() + .mockResolvedValue([{ path: ".claude/agents/a.md", content: "test", format: "md" }]), + }); + const adapter2 = makeAdapter({ + id: "cursor", + name: "Cursor", + generate: vi + .fn<(agents: AgentDefinition[]) => Promise>() + .mockResolvedValue([{ path: ".cursor/agents/a.md", content: "test", format: "md" }]), + }); + + mockRegistryDetectAll.mockResolvedValue([adapter1, adapter2]); + + await run(["generate"]); + const output = allLog(); + expect(output).toContain("Generating agent configs for 2 tool(s)"); + expect(output).toContain("Generated: .claude/agents/a.md"); + expect(output).toContain("Generated: .cursor/agents/a.md"); + }); + + it("handles adapter that generates empty file list", async () => { + const adapter = makeAdapter({ + id: "empty", + name: "Empty", + generate: vi + .fn<(agents: AgentDefinition[]) => Promise>() + .mockResolvedValue([]), + }); + + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await run(["generate"]); + expect(allLog()).toContain("Generating agent configs for 1 tool(s)"); + expect(allLog()).toContain("Done!"); + }); + + it("handles mixed known and unknown tools in --tools flag", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === 
"claude-code") return adapter; + return undefined; + }); + + await run(["generate", "--tools=claude-code,nonexistent"]); + expect(allWarn()).toContain('Warning: Unknown adapter "nonexistent"'); + expect(allLog()).toContain("Generating agent configs for 1 tool(s)"); + }); + + it("resolves multiple comma-separated tools", async () => { + const adapter1 = makeAdapter({ id: "claude-code", name: "Claude Code" }); + const adapter2 = makeAdapter({ id: "cursor", name: "Cursor" }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter1; + if (id === "cursor") return adapter2; + return undefined; + }); + + await run(["generate", "--tools=claude-code,cursor"]); + expect(allLog()).toContain("Generating agent configs for 2 tool(s)"); + }); + + it("trims whitespace in --tools values", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["generate", "--tools= claude-code "]); + expect(allLog()).toContain("Generating agent configs for 1 tool(s)"); + }); +}); + +describe("run() - install command", () => { + it("prints message when no tools detected", async () => { + mockRegistryDetectAll.mockResolvedValue([]); + await run(["install"]); + expect(allLog()).toContain("No AI tools detected"); + }); + + it("installs agents into detected tools", async () => { + const installFn = vi + .fn<(files: GeneratedFile[]) => Promise>() + .mockResolvedValue(undefined); + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code", install: installFn }); + mockRegistryDetectAll.mockResolvedValue([adapter]); + await run(["install"]); + expect(installFn).toHaveBeenCalled(); + expect(allLog()).toContain("Claude Code"); + }); + + it("respects --dry-run and does not call install", async () => { + const installFn = vi + .fn<(files: GeneratedFile[]) => Promise>() + 
.mockResolvedValue(undefined); + const adapter = makeAdapter({ + id: "claude-code", + name: "Claude Code", + install: installFn, + generate: vi + .fn<(agents: AgentDefinition[]) => Promise>() + .mockResolvedValue([{ path: ".claude/agents/a.md", content: "test", format: "md" }]), + }); + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await run(["install", "--dry-run"]); + expect(installFn).not.toHaveBeenCalled(); + expect(allLog()).toContain("[dry-run] Would install: .claude/agents/a.md"); + }); + + it("installs into multiple tools", async () => { + const install1 = vi + .fn<(files: GeneratedFile[]) => Promise>() + .mockResolvedValue(undefined); + const install2 = vi + .fn<(files: GeneratedFile[]) => Promise>() + .mockResolvedValue(undefined); + const adapter1 = makeAdapter({ id: "claude-code", name: "Claude Code", install: install1 }); + const adapter2 = makeAdapter({ id: "cursor", name: "Cursor", install: install2 }); + + mockRegistryDetectAll.mockResolvedValue([adapter1, adapter2]); + + await run(["install"]); + expect(install1).toHaveBeenCalled(); + expect(install2).toHaveBeenCalled(); + expect(allLog()).toContain("Installing agents into 2 tool(s)"); + expect(allLog()).toContain("\u2713 Claude Code"); + expect(allLog()).toContain("\u2713 Cursor"); + expect(allLog()).toContain("Agents installed!"); + }); + + it("uses --tools flag to install for specific tools only", async () => { + const installFn = vi + .fn<(files: GeneratedFile[]) => Promise>() + .mockResolvedValue(undefined); + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code", install: installFn }); + + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["install", "--tools=claude-code"]); + expect(installFn).toHaveBeenCalled(); + expect(mockRegistryDetectAll).not.toHaveBeenCalled(); + }); + + it("skips undetected tool in --tools and warns", async () => { + const adapter = makeAdapter({ + id: 
"kiro", + name: "Kiro", + detect: vi.fn<() => Promise>().mockResolvedValue(false), + }); + + mockRegistryGet.mockImplementation((id: string) => { + if (id === "kiro") return adapter; + return undefined; + }); + + await run(["install", "--tools=kiro"]); + expect(allWarn()).toContain("Kiro not detected, skipping"); + expect(allWarn()).toContain("--force"); + expect(allLog()).toContain("No AI tools detected"); + }); + + it("--force bypasses detection check for --tools", async () => { + const installFn = vi + .fn<(files: GeneratedFile[]) => Promise>() + .mockResolvedValue(undefined); + const adapter = makeAdapter({ + id: "kiro", + name: "Kiro", + detect: vi.fn<() => Promise>().mockResolvedValue(false), + install: installFn, + }); + + mockRegistryGet.mockImplementation((id: string) => { + if (id === "kiro") return adapter; + return undefined; + }); + + await run(["install", "--tools=kiro", "--force"]); + expect(installFn).toHaveBeenCalled(); + expect(allLog()).toContain("Installing agents into 1 tool(s)"); + }); + + it("shows dry-run output for multiple generated files", async () => { + const adapter = makeAdapter({ + id: "multi", + name: "Multi", + generate: vi.fn<(agents: AgentDefinition[]) => Promise>().mockResolvedValue([ + { path: "file1.md", content: "# A", format: "md" }, + { path: "file2.md", content: "# B", format: "md" }, + ]), + }); + + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await run(["install", "--dry-run"]); + const output = allLog(); + expect(output).toContain("[dry-run] Would install: file1.md"); + expect(output).toContain("[dry-run] Would install: file2.md"); + }); +}); + +describe("run() - import command", () => { + it("prints message when no tools detected", async () => { + mockRegistryDetectAll.mockResolvedValue([]); + await run(["import"]); + expect(allLog()).toContain("No AI tools detected"); + }); + + it("imports agents from detected tools", async () => { + const importFn = vi + .fn<() => Promise>() + .mockResolvedValue([{ id: 
"agent1", name: "Agent 1", instructions: "Do stuff." }]); + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code", import: importFn }); + mockRegistryDetectAll.mockResolvedValue([adapter]); + await run(["import"]); + expect(importFn).toHaveBeenCalled(); + expect(allLog()).toContain("Agent 1"); + }); + + it("shows 'no agents found' message when adapter has no agents", async () => { + const importFn = vi.fn<() => Promise>().mockResolvedValue([]); + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code", import: importFn }); + mockRegistryDetectAll.mockResolvedValue([adapter]); + await run(["import"]); + expect(allLog()).toContain("Claude Code: no agents found"); + }); + + it("imports from multiple tools and shows counts", async () => { + const import1 = vi + .fn<() => Promise>() + .mockResolvedValue([{ id: "a1", name: "Agent A1", instructions: "A1 instructions" }]); + const import2 = vi.fn<() => Promise>().mockResolvedValue([ + { id: "a2", name: "Agent A2", instructions: "A2 instructions" }, + { id: "a3", name: "Agent A3", instructions: "A3 instructions" }, + ]); + const adapter1 = makeAdapter({ id: "claude-code", name: "Claude Code", import: import1 }); + const adapter2 = makeAdapter({ id: "cursor", name: "Cursor", import: import2 }); + + mockRegistryDetectAll.mockResolvedValue([adapter1, adapter2]); + + await run(["import"]); + const output = allLog(); + expect(output).toContain("Importing agents from 2 tool(s)"); + expect(output).toContain("Claude Code: 1 agent(s)"); + expect(output).toContain("Agent A1 (a1)"); + expect(output).toContain("Cursor: 2 agent(s)"); + expect(output).toContain("Agent A2 (a2)"); + expect(output).toContain("Agent A3 (a3)"); + }); + + it("uses --tools flag to import from specific tools", async () => { + const importFn = vi + .fn<() => Promise>() + .mockResolvedValue([{ id: "agent1", name: "Agent 1", instructions: "Do stuff." 
}]); + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code", import: importFn }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["import", "--tools=claude-code"]); + expect(importFn).toHaveBeenCalled(); + expect(mockRegistryDetectAll).not.toHaveBeenCalled(); + }); +}); + +describe("run() - export command", () => { + it("prints message when no tools detected", async () => { + mockRegistryDetectAll.mockResolvedValue([]); + await run(["export"]); + expect(allLog()).toContain("No AI tools detected"); + }); + + it("exports agents as JSON to stdout", async () => { + const agents: AgentDefinition[] = [ + { id: "agent1", name: "Agent 1", instructions: "Do stuff." }, + ]; + const importFn = vi.fn<() => Promise>().mockResolvedValue(agents); + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code", import: importFn }); + + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await run(["export"]); + const output = allLog(); + expect(output).toContain('"id": "agent1"'); + expect(output).toContain('"name": "Agent 1"'); + expect(output).toContain('"instructions": "Do stuff."'); + }); + + it("uses --tools flag to export from specific tool", async () => { + const importFn = vi.fn<() => Promise>().mockResolvedValue([]); + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code", import: importFn }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["export", "--tools=claude-code"]); + expect(importFn).toHaveBeenCalled(); + expect(mockRegistryDetectAll).not.toHaveBeenCalled(); + }); +}); + +describe("run() - sync command", () => { + it("prints message when no tools detected", async () => { + mockRegistryDetectAll.mockResolvedValue([]); + await run(["sync"]); + expect(allLog()).toContain("No AI tools detected"); + }); + + it("syncs agents from first tool to 
others", async () => { + const agents: AgentDefinition[] = [{ id: "a1", name: "Agent 1", instructions: "Do stuff." }]; + const importFn = vi.fn<() => Promise>().mockResolvedValue(agents); + const installFn = vi + .fn<(files: GeneratedFile[]) => Promise>() + .mockResolvedValue(undefined); + const generateFn = vi + .fn<(agents: AgentDefinition[]) => Promise>() + .mockResolvedValue([{ path: ".cursor/agents/a1.md", content: "test", format: "md" }]); + + const source = makeAdapter({ id: "claude-code", name: "Claude Code", import: importFn }); + const target = makeAdapter({ + id: "cursor", + name: "Cursor", + generate: generateFn, + install: installFn, + }); + + mockRegistryDetectAll.mockResolvedValue([source, target]); + + await run(["sync"]); + expect(importFn).toHaveBeenCalled(); + expect(generateFn).toHaveBeenCalledWith(agents); + expect(installFn).toHaveBeenCalled(); + expect(allLog()).toContain("Syncing 1 agent(s) from Claude Code to 1 tool(s)"); + expect(allLog()).toContain("\u2713 Cursor"); + expect(allLog()).toContain("Done!"); + }); + + it("respects --dry-run during sync", async () => { + const agents: AgentDefinition[] = [{ id: "a1", name: "Agent 1", instructions: "Do stuff." 
}]; + const importFn = vi.fn<() => Promise>().mockResolvedValue(agents); + const installFn = vi + .fn<(files: GeneratedFile[]) => Promise>() + .mockResolvedValue(undefined); + const generateFn = vi + .fn<(agents: AgentDefinition[]) => Promise>() + .mockResolvedValue([{ path: ".cursor/agents/a1.md", content: "test", format: "md" }]); + + const source = makeAdapter({ id: "claude-code", name: "Claude Code", import: importFn }); + const target = makeAdapter({ + id: "cursor", + name: "Cursor", + generate: generateFn, + install: installFn, + }); + + mockRegistryDetectAll.mockResolvedValue([source, target]); + + await run(["sync", "--dry-run"]); + expect(installFn).not.toHaveBeenCalled(); + expect(allLog()).toContain("[dry-run] Would write: .cursor/agents/a1.md"); + }); + + it("syncs with only one tool (no targets to sync to)", async () => { + const agents: AgentDefinition[] = [{ id: "a1", name: "Agent 1", instructions: "Do stuff." }]; + const importFn = vi.fn<() => Promise>().mockResolvedValue(agents); + const source = makeAdapter({ id: "claude-code", name: "Claude Code", import: importFn }); + + mockRegistryDetectAll.mockResolvedValue([source]); + + await run(["sync"]); + expect(allLog()).toContain("Syncing 1 agent(s) from Claude Code to 0 tool(s)"); + expect(allLog()).toContain("Done!"); + }); +}); + +describe("run() - flag parsing", () => { + it("parses --tools flag with = syntax", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["generate", "--tools=claude-code"]); + expect(mockRegistryGet).toHaveBeenCalledWith("claude-code"); + expect(mockRegistryDetectAll).not.toHaveBeenCalled(); + }); + + it("parses --dry-run flag", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await run(["generate", 
"--dry-run"]); + expect(allLog()).toContain("[dry-run]"); + }); + + it("parses --verbose flag", async () => { + // Verbose is parsed but not used differently in generate; just ensure no crash + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await run(["generate", "--verbose"]); + expect(allLog()).toContain("Done!"); + }); + + it("handles multiple flags together", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["generate", "--tools=claude-code", "--dry-run", "--verbose"]); + expect(allLog()).toContain("[dry-run]"); + }); + + it("parses --force flag", async () => { + const adapter = makeAdapter({ + id: "kiro", + name: "Kiro", + detect: vi.fn<() => Promise>().mockResolvedValue(false), + }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "kiro") return adapter; + return undefined; + }); + + await run(["generate", "--tools=kiro", "--force"]); + expect(allLog()).toContain("Generating agent configs for 1 tool(s)"); + }); +}); + +describe("run() - error propagation", () => { + it("propagates adapter generate errors", async () => { + const adapter = makeAdapter({ + id: "broken", + name: "Broken", + generate: vi + .fn<(agents: AgentDefinition[]) => Promise>() + .mockRejectedValue(new Error("generate failed")), + }); + + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await expect(run(["generate"])).rejects.toThrow("generate failed"); + }); + + it("propagates adapter install errors", async () => { + const adapter = makeAdapter({ + id: "broken", + name: "Broken", + install: vi + .fn<(files: GeneratedFile[]) => Promise>() + .mockRejectedValue(new Error("install failed")), + }); + + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await 
expect(run(["install"])).rejects.toThrow("install failed"); + }); + + it("propagates adapter import errors", async () => { + const adapter = makeAdapter({ + id: "broken", + name: "Broken", + import: vi + .fn<() => Promise>() + .mockRejectedValue(new Error("import failed")), + }); + + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await expect(run(["import"])).rejects.toThrow("import failed"); + }); + + it("propagates adapter export errors", async () => { + const adapter = makeAdapter({ + id: "broken", + name: "Broken", + import: vi + .fn<() => Promise>() + .mockRejectedValue(new Error("export failed")), + }); + + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await expect(run(["export"])).rejects.toThrow("export failed"); + }); +}); + +describe("run() - edge cases", () => { + it("handles adapter that generates multiple files", async () => { + const adapter = makeAdapter({ + id: "multi", + name: "Multi Config Tool", + generate: vi.fn<(agents: AgentDefinition[]) => Promise>().mockResolvedValue([ + { path: "agents/a1.md", content: "# A1", format: "md" }, + { path: "agents/a2.md", content: "# A2", format: "md" }, + { path: "agents/a3.md", content: "# A3", format: "md" }, + ]), + }); + + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await run(["generate"]); + const output = allLog(); + expect(output).toContain("Generated: agents/a1.md"); + expect(output).toContain("Generated: agents/a2.md"); + expect(output).toContain("Generated: agents/a3.md"); + }); + + it("detect with zero registered adapters", async () => { + mockRegistryDetectAll.mockResolvedValue([]); + mockRegistryGetAll.mockReturnValue([]); + + await run(["detect"]); + expect(allLog()).toContain("Detected 0/0 tools"); + }); +}); diff --git a/packages/agents/src/cli/index.ts b/packages/agents/src/cli/index.ts new file mode 100644 index 0000000..dba04d2 --- /dev/null +++ b/packages/agents/src/cli/index.ts @@ -0,0 +1,286 @@ +import { registry } from "../adapters/registry.js"; +import 
type { BaseAgentAdapter } from "../adapters/base.js"; +import type { AgentsConfig } from "../types/index.js"; + +// Import all adapters to register them +import "../adapters/all.js"; + +const HELP = ` +ai-agents - Universal agent configuration for AI coding tools + +USAGE: + ai-agents [options] + +COMMANDS: + detect Detect which AI tools are installed + generate Generate agent configs for detected/specified tools + install Generate and install agents into detected tools + import Import existing agents from detected tools + sync Sync agents across all detected tools + export Export agents as JSON to stdout + help Show this help message + +OPTIONS: + --tools Comma-separated list of tools (e.g., --tools=claude-code,cursor) + --config Path to agents config file + --verbose Show detailed output + --dry-run Show what would be generated without writing files + --force Skip detection checks for --tools (install even if tool not found) + +EXAMPLES: + ai-agents detect # See which AI tools support agents + ai-agents generate # Generate agent configs for all detected tools + ai-agents install --tools=claude-code # Install agents for Claude Code only + ai-agents import # Import existing agents from detected tools + ai-agents sync # Sync agents across all tools +`; + +type Flags = { + tools?: string; + config?: string; + verbose?: boolean; + dryRun?: boolean; + force?: boolean; +}; + +export async function run(args: string[]): Promise { + const command = args[0]; + const flags = parseFlags(args.slice(1)); + + switch (command) { + case "detect": + await cmdDetect(flags); + break; + case "generate": + await cmdGenerate(flags); + break; + case "install": + await cmdInstall(flags); + break; + case "import": + await cmdImport(flags); + break; + case "sync": + await cmdSync(flags); + break; + case "export": + await cmdExport(flags); + break; + case "help": + case "--help": + case "-h": + case undefined: + console.log(HELP); + break; + default: + console.error(`Unknown command: 
${command}`); + console.log(HELP); + process.exit(1); + } +} + +async function cmdDetect(_flags: Flags): Promise { + console.log("Detecting AI coding tools with agent support...\n"); + + const detected = await registry.detectAll(); + const all = registry.getAll(); + + for (const adapter of all) { + const isDetected = detected.some((d) => d.id === adapter.id); + const icon = isDetected ? "\u2713" : "\u2717"; + const color = isDetected ? "\x1b[32m" : "\x1b[90m"; + const reset = "\x1b[0m"; + + console.log(` ${color}${icon}${reset} ${adapter.name.padEnd(20)} ${adapter.configDir}`); + } + + console.log(`\nDetected ${detected.length}/${all.length} tools`); +} + +async function cmdGenerate(flags: Flags): Promise { + const adapters = await resolveAdapters(flags); + + if (adapters.length === 0) { + console.log("No AI tools detected. Use --tools to specify manually."); + return; + } + + const config = await loadConfig(flags.config); + + console.log(`Generating agent configs for ${adapters.length} tool(s)...\n`); + + for (const adapter of adapters) { + const files = await adapter.generate(config.agents); + + for (const file of files) { + if (flags.dryRun) { + console.log(` [dry-run] Would write: ${file.path}`); + } else { + console.log(` Generated: ${file.path}`); + } + } + + if (!flags.dryRun) { + await adapter.install(files); + } + } + + console.log("\nDone!"); +} + +async function cmdInstall(flags: Flags): Promise { + const adapters = await resolveAdapters(flags); + + if (adapters.length === 0) { + console.log("No AI tools detected. 
Use --tools to specify manually."); + return; + } + + const config = await loadConfig(flags.config); + + console.log(`Installing agents into ${adapters.length} tool(s)...\n`); + + for (const adapter of adapters) { + const files = await adapter.generate(config.agents); + + if (flags.dryRun) { + for (const file of files) { + console.log(` [dry-run] Would install: ${file.path}`); + } + } else { + await adapter.install(files); + console.log(` \u2713 ${adapter.name}`); + } + } + + console.log("\nAgents installed!"); +} + +async function cmdImport(flags: Flags): Promise { + const adapters = await resolveAdapters(flags); + + if (adapters.length === 0) { + console.log("No AI tools detected. Use --tools to specify manually."); + return; + } + + console.log(`Importing agents from ${adapters.length} tool(s)...\n`); + + for (const adapter of adapters) { + const agents = await adapter.import(); + + if (agents.length === 0) { + console.log(` ${adapter.name}: no agents found`); + } else { + console.log(` ${adapter.name}: ${agents.length} agent(s)`); + for (const agent of agents) { + console.log(` - ${agent.name} (${agent.id})`); + } + } + } +} + +async function cmdSync(flags: Flags): Promise { + const adapters = await resolveAdapters(flags); + + if (adapters.length === 0) { + console.log("No AI tools detected. 
Use --tools to specify manually."); + return; + } + + // Import from first tool, generate for rest + const source = adapters[0]; + if (!source) return; + const agents = await source.import(); + + console.log( + `Syncing ${agents.length} agent(s) from ${source.name} to ${adapters.length - 1} tool(s)...\n`, + ); + + for (const adapter of adapters.slice(1)) { + const files = await adapter.generate(agents); + if (flags.dryRun) { + for (const file of files) { + console.log(` [dry-run] Would write: ${file.path}`); + } + } else { + await adapter.install(files); + console.log(` \u2713 ${adapter.name}`); + } + } + + console.log("\nDone!"); +} + +async function cmdExport(flags: Flags): Promise { + const adapters = await resolveAdapters(flags); + + if (adapters.length === 0) { + console.log("No AI tools detected. Use --tools to specify manually."); + return; + } + + const source = adapters[0]; + if (!source) return; + const agents = await source.import(); + console.log(JSON.stringify({ agents }, null, 2)); +} + +function parseFlags(args: string[]): Flags { + const flags: Flags = {}; + + for (const arg of args) { + if (arg.startsWith("--tools=")) { + flags.tools = arg.slice(8); + } else if (arg.startsWith("--config=")) { + flags.config = arg.slice(9); + } else if (arg === "--verbose") { + flags.verbose = true; + } else if (arg === "--dry-run") { + flags.dryRun = true; + } else if (arg === "--force") { + flags.force = true; + } + } + + return flags; +} + +async function loadConfig(configPath?: string): Promise { + const path = configPath ?? 
"ai-agents.config.ts"; + const { existsSync } = await import("node:fs"); + + if (!existsSync(path)) { + if (configPath) { + throw new Error(`Config file not found: ${path}`); + } + return { agents: [] }; + } + + const { resolve } = await import("node:path"); + const fullPath = resolve(process.cwd(), path); + const mod = await import(fullPath); + return mod.default as AgentsConfig; +} + +async function resolveAdapters(flags: Flags): Promise { + if (flags.tools) { + const ids = flags.tools.split(",").map((t) => t.trim()); + const adapters: BaseAgentAdapter[] = []; + for (const id of ids) { + const adapter = registry.get(id); + if (!adapter) { + console.warn(` Warning: Unknown adapter "${id}"`); + continue; + } + if (!flags.force && !(await adapter.detect())) { + console.warn(` Warning: ${adapter.name} not detected, skipping (use --force to override)`); + continue; + } + adapters.push(adapter); + } + return adapters; + } + + return registry.detectAll(); +} diff --git a/packages/agents/src/config/define.test.ts b/packages/agents/src/config/define.test.ts new file mode 100644 index 0000000..f2dfc6a --- /dev/null +++ b/packages/agents/src/config/define.test.ts @@ -0,0 +1,40 @@ +import { describe, it, expect } from "vitest"; +import { defineConfig } from "./define.js"; + +describe("defineConfig", () => { + it("returns the config as-is", () => { + const config = { + agents: [ + { + id: "reviewer", + name: "Code Reviewer", + instructions: "Review code for issues", + }, + ], + }; + expect(defineConfig(config)).toEqual(config); + }); + + it("returns empty agents array", () => { + const config = { agents: [] }; + expect(defineConfig(config)).toEqual({ agents: [] }); + }); + + it("preserves all agent fields", () => { + const config = { + agents: [ + { + id: "test-agent", + name: "Test Agent", + description: "A test agent", + instructions: "Do the test", + model: "claude-sonnet-4-20250514", + tools: ["Read", "Bash"], + tags: ["testing"], + enabled: false, + }, + ], + }; + 
expect(defineConfig(config)).toEqual(config); + }); +}); diff --git a/packages/agents/src/config/define.ts b/packages/agents/src/config/define.ts new file mode 100644 index 0000000..c561114 --- /dev/null +++ b/packages/agents/src/config/define.ts @@ -0,0 +1,5 @@ +import type { AgentsConfig } from "../types/index.js"; + +export function defineConfig(config: AgentsConfig): AgentsConfig { + return config; +} diff --git a/packages/agents/src/config/index.ts b/packages/agents/src/config/index.ts new file mode 100644 index 0000000..ee71dfd --- /dev/null +++ b/packages/agents/src/config/index.ts @@ -0,0 +1 @@ +export { defineConfig } from "./define.js"; diff --git a/packages/agents/src/index.ts b/packages/agents/src/index.ts new file mode 100644 index 0000000..7eb26b3 --- /dev/null +++ b/packages/agents/src/index.ts @@ -0,0 +1,3 @@ +export { defineConfig } from "./config/index.js"; +export { registry, BaseAgentAdapter } from "./adapters/index.js"; +export type { AgentDefinition, AgentsConfig, GeneratedFile } from "./types/index.js"; diff --git a/packages/agents/src/types/config.ts b/packages/agents/src/types/config.ts new file mode 100644 index 0000000..4226828 --- /dev/null +++ b/packages/agents/src/types/config.ts @@ -0,0 +1,11 @@ +import type { AgentDefinition } from "./definition.js"; + +export type AgentsConfig = { + agents: AgentDefinition[]; +}; + +export type GeneratedFile = { + path: string; + content: string; + format: "md" | "json" | "yaml"; +}; diff --git a/packages/agents/src/types/definition.ts b/packages/agents/src/types/definition.ts new file mode 100644 index 0000000..354370d --- /dev/null +++ b/packages/agents/src/types/definition.ts @@ -0,0 +1,10 @@ +export type AgentDefinition = { + id: string; + name: string; + description?: string; + instructions: string; + model?: string; + tools?: string[]; + tags?: string[]; + enabled?: boolean; +}; diff --git a/packages/agents/src/types/index.ts b/packages/agents/src/types/index.ts new file mode 100644 index 
0000000..b3f49cb --- /dev/null +++ b/packages/agents/src/types/index.ts @@ -0,0 +1,2 @@ +export type { AgentDefinition } from "./definition.js"; +export type { AgentsConfig, GeneratedFile } from "./config.js"; diff --git a/packages/core/tsconfig.json b/packages/agents/tsconfig.json similarity index 100% rename from packages/core/tsconfig.json rename to packages/agents/tsconfig.json diff --git a/packages/agents/tsup.config.ts b/packages/agents/tsup.config.ts new file mode 100644 index 0000000..d87215c --- /dev/null +++ b/packages/agents/tsup.config.ts @@ -0,0 +1,29 @@ +import { defineConfig } from "tsup"; + +export default defineConfig([ + { + entry: ["src/index.ts", "src/adapters/index.ts", "src/adapters/all.ts", "src/cli/index.ts"], + format: ["esm"], + dts: true, + clean: true, + sourcemap: true, + target: "node22", + outDir: "dist", + splitting: true, + treeshake: true, + }, + { + entry: ["src/cli/bin.ts"], + format: ["esm"], + dts: false, + clean: false, + sourcemap: true, + target: "node22", + outDir: "dist/cli", + splitting: false, + treeshake: true, + banner: { + js: "#!/usr/bin/env node", + }, + }, +]); diff --git a/packages/cli/CLAUDE.md b/packages/cli/CLAUDE.md new file mode 100644 index 0000000..286babb --- /dev/null +++ b/packages/cli/CLAUDE.md @@ -0,0 +1,24 @@ +# packages/cli + +Unified CLI dispatcher (`ai-tools` binary). Thin routing layer — no domain logic of its own. + +## Architecture (`src/cli/index.ts`) + +Routes `ai-tools ` to the appropriate engine's CLI: + +- `ai-tools hooks ` → `@premierstudio/ai-hooks/cli` +- `ai-tools mcp ` → `@premierstudio/ai-mcp/cli` +- `ai-tools agents ` → `@premierstudio/ai-agents/cli` +- `ai-tools skills ` → `@premierstudio/ai-skills/cli` +- `ai-tools rules ` → `@premierstudio/ai-rules/cli` + +Uses dynamic `import()` for lazy engine loading — only the invoked engine is loaded. 
+ +## Cross-Cutting Commands + +Two commands run across all engines: + +- `ai-tools detect` — runs `detect` on every engine +- `ai-tools sync [--dry-run]` — runs `sync` across mcp, skills, agents, rules (not hooks) + +Engine list defined by `ENGINE_NAMES` constant in `src/index.ts`. diff --git a/packages/cli/package.json b/packages/cli/package.json new file mode 100644 index 0000000..fca3152 --- /dev/null +++ b/packages/cli/package.json @@ -0,0 +1,47 @@ +{ + "name": "@premierstudio/ai-tools", + "version": "1.1.8", + "description": "Unified CLI for all ai-tools engines (hooks, mcp, skills, agents, rules)", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/PremierStudio/ai-tools" + }, + "bin": { + "ai-tools": "dist/cli/bin.js" + }, + "files": [ + "dist", + "README.md", + "LICENSE" + ], + "type": "module", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.js" + } + }, + "publishConfig": { + "access": "public" + }, + "scripts": { + "build": "tsup", + "dev": "tsup --watch", + "typecheck": "tsc --noEmit", + "test": "vitest run", + "clean": "rm -rf dist" + }, + "dependencies": { + "@premierstudio/ai-agents": "1.1.8", + "@premierstudio/ai-hooks": "1.1.8", + "@premierstudio/ai-mcp": "1.1.8", + "@premierstudio/ai-rules": "1.1.8", + "@premierstudio/ai-skills": "1.1.8" + }, + "engines": { + "node": ">=22.0.0" + } +} diff --git a/packages/plannable/src/bin.ts b/packages/cli/src/cli/bin.ts similarity index 63% rename from packages/plannable/src/bin.ts rename to packages/cli/src/cli/bin.ts index d834a8b..3adf2a9 100644 --- a/packages/plannable/src/bin.ts +++ b/packages/cli/src/cli/bin.ts @@ -1,6 +1,6 @@ import { run } from "./index.js"; -run(process.argv.slice(2)).catch((err: Error) => { +run(process.argv.slice(2)).catch((err) => { console.error(err.message); process.exit(1); }); diff --git a/packages/cli/src/cli/index.test.ts 
b/packages/cli/src/cli/index.test.ts new file mode 100644 index 0000000..d7cad31 --- /dev/null +++ b/packages/cli/src/cli/index.test.ts @@ -0,0 +1,243 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; + +const mockHooksRun = vi.fn(); +const mockMcpRun = vi.fn(); +const mockSkillsRun = vi.fn(); +const mockAgentsRun = vi.fn(); +const mockRulesRun = vi.fn(); + +vi.mock("@premierstudio/ai-hooks/cli", () => ({ run: mockHooksRun })); +vi.mock("@premierstudio/ai-mcp/cli", () => ({ run: mockMcpRun })); +vi.mock("@premierstudio/ai-skills/cli", () => ({ run: mockSkillsRun })); +vi.mock("@premierstudio/ai-agents/cli", () => ({ run: mockAgentsRun })); +vi.mock("@premierstudio/ai-rules/cli", () => ({ run: mockRulesRun })); + +import { run } from "./index.js"; + +let logOutput: string[]; +let errorOutput: string[]; +let exitCode: number | undefined; + +const originalLog = console.log; +const originalError = console.error; +const originalExit = process.exit; + +beforeEach(() => { + logOutput = []; + errorOutput = []; + exitCode = undefined; + + console.log = vi.fn((...args: unknown[]) => { + logOutput.push(args.map(String).join(" ")); + }); + console.error = vi.fn((...args: unknown[]) => { + errorOutput.push(args.map(String).join(" ")); + }); + process.exit = vi.fn((code?: number) => { + exitCode = code ?? 
0; + throw new Error(`process.exit(${code})`); + }) as never; + + vi.clearAllMocks(); +}); + +afterEach(() => { + console.log = originalLog; + console.error = originalError; + process.exit = originalExit; +}); + +function allLog(): string { + return logOutput.join("\n"); +} + +function allError(): string { + return errorOutput.join("\n"); +} + +// ── Help output ────────────────────────────────────────────── + +describe("run() - help output", () => { + it("prints help text for 'help' command", async () => { + await run(["help"]); + expect(allLog()).toContain("ai-tools - Unified CLI"); + expect(allLog()).toContain("USAGE:"); + expect(allLog()).toContain("ENGINES:"); + expect(allLog()).toContain("CROSS-CUTTING COMMANDS:"); + expect(allLog()).toContain("EXAMPLES:"); + }); + + it("prints help text for --help flag", async () => { + await run(["--help"]); + expect(allLog()).toContain("ai-tools - Unified CLI"); + }); + + it("prints help text for -h flag", async () => { + await run(["-h"]); + expect(allLog()).toContain("ai-tools - Unified CLI"); + }); + + it("prints help text when no arguments are provided", async () => { + await run([]); + expect(allLog()).toContain("ai-tools - Unified CLI"); + }); + + it("includes all engine names in help text", async () => { + await run(["help"]); + const output = allLog(); + for (const name of ["hooks", "mcp", "skills", "agents", "rules"]) { + expect(output).toContain(name); + } + }); +}); + +// ── Unknown command ────────────────────────────────────────── + +describe("run() - unknown command", () => { + it("prints error and help, then exits with code 1", async () => { + await expect(run(["foobar"])).rejects.toThrow("process.exit(1)"); + expect(allError()).toContain("Unknown command: foobar"); + expect(allLog()).toContain("USAGE:"); + expect(exitCode).toBe(1); + }); +}); + +// ── Engine delegation ──────────────────────────────────────── + +describe("run() - engine delegation", () => { + it("delegates to hooks engine", async () => { + 
await run(["hooks", "detect"]); + expect(mockHooksRun).toHaveBeenCalledWith(["detect"]); + }); + + it("delegates to mcp engine", async () => { + await run(["mcp", "install"]); + expect(mockMcpRun).toHaveBeenCalledWith(["install"]); + }); + + it("delegates to skills engine", async () => { + await run(["skills", "sync"]); + expect(mockSkillsRun).toHaveBeenCalledWith(["sync"]); + }); + + it("delegates to agents engine", async () => { + await run(["agents", "export"]); + expect(mockAgentsRun).toHaveBeenCalledWith(["export"]); + }); + + it("delegates to rules engine", async () => { + await run(["rules", "import"]); + expect(mockRulesRun).toHaveBeenCalledWith(["import"]); + }); + + it("forwards all remaining args to the engine", async () => { + await run(["mcp", "install", "--tools=claude-code", "--dry-run"]); + expect(mockMcpRun).toHaveBeenCalledWith(["install", "--tools=claude-code", "--dry-run"]); + }); +}); + +// ── Cross-cutting detect ───────────────────────────────────── + +describe("run() - cross-cutting detect", () => { + it("calls detect on all 5 engines", async () => { + await run(["detect"]); + + expect(mockHooksRun).toHaveBeenCalledWith(["detect"]); + expect(mockMcpRun).toHaveBeenCalledWith(["detect"]); + expect(mockSkillsRun).toHaveBeenCalledWith(["detect"]); + expect(mockAgentsRun).toHaveBeenCalledWith(["detect"]); + expect(mockRulesRun).toHaveBeenCalledWith(["detect"]); + }); + + it("prints engine headers", async () => { + await run(["detect"]); + const output = allLog(); + expect(output).toContain("── hooks ──"); + expect(output).toContain("── mcp ──"); + expect(output).toContain("── skills ──"); + expect(output).toContain("── agents ──"); + expect(output).toContain("── rules ──"); + }); + + it("forwards flags to each engine", async () => { + await run(["detect", "--tools=claude-code"]); + + expect(mockHooksRun).toHaveBeenCalledWith(["detect", "--tools=claude-code"]); + expect(mockMcpRun).toHaveBeenCalledWith(["detect", "--tools=claude-code"]); + }); + + 
it("catches engine errors and continues", async () => { + mockHooksRun.mockRejectedValueOnce(new Error("hooks failed")); + + await run(["detect"]); + + expect(allError()).toContain("Error: hooks failed"); + // Other engines still called + expect(mockMcpRun).toHaveBeenCalledWith(["detect"]); + expect(mockSkillsRun).toHaveBeenCalledWith(["detect"]); + expect(mockAgentsRun).toHaveBeenCalledWith(["detect"]); + expect(mockRulesRun).toHaveBeenCalledWith(["detect"]); + }); +}); + +// ── Cross-cutting sync ─────────────────────────────────────── + +describe("run() - cross-cutting sync", () => { + it("calls sync on 4 engines (skips hooks)", async () => { + await run(["sync"]); + + expect(mockHooksRun).not.toHaveBeenCalled(); + expect(mockMcpRun).toHaveBeenCalledWith(["sync"]); + expect(mockSkillsRun).toHaveBeenCalledWith(["sync"]); + expect(mockAgentsRun).toHaveBeenCalledWith(["sync"]); + expect(mockRulesRun).toHaveBeenCalledWith(["sync"]); + }); + + it("prints engine headers for sync-capable engines only", async () => { + await run(["sync"]); + const output = allLog(); + expect(output).not.toContain("── hooks ──"); + expect(output).toContain("── mcp ──"); + expect(output).toContain("── skills ──"); + expect(output).toContain("── agents ──"); + expect(output).toContain("── rules ──"); + }); + + it("forwards flags to each engine", async () => { + await run(["sync", "--dry-run"]); + + expect(mockMcpRun).toHaveBeenCalledWith(["sync", "--dry-run"]); + expect(mockSkillsRun).toHaveBeenCalledWith(["sync", "--dry-run"]); + }); + + it("catches engine errors and continues", async () => { + mockMcpRun.mockRejectedValueOnce(new Error("mcp failed")); + + await run(["sync"]); + + expect(allError()).toContain("Error: mcp failed"); + // Other engines still called + expect(mockSkillsRun).toHaveBeenCalledWith(["sync"]); + expect(mockAgentsRun).toHaveBeenCalledWith(["sync"]); + expect(mockRulesRun).toHaveBeenCalledWith(["sync"]); + }); +}); + +// ── Flag pass-through 
──────────────────────────────────────── + +describe("run() - flag pass-through", () => { + it("passes --tools flag through to engine", async () => { + await run(["mcp", "generate", "--tools=cursor,claude-code"]); + expect(mockMcpRun).toHaveBeenCalledWith(["generate", "--tools=cursor,claude-code"]); + }); + + it("passes --dry-run flag through to engine", async () => { + await run(["rules", "sync", "--dry-run"]); + expect(mockRulesRun).toHaveBeenCalledWith(["sync", "--dry-run"]); + }); + + it("passes multiple flags through to engine", async () => { + await run(["skills", "install", "--tools=claude-code", "--dry-run"]); + expect(mockSkillsRun).toHaveBeenCalledWith(["install", "--tools=claude-code", "--dry-run"]); + }); +}); diff --git a/packages/cli/src/cli/index.ts b/packages/cli/src/cli/index.ts new file mode 100644 index 0000000..7f166bd --- /dev/null +++ b/packages/cli/src/cli/index.ts @@ -0,0 +1,120 @@ +const HELP = ` +ai-tools - Unified CLI for all ai-tools engines + +USAGE: + ai-tools <engine> <command> [options] + ai-tools <command> [options] + +ENGINES: + hooks Lifecycle hooks for AI coding tools + mcp MCP server configuration + skills Skills/prompts configuration + agents Agent configuration + rules Project rules configuration + +CROSS-CUTTING COMMANDS: + detect Run detect across all engines + sync Run sync across supported engines (hooks excluded) + help Show this help message + +ENGINE COMMANDS: + Pass any command supported by the engine's CLI. 
+ Example: ai-tools mcp install --tools=claude-code + +OPTIONS: + --tools Comma-separated list of tools (forwarded to engine) + --dry-run Show what would happen without writing files + +EXAMPLES: + ai-tools mcp detect # Detect MCP-capable tools + ai-tools skills sync # Sync skills across tools + ai-tools detect # Detect across all engines + ai-tools sync --dry-run # Sync all engines (dry run) + ai-tools hooks init # Initialize hooks config +`; + +type EngineEntry = { + name: string; + pkg: string; + hasSync: boolean; +}; + +const ENGINES: Record = { + hooks: { name: "hooks", pkg: "@premierstudio/ai-hooks/cli", hasSync: false }, + mcp: { name: "mcp", pkg: "@premierstudio/ai-mcp/cli", hasSync: true }, + skills: { name: "skills", pkg: "@premierstudio/ai-skills/cli", hasSync: true }, + agents: { name: "agents", pkg: "@premierstudio/ai-agents/cli", hasSync: true }, + rules: { name: "rules", pkg: "@premierstudio/ai-rules/cli", hasSync: true }, +}; + +const ENGINE_NAMES = Object.keys(ENGINES); + +async function loadEngine(pkg: string): Promise<{ run: (args: string[]) => Promise }> { + return import(pkg) as Promise<{ run: (args: string[]) => Promise }>; +} + +export async function run(args: string[]): Promise { + const command = args[0]; + + switch (command) { + case "help": + case "--help": + case "-h": + case undefined: + console.log(HELP); + return; + + case "detect": + await crossCutDetect(args.slice(1)); + return; + + case "sync": + await crossCutSync(args.slice(1)); + return; + + default: + // Check if it's an engine name + if (command in ENGINES) { + const engine = ENGINES[command]; + if (engine) { + const mod = await loadEngine(engine.pkg); + await mod.run(args.slice(1)); + return; + } + } + + // Unknown command + console.error(`Unknown command: ${command}`); + console.log(HELP); + process.exit(1); + } +} + +async function crossCutDetect(flags: string[]): Promise { + for (const name of ENGINE_NAMES) { + const engine = ENGINES[name]; + if (!engine) continue; + 
console.log(`\n── ${engine.name} ──`); + try { + const mod = await loadEngine(engine.pkg); + await mod.run(["detect", ...flags]); + } catch (err) { + console.error(` Error: ${err instanceof Error ? err.message : String(err)}`); + } + } +} + +async function crossCutSync(flags: string[]): Promise { + for (const name of ENGINE_NAMES) { + const engine = ENGINES[name]; + if (!engine || !engine.hasSync) continue; + + console.log(`\n── ${engine.name} ──`); + try { + const mod = await loadEngine(engine.pkg); + await mod.run(["sync", ...flags]); + } catch (err) { + console.error(` Error: ${err instanceof Error ? err.message : String(err)}`); + } + } +} diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts new file mode 100644 index 0000000..5def92e --- /dev/null +++ b/packages/cli/src/index.ts @@ -0,0 +1,5 @@ +export { run } from "./cli/index.js"; + +export const ENGINE_NAMES = ["hooks", "mcp", "skills", "agents", "rules"] as const; + +export type EngineName = (typeof ENGINE_NAMES)[number]; diff --git a/packages/plannable/tsconfig.json b/packages/cli/tsconfig.json similarity index 73% rename from packages/plannable/tsconfig.json rename to packages/cli/tsconfig.json index 2def9e0..329ab40 100644 --- a/packages/plannable/tsconfig.json +++ b/packages/cli/tsconfig.json @@ -5,5 +5,5 @@ "rootDir": "./src" }, "include": ["src/**/*.ts"], - "exclude": ["node_modules", "dist"] + "exclude": ["node_modules", "dist", "**/*.test.ts"] } diff --git a/packages/plannable/tsup.config.ts b/packages/cli/tsup.config.ts similarity index 77% rename from packages/plannable/tsup.config.ts rename to packages/cli/tsup.config.ts index 30889a1..772c5c1 100644 --- a/packages/plannable/tsup.config.ts +++ b/packages/cli/tsup.config.ts @@ -8,16 +8,20 @@ export default defineConfig([ clean: true, sourcemap: true, target: "node22", + outDir: "dist", splitting: false, + treeshake: true, }, { - entry: ["src/bin.ts"], + entry: ["src/cli/bin.ts"], format: ["esm"], dts: false, clean: false, sourcemap: 
true, target: "node22", + outDir: "dist/cli", splitting: false, + treeshake: true, banner: { js: "#!/usr/bin/env node", }, diff --git a/packages/hooks/CLAUDE.md b/packages/hooks/CLAUDE.md new file mode 100644 index 0000000..9ca6df0 --- /dev/null +++ b/packages/hooks/CLAUDE.md @@ -0,0 +1,68 @@ +# packages/hooks + +Core engine package. Most complex package in the monorepo — the only one with a runtime execution engine. + +## Event System + +15 universal event types defined in `src/types/events.ts`. Each has a phase: + +- **Before events** (blockable): `session:start`, `prompt:submit`, `tool:before`, `file:read`, `file:write`, `file:edit`, `file:delete`, `shell:before`, `mcp:before` +- **After events** (observe-only): `session:end`, `prompt:response`, `tool:after`, `shell:after`, `mcp:after`, `notification` + +Adapters map these to tool-native events via `EVENT_MAP` (universal → native) and `REVERSE_MAP` (native → universal). Both maps live as module-level constants in each adapter file. + +## Chain Execution (`runtime/chain.ts`) + +`executeChain(hooks, ctx, settings)` runs hooks in Express.js middleware style: + +1. Sorts by priority (lower number = runs first, default 100) +2. Each hook receives `next()` — not calling it stops the chain +3. Skips hooks where `enabled === false` or `filter()` returns false +4. Stops processing before-phase hooks after a block (`ctx.results.blocked = true`) +5. Each hook wrapped in `Promise.race()` against timeout — timeout rejects with `HookTimeoutError` +6. All results accumulate in `ctx.results[]` + +## HookEngine (`runtime/engine.ts`) + +Stores hooks in `Map`. 
Key methods: + +- `register(hook)` / `registerAll(hooks)` — adds to event-type buckets +- `emit(event, toolInfo)` — creates `HookContext`, calls `executeChain()`, returns results +- `isBlocked(event, toolInfo)` — convenience: emits and checks if any result blocked + +Settings: `hookTimeout` (default 5000ms), `failMode` ("open" = swallow errors, "closed" = block on error), `logLevel`. + +## Adapter Implementation + +Each adapter in `src/adapters/` follows this pattern: + +1. Define `EVENT_MAP: Record` — maps each of the 15 events to native event names (empty array if unsupported) +2. Define `REVERSE_MAP: Record` — inverse mapping +3. Extend `BaseAdapter`, set `id`, `name`, `version`, `capabilities` +4. Implement `detect()` — use `this.commandExists()` or `this.existsSync()` from base class +5. Implement `generate(hooks)` — return `GeneratedConfig[]` with `{ path, content }` +6. Instantiate and call `registry.register(adapter)` + +The `capabilities` object declares: `beforeHooks`, `afterHooks`, `mcp`, `configFile`, `supportedEvents[]`, `blockableEvents[]`. + +Use `claude-code.ts` as the reference adapter — it's the most complete, generating both a runner script and settings.json modifications. + +## Config & Builder (`config/`) + +- `defineConfig(config)` — type-safe wrapper, returns config as-is +- `hook()` — fluent builder: `hook().id("x").name("X").on("shell:before").do(handler).build()` +- `loadConfig()` — searches for `ai-hooks.config.{ts,js,mts,mjs}`, dynamically imports it +- Config supports `extends: [preset]` for composing hook collections + +## Built-in Hooks (`hooks/builtin.ts`) + +Four security hooks with escalating priority: + +1. `block-dangerous-commands` (priority 1) — blocks `rm -rf /`, `mkfs`, `dd`, fork bombs, `DROP DATABASE`, etc. +2. `scan-secrets` (priority 2) — detects API keys, tokens, private keys in content +3. `protect-sensitive-files` (priority 3) — blocks writes to `.env`, credentials, SSH keys +4. 
`audit-shell` (priority 100) — records command, exit code, duration (after-phase, observe-only) + +## Registry (`adapters/registry.ts`) + +Uses `globalThis` singleton pattern to survive module duplication (multiple copies of the package loaded). All adapters register on import. Import `adapters/all.ts` to register every adapter at once. diff --git a/packages/core/README.md b/packages/hooks/README.md similarity index 100% rename from packages/core/README.md rename to packages/hooks/README.md diff --git a/packages/core/package.json b/packages/hooks/package.json similarity index 96% rename from packages/core/package.json rename to packages/hooks/package.json index 8f1b6b6..c0e598a 100644 --- a/packages/core/package.json +++ b/packages/hooks/package.json @@ -17,7 +17,7 @@ "license": "MIT", "repository": { "type": "git", - "url": "https://github.com/PremierStudio/ai-hooks" + "url": "https://github.com/PremierStudio/ai-tools" }, "bin": { "ai-hooks": "dist/cli/bin.js" diff --git a/packages/core/src/adapters/all.ts b/packages/hooks/src/adapters/all.ts similarity index 100% rename from packages/core/src/adapters/all.ts rename to packages/hooks/src/adapters/all.ts diff --git a/packages/core/src/adapters/amp.test.ts b/packages/hooks/src/adapters/amp.test.ts similarity index 100% rename from packages/core/src/adapters/amp.test.ts rename to packages/hooks/src/adapters/amp.test.ts diff --git a/packages/core/src/adapters/amp.ts b/packages/hooks/src/adapters/amp.ts similarity index 100% rename from packages/core/src/adapters/amp.ts rename to packages/hooks/src/adapters/amp.ts diff --git a/packages/core/src/adapters/base.test.ts b/packages/hooks/src/adapters/base.test.ts similarity index 100% rename from packages/core/src/adapters/base.test.ts rename to packages/hooks/src/adapters/base.test.ts diff --git a/packages/core/src/adapters/base.ts b/packages/hooks/src/adapters/base.ts similarity index 97% rename from packages/core/src/adapters/base.ts rename to 
packages/hooks/src/adapters/base.ts index 1c57688..87e2782 100644 --- a/packages/core/src/adapters/base.ts +++ b/packages/hooks/src/adapters/base.ts @@ -73,9 +73,9 @@ export abstract class BaseAdapter implements Adapter { */ protected async commandExists(command: string): Promise { const { exec } = await import("node:child_process"); - return new Promise((resolve) => { + return new Promise((ok) => { exec(`which ${command}`, (error) => { - resolve(!error); + ok(!error); }); }); } diff --git a/packages/core/src/adapters/claude-code.test.ts b/packages/hooks/src/adapters/claude-code.test.ts similarity index 100% rename from packages/core/src/adapters/claude-code.test.ts rename to packages/hooks/src/adapters/claude-code.test.ts diff --git a/packages/core/src/adapters/claude-code.ts b/packages/hooks/src/adapters/claude-code.ts similarity index 100% rename from packages/core/src/adapters/claude-code.ts rename to packages/hooks/src/adapters/claude-code.ts diff --git a/packages/core/src/adapters/cline.test.ts b/packages/hooks/src/adapters/cline.test.ts similarity index 100% rename from packages/core/src/adapters/cline.test.ts rename to packages/hooks/src/adapters/cline.test.ts diff --git a/packages/core/src/adapters/cline.ts b/packages/hooks/src/adapters/cline.ts similarity index 100% rename from packages/core/src/adapters/cline.ts rename to packages/hooks/src/adapters/cline.ts diff --git a/packages/core/src/adapters/codex.test.ts b/packages/hooks/src/adapters/codex.test.ts similarity index 100% rename from packages/core/src/adapters/codex.test.ts rename to packages/hooks/src/adapters/codex.test.ts diff --git a/packages/core/src/adapters/codex.ts b/packages/hooks/src/adapters/codex.ts similarity index 100% rename from packages/core/src/adapters/codex.ts rename to packages/hooks/src/adapters/codex.ts diff --git a/packages/core/src/adapters/cursor.test.ts b/packages/hooks/src/adapters/cursor.test.ts similarity index 100% rename from packages/core/src/adapters/cursor.test.ts 
rename to packages/hooks/src/adapters/cursor.test.ts diff --git a/packages/core/src/adapters/cursor.ts b/packages/hooks/src/adapters/cursor.ts similarity index 100% rename from packages/core/src/adapters/cursor.ts rename to packages/hooks/src/adapters/cursor.ts diff --git a/packages/core/src/adapters/droid.test.ts b/packages/hooks/src/adapters/droid.test.ts similarity index 100% rename from packages/core/src/adapters/droid.test.ts rename to packages/hooks/src/adapters/droid.test.ts diff --git a/packages/core/src/adapters/droid.ts b/packages/hooks/src/adapters/droid.ts similarity index 100% rename from packages/core/src/adapters/droid.ts rename to packages/hooks/src/adapters/droid.ts diff --git a/packages/core/src/adapters/gemini-cli.test.ts b/packages/hooks/src/adapters/gemini-cli.test.ts similarity index 100% rename from packages/core/src/adapters/gemini-cli.test.ts rename to packages/hooks/src/adapters/gemini-cli.test.ts diff --git a/packages/core/src/adapters/gemini-cli.ts b/packages/hooks/src/adapters/gemini-cli.ts similarity index 100% rename from packages/core/src/adapters/gemini-cli.ts rename to packages/hooks/src/adapters/gemini-cli.ts diff --git a/packages/core/src/adapters/index.ts b/packages/hooks/src/adapters/index.ts similarity index 100% rename from packages/core/src/adapters/index.ts rename to packages/hooks/src/adapters/index.ts diff --git a/packages/core/src/adapters/kiro.test.ts b/packages/hooks/src/adapters/kiro.test.ts similarity index 100% rename from packages/core/src/adapters/kiro.test.ts rename to packages/hooks/src/adapters/kiro.test.ts diff --git a/packages/core/src/adapters/kiro.ts b/packages/hooks/src/adapters/kiro.ts similarity index 100% rename from packages/core/src/adapters/kiro.ts rename to packages/hooks/src/adapters/kiro.ts diff --git a/packages/core/src/adapters/opencode.test.ts b/packages/hooks/src/adapters/opencode.test.ts similarity index 100% rename from packages/core/src/adapters/opencode.test.ts rename to 
packages/hooks/src/adapters/opencode.test.ts diff --git a/packages/core/src/adapters/opencode.ts b/packages/hooks/src/adapters/opencode.ts similarity index 100% rename from packages/core/src/adapters/opencode.ts rename to packages/hooks/src/adapters/opencode.ts diff --git a/packages/core/src/adapters/registry.real.test.ts b/packages/hooks/src/adapters/registry.real.test.ts similarity index 100% rename from packages/core/src/adapters/registry.real.test.ts rename to packages/hooks/src/adapters/registry.real.test.ts diff --git a/packages/core/src/adapters/registry.test.ts b/packages/hooks/src/adapters/registry.test.ts similarity index 100% rename from packages/core/src/adapters/registry.test.ts rename to packages/hooks/src/adapters/registry.test.ts diff --git a/packages/core/src/adapters/registry.ts b/packages/hooks/src/adapters/registry.ts similarity index 100% rename from packages/core/src/adapters/registry.ts rename to packages/hooks/src/adapters/registry.ts diff --git a/packages/hooks/src/cli/bin.ts b/packages/hooks/src/cli/bin.ts new file mode 100644 index 0000000..3adf2a9 --- /dev/null +++ b/packages/hooks/src/cli/bin.ts @@ -0,0 +1,6 @@ +import { run } from "./index.js"; + +run(process.argv.slice(2)).catch((err) => { + console.error(err.message); + process.exit(1); +}); diff --git a/packages/core/src/cli/index.test.ts b/packages/hooks/src/cli/index.test.ts similarity index 95% rename from packages/core/src/cli/index.test.ts rename to packages/hooks/src/cli/index.test.ts index 6065e12..2239127 100644 --- a/packages/core/src/cli/index.test.ts +++ b/packages/hooks/src/cli/index.test.ts @@ -658,6 +658,47 @@ describe("run() - install command", () => { expect(installFn).toHaveBeenCalled(); expect(mockRegistryDetectAll).not.toHaveBeenCalled(); }); + + it("skips undetected tool in --tools and warns", async () => { + const adapter = makeAdapter({ + id: "kiro", + name: "Kiro", + detect: vi.fn<() => Promise>().mockResolvedValue(false), + }); + + 
mockLoadConfig.mockResolvedValue(defaultConfig); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "kiro") return adapter; + return undefined; + }); + + await run(["install", "--tools=kiro"]); + expect(allWarn()).toContain("Kiro not detected, skipping"); + expect(allWarn()).toContain("--force"); + expect(allLog()).toContain("No AI tools detected"); + }); + + it("--force bypasses detection check for --tools", async () => { + const installFn = vi + .fn<(configs: GeneratedConfig[]) => Promise>() + .mockResolvedValue(undefined); + const adapter = makeAdapter({ + id: "kiro", + name: "Kiro", + detect: vi.fn<() => Promise>().mockResolvedValue(false), + install: installFn, + }); + + mockLoadConfig.mockResolvedValue(defaultConfig); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "kiro") return adapter; + return undefined; + }); + + await run(["install", "--tools=kiro", "--force"]); + expect(installFn).toHaveBeenCalled(); + expect(allLog()).toContain("Installing hooks into 1 tool(s)"); + }); }); describe("run() - uninstall command", () => { @@ -1000,6 +1041,22 @@ describe("run() - flag parsing", () => { expect(mockLoadConfig).toHaveBeenCalledWith("/custom.ts"); expect(allLog()).toContain("[dry-run]"); }); + + it("parses --force flag", async () => { + const adapter = makeAdapter({ + id: "kiro", + name: "Kiro", + detect: vi.fn<() => Promise>().mockResolvedValue(false), + }); + mockLoadConfig.mockResolvedValue({ hooks: [] }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "kiro") return adapter; + return undefined; + }); + + await run(["generate", "--tools=kiro", "--force"]); + expect(allLog()).toContain("Generating configs for 1 tool(s)"); + }); }); describe("run() - error propagation", () => { diff --git a/packages/core/src/cli/index.ts b/packages/hooks/src/cli/index.ts similarity index 95% rename from packages/core/src/cli/index.ts rename to packages/hooks/src/cli/index.ts index 8856794..714e2b8 100644 --- 
a/packages/core/src/cli/index.ts +++ b/packages/hooks/src/cli/index.ts @@ -27,6 +27,7 @@ OPTIONS: --config Path to config file (default: ai-hooks.config.ts) --verbose Show detailed output --dry-run Show what would be generated without writing files + --force Skip detection checks for --tools (install even if tool not found) EXAMPLES: ai-hooks init # Create config file @@ -40,6 +41,7 @@ type Flags = { config?: string; verbose?: boolean; dryRun?: boolean; + force?: boolean; }; export async function run(args: string[]): Promise { @@ -294,6 +296,8 @@ function parseFlags(args: string[]): Flags { flags.verbose = true; } else if (arg === "--dry-run") { flags.dryRun = true; + } else if (arg === "--force") { + flags.force = true; } } @@ -306,11 +310,15 @@ async function resolveAdapters(flags: Flags): Promise { const adapters: Adapter[] = []; for (const id of ids) { const adapter = registry.get(id); - if (adapter) { - adapters.push(adapter); - } else { + if (!adapter) { console.warn(` Warning: Unknown adapter "${id}"`); + continue; + } + if (!flags.force && !(await adapter.detect())) { + console.warn(` Warning: ${adapter.name} not detected, skipping (use --force to override)`); + continue; } + adapters.push(adapter); } return adapters; } diff --git a/packages/core/src/config/define.test.ts b/packages/hooks/src/config/define.test.ts similarity index 100% rename from packages/core/src/config/define.test.ts rename to packages/hooks/src/config/define.test.ts diff --git a/packages/core/src/config/define.ts b/packages/hooks/src/config/define.ts similarity index 100% rename from packages/core/src/config/define.ts rename to packages/hooks/src/config/define.ts diff --git a/packages/core/src/config/index.ts b/packages/hooks/src/config/index.ts similarity index 100% rename from packages/core/src/config/index.ts rename to packages/hooks/src/config/index.ts diff --git a/packages/core/src/config/loader.test.ts b/packages/hooks/src/config/loader.test.ts similarity index 100% rename from 
packages/core/src/config/loader.test.ts rename to packages/hooks/src/config/loader.test.ts diff --git a/packages/core/src/config/loader.ts b/packages/hooks/src/config/loader.ts similarity index 100% rename from packages/core/src/config/loader.ts rename to packages/hooks/src/config/loader.ts diff --git a/packages/core/src/hooks/builtin.test.ts b/packages/hooks/src/hooks/builtin.test.ts similarity index 100% rename from packages/core/src/hooks/builtin.test.ts rename to packages/hooks/src/hooks/builtin.test.ts diff --git a/packages/core/src/hooks/builtin.ts b/packages/hooks/src/hooks/builtin.ts similarity index 100% rename from packages/core/src/hooks/builtin.ts rename to packages/hooks/src/hooks/builtin.ts diff --git a/packages/core/src/hooks/index.ts b/packages/hooks/src/hooks/index.ts similarity index 100% rename from packages/core/src/hooks/index.ts rename to packages/hooks/src/hooks/index.ts diff --git a/packages/core/src/index.ts b/packages/hooks/src/index.ts similarity index 100% rename from packages/core/src/index.ts rename to packages/hooks/src/index.ts diff --git a/packages/core/src/runtime/chain.test.ts b/packages/hooks/src/runtime/chain.test.ts similarity index 100% rename from packages/core/src/runtime/chain.test.ts rename to packages/hooks/src/runtime/chain.test.ts diff --git a/packages/core/src/runtime/chain.ts b/packages/hooks/src/runtime/chain.ts similarity index 100% rename from packages/core/src/runtime/chain.ts rename to packages/hooks/src/runtime/chain.ts diff --git a/packages/core/src/runtime/engine.test.ts b/packages/hooks/src/runtime/engine.test.ts similarity index 100% rename from packages/core/src/runtime/engine.test.ts rename to packages/hooks/src/runtime/engine.test.ts diff --git a/packages/core/src/runtime/engine.ts b/packages/hooks/src/runtime/engine.ts similarity index 100% rename from packages/core/src/runtime/engine.ts rename to packages/hooks/src/runtime/engine.ts diff --git a/packages/core/src/runtime/index.ts 
b/packages/hooks/src/runtime/index.ts similarity index 100% rename from packages/core/src/runtime/index.ts rename to packages/hooks/src/runtime/index.ts diff --git a/packages/core/src/types/adapter.ts b/packages/hooks/src/types/adapter.ts similarity index 100% rename from packages/core/src/types/adapter.ts rename to packages/hooks/src/types/adapter.ts diff --git a/packages/core/src/types/config.ts b/packages/hooks/src/types/config.ts similarity index 100% rename from packages/core/src/types/config.ts rename to packages/hooks/src/types/config.ts diff --git a/packages/core/src/types/events.test.ts b/packages/hooks/src/types/events.test.ts similarity index 100% rename from packages/core/src/types/events.test.ts rename to packages/hooks/src/types/events.test.ts diff --git a/packages/core/src/types/events.ts b/packages/hooks/src/types/events.ts similarity index 100% rename from packages/core/src/types/events.ts rename to packages/hooks/src/types/events.ts diff --git a/packages/core/src/types/hooks.ts b/packages/hooks/src/types/hooks.ts similarity index 100% rename from packages/core/src/types/hooks.ts rename to packages/hooks/src/types/hooks.ts diff --git a/packages/core/src/types/index.ts b/packages/hooks/src/types/index.ts similarity index 100% rename from packages/core/src/types/index.ts rename to packages/hooks/src/types/index.ts diff --git a/packages/hooks/tsconfig.json b/packages/hooks/tsconfig.json new file mode 100644 index 0000000..329ab40 --- /dev/null +++ b/packages/hooks/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules", "dist", "**/*.test.ts"] +} diff --git a/packages/core/tsup.config.ts b/packages/hooks/tsup.config.ts similarity index 100% rename from packages/core/tsup.config.ts rename to packages/hooks/tsup.config.ts diff --git a/packages/mcp/CLAUDE.md b/packages/mcp/CLAUDE.md new file mode 100644 index 
0000000..39146cc --- /dev/null +++ b/packages/mcp/CLAUDE.md @@ -0,0 +1,31 @@ +# packages/mcp + +MCP (Model Context Protocol) server configuration management. Simpler than hooks — no runtime engine, no event system. Just manages server definitions across tools. + +## Key Types (`types/definition.ts`) + +`MCPServerDefinition`: `id`, `name`, `description?`, `transport`, `enabled?`, `tags?` + +Two transport types: + +- `{ type: "stdio", command: string, args?: string[], env?: Record }` +- `{ type: "sse", url: string, headers?: Record }` + +Config: `MCPConfig { servers: MCPServerDefinition[] }` + +## Adapter Pattern + +Much simpler than hooks (~44-line base). No event mapping. Adapters: + +- `detect()` — check if tool is installed +- `generate(servers)` — produce tool-specific config files (e.g., `.mcp.json` for Claude Code) +- `install()` — write generated files +- `import()` — read existing tool configs back to universal format + +The Claude Code adapter reads/writes `.mcp.json` with a `mcpServers` object structure. + +## CLI Commands + +`init`, `detect`, `generate`, `install`, `import`, `sync`, `export`, `help` + +The `import` and `sync` commands are the primary use case — syncing MCP server configs across multiple AI tools. 
diff --git a/packages/mcp/package.json b/packages/mcp/package.json new file mode 100644 index 0000000..5b35b99 --- /dev/null +++ b/packages/mcp/package.json @@ -0,0 +1,52 @@ +{ + "name": "@premierstudio/ai-mcp", + "version": "1.1.8", + "description": "Universal MCP server configuration for AI coding tools", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/PremierStudio/ai-tools" + }, + "bin": { + "ai-mcp": "dist/cli/bin.js" + }, + "files": [ + "dist", + "README.md", + "LICENSE" + ], + "type": "module", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.js" + }, + "./adapters": { + "types": "./dist/adapters/index.d.ts", + "import": "./dist/adapters/index.js" + }, + "./adapters/all": { + "types": "./dist/adapters/all.d.ts", + "import": "./dist/adapters/all.js" + }, + "./cli": { + "types": "./dist/cli/index.d.ts", + "import": "./dist/cli/index.js" + } + }, + "publishConfig": { + "access": "public" + }, + "scripts": { + "build": "tsup", + "dev": "tsup --watch", + "typecheck": "tsc --noEmit", + "test": "vitest run", + "clean": "rm -rf dist" + }, + "engines": { + "node": ">=22.0.0" + } +} diff --git a/packages/mcp/src/adapters/all.ts b/packages/mcp/src/adapters/all.ts new file mode 100644 index 0000000..335cd31 --- /dev/null +++ b/packages/mcp/src/adapters/all.ts @@ -0,0 +1,13 @@ +// Side-effect imports to register all built-in MCP adapters with the registry. 
+import "./amp.js"; +import "./claude-code.js"; +import "./cline.js"; +import "./codex.js"; +import "./copilot.js"; +import "./cursor.js"; +import "./droid.js"; +import "./gemini-cli.js"; +import "./kiro.js"; +import "./opencode.js"; +import "./roo-code.js"; +import "./windsurf.js"; diff --git a/packages/mcp/src/adapters/amp.test.ts b/packages/mcp/src/adapters/amp.test.ts new file mode 100644 index 0000000..74f768e --- /dev/null +++ b/packages/mcp/src/adapters/amp.test.ts @@ -0,0 +1,137 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { AmpMCPAdapter } from "./amp.js"; +import type { MCPServerDefinition } from "../types/index.js"; + +describe("AmpMCPAdapter", () => { + let adapter: AmpMCPAdapter; + + const testServer: MCPServerDefinition = { + id: "test-server", + name: "Test Server", + transport: { type: "stdio", command: "npx", args: ["-y", "test-mcp"], env: { KEY: "val" } }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new AmpMCPAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("amp")); + it("has correct name", () => expect(adapter.name).toBe("Amp")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config path", () => expect(adapter.configPath).toBe(".amp/mcp.json")); + }); + + describe("generate", () => { + it("generates valid JSON config", async () => { + const files = await adapter.generate([testServer]); + expect(files).toHaveLength(1); + expect(files[0]!.path).toBe(".amp/mcp.json"); + expect(files[0]!.format).toBe("json"); + const parsed = 
JSON.parse(files[0]!.content); + expect(parsed.mcpServers["test-server"]).toBeDefined(); + expect(parsed.mcpServers["test-server"].command).toBe("npx"); + }); + + it("handles SSE transport", async () => { + const sseServer: MCPServerDefinition = { + id: "sse-server", + name: "SSE Server", + transport: { type: "sse", url: "http://localhost:3000" }, + }; + const files = await adapter.generate([sseServer]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["sse-server"].url).toBe("http://localhost:3000"); + }); + + it("handles empty servers array", async () => { + const files = await adapter.generate([]); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(0); + }); + + it("handles multiple servers", async () => { + const servers = [testServer, { ...testServer, id: "server-2", name: "Server 2" }]; + const files = await adapter.generate(servers); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(2); + }); + + it("generates with optional fields undefined", async () => { + const server: MCPServerDefinition = { + id: "minimal", + name: "Minimal", + transport: { type: "stdio", command: "node" }, + }; + const files = await adapter.generate([server]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["minimal"].args).toEqual([]); + expect(parsed.mcpServers["minimal"].env).toEqual({}); + }); + }); + + describe("import", () => { + it("returns empty array when file does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports stdio servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { "my-server": { command: "npx", args: ["-y", "test"], env: { A: "1" } } }, + }), + ); + const result = await 
adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.id).toBe("my-server"); + expect(result[0]!.transport.type).toBe("stdio"); + }); + + it("imports SSE servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { sse: { url: "http://localhost:3000" } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.transport.type).toBe("sse"); + }); + + it("imports with missing servers key", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue(JSON.stringify({})); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/mcp/src/adapters/amp.ts b/packages/mcp/src/adapters/amp.ts new file mode 100644 index 0000000..37ef9b3 --- /dev/null +++ b/packages/mcp/src/adapters/amp.ts @@ -0,0 +1,79 @@ +import { BaseMCPAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { MCPServerDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { resolve } from "node:path"; + +class AmpMCPAdapter extends BaseMCPAdapter { + readonly id = "amp"; + readonly name = "Amp"; + readonly nativeSupport = true; + readonly configPath = ".amp/mcp.json"; + readonly command = "amp"; + + async generate(servers: MCPServerDefinition[]): Promise { + const mcpServers: Record = {}; + for (const server of servers) { + if (server.transport.type === "stdio") { + mcpServers[server.id] = { + command: server.transport.command, + args: server.transport.args ?? [], + env: server.transport.env ?? 
{}, + }; + } else { + mcpServers[server.id] = { + url: server.transport.url, + headers: server.transport.headers ?? {}, + }; + } + } + return [ + { + path: this.configPath, + content: JSON.stringify({ mcpServers }, null, 2) + "\n", + format: "json", + }, + ]; + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const fullPath = resolve(dir, this.configPath); + if (!existsSync(fullPath)) return []; + const raw = await readFile(fullPath, "utf-8"); + const data = JSON.parse(raw) as { mcpServers?: Record> }; + const servers: MCPServerDefinition[] = []; + for (const [id, config] of Object.entries(data.mcpServers ?? {})) { + if (config.command) { + servers.push({ + id, + name: id, + transport: { + type: "stdio", + command: config.command as string, + args: config.args as string[] | undefined, + env: config.env as Record | undefined, + }, + }); + } else if (config.url) { + servers.push({ + id, + name: id, + transport: { + type: "sse", + url: config.url as string, + headers: config.headers as Record | undefined, + }, + }); + } + } + return servers; + } +} + +const adapter = new AmpMCPAdapter(); +registry.register(adapter); + +export { AmpMCPAdapter }; +export default adapter; diff --git a/packages/mcp/src/adapters/base.test.ts b/packages/mcp/src/adapters/base.test.ts new file mode 100644 index 0000000..c74b0fb --- /dev/null +++ b/packages/mcp/src/adapters/base.test.ts @@ -0,0 +1,240 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { BaseMCPAdapter } from "./base.js"; +import type { MCPServerDefinition, GeneratedFile } from "../types/index.js"; + +// Concrete subclass for testing the abstract BaseMCPAdapter +class TestMCPAdapter extends BaseMCPAdapter { + readonly id = "test-mcp"; + readonly name = "Test MCP"; + readonly nativeSupport = true; + readonly configPath = ".test/mcp.json"; + + async generate(servers: MCPServerDefinition[]): Promise { + return servers.map((s) => ({ + path: `.test/${s.id}.json`, + 
content: JSON.stringify(s), + format: "json" as const, + })); + } + + async import(_cwd?: string): Promise { + return []; + } + + // Expose protected methods for testing + publicReadJsonFile(path: string): Promise { + return this.readJsonFile(path); + } +} + +// Mock node:fs and node:fs/promises +vi.mock("node:fs", () => ({ + existsSync: vi.fn(), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +// Import mocked modules so we can control them +import { existsSync } from "node:fs"; +import { readFile, writeFile, mkdir, rm } from "node:fs/promises"; + +const mockedExistsSync = vi.mocked(existsSync); +const mockedReadFile = vi.mocked(readFile); +const mockedWriteFile = vi.mocked(writeFile); +const mockedMkdir = vi.mocked(mkdir); +const mockedRm = vi.mocked(rm); + +describe("BaseMCPAdapter", () => { + let adapter: TestMCPAdapter; + + beforeEach(() => { + adapter = new TestMCPAdapter(); + vi.clearAllMocks(); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + describe("abstract properties", () => { + it("exposes id, name, nativeSupport, and configPath", () => { + expect(adapter.id).toBe("test-mcp"); + expect(adapter.name).toBe("Test MCP"); + expect(adapter.nativeSupport).toBe(true); + expect(adapter.configPath).toBe(".test/mcp.json"); + }); + }); + + describe("detect()", () => { + it("returns true when config file exists", async () => { + mockedExistsSync.mockReturnValue(true); + const result = await adapter.detect(); + expect(result).toBe(true); + }); + + it("returns false when config file does not exist", async () => { + mockedExistsSync.mockReturnValue(false); + const result = await adapter.detect(); + expect(result).toBe(false); + }); + + it("uses provided cwd to resolve config path", async () => { + mockedExistsSync.mockReturnValue(true); + await adapter.detect("/custom/dir"); + expect(mockedExistsSync).toHaveBeenCalledWith(expect.stringContaining("custom/dir")); + }); + + 
it("uses process.cwd() when no cwd is provided", async () => { + mockedExistsSync.mockReturnValue(false); + await adapter.detect(); + expect(mockedExistsSync).toHaveBeenCalledWith(expect.stringContaining(adapter.configPath)); + }); + }); + + describe("generate()", () => { + it("calls the subclass implementation with servers", async () => { + const servers: MCPServerDefinition[] = [ + { + id: "test-server", + name: "Test Server", + transport: { type: "stdio", command: "npx", args: ["-y", "test"] }, + }, + ]; + const files = await adapter.generate(servers); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".test/test-server.json"); + }); + + it("handles empty servers array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + }); + + describe("install()", () => { + it("writes all files to disk", async () => { + mockedMkdir.mockResolvedValue(undefined); + mockedWriteFile.mockResolvedValue(undefined); + + const files: GeneratedFile[] = [ + { path: ".mcp/config.json", content: '{"servers":{}}', format: "json" }, + { path: ".mcp/settings.json", content: '{"enabled":true}', format: "json" }, + ]; + + await adapter.install(files); + + expect(mockedMkdir).toHaveBeenCalledTimes(2); + expect(mockedWriteFile).toHaveBeenCalledTimes(2); + expect(mockedWriteFile).toHaveBeenCalledWith( + expect.stringContaining("config.json"), + '{"servers":{}}', + "utf-8", + ); + expect(mockedWriteFile).toHaveBeenCalledWith( + expect.stringContaining("settings.json"), + '{"enabled":true}', + "utf-8", + ); + }); + + it("creates parent directories recursively", async () => { + mockedMkdir.mockResolvedValue(undefined); + mockedWriteFile.mockResolvedValue(undefined); + + const files: GeneratedFile[] = [ + { path: "deep/nested/dir/config.json", content: "{}", format: "json" }, + ]; + + await adapter.install(files); + + expect(mockedMkdir).toHaveBeenCalledWith(expect.stringContaining("deep/nested/dir"), { + recursive: true, + }); + }); + + 
it("handles empty file array", async () => { + await adapter.install([]); + expect(mockedMkdir).not.toHaveBeenCalled(); + expect(mockedWriteFile).not.toHaveBeenCalled(); + }); + + it("uses provided cwd to resolve file paths", async () => { + mockedMkdir.mockResolvedValue(undefined); + mockedWriteFile.mockResolvedValue(undefined); + + const files: GeneratedFile[] = [{ path: "config.json", content: "{}", format: "json" }]; + + await adapter.install(files, "/custom/project"); + + expect(mockedWriteFile).toHaveBeenCalledWith( + expect.stringContaining("/custom/project"), + "{}", + "utf-8", + ); + }); + }); + + describe("uninstall()", () => { + it("removes config file when it exists", async () => { + mockedExistsSync.mockReturnValue(true); + mockedRm.mockResolvedValue(undefined); + + await adapter.uninstall(); + + expect(mockedRm).toHaveBeenCalledWith(expect.stringContaining(adapter.configPath)); + }); + + it("does nothing when config file does not exist", async () => { + mockedExistsSync.mockReturnValue(false); + + await adapter.uninstall(); + + expect(mockedRm).not.toHaveBeenCalled(); + }); + + it("uses provided cwd to resolve config path", async () => { + mockedExistsSync.mockReturnValue(true); + mockedRm.mockResolvedValue(undefined); + + await adapter.uninstall("/custom/dir"); + + expect(mockedExistsSync).toHaveBeenCalledWith(expect.stringContaining("/custom/dir")); + }); + }); + + describe("readJsonFile()", () => { + it("returns parsed JSON when file exists", async () => { + mockedExistsSync.mockReturnValue(true); + mockedReadFile.mockResolvedValue('{"name":"test","version":"1.0"}'); + + const result = await adapter.publicReadJsonFile<{ name: string; version: string }>( + "package.json", + ); + + expect(result).toEqual({ name: "test", version: "1.0" }); + }); + + it("returns null when file does not exist", async () => { + mockedExistsSync.mockReturnValue(false); + + const result = await adapter.publicReadJsonFile("missing.json"); + + expect(result).toBeNull(); + 
expect(mockedReadFile).not.toHaveBeenCalled(); + }); + + it("reads file with utf-8 encoding", async () => { + mockedExistsSync.mockReturnValue(true); + mockedReadFile.mockResolvedValue("{}"); + + await adapter.publicReadJsonFile("file.json"); + + expect(mockedReadFile).toHaveBeenCalledWith(expect.any(String), "utf-8"); + }); + }); +}); diff --git a/packages/mcp/src/adapters/base.ts b/packages/mcp/src/adapters/base.ts new file mode 100644 index 0000000..b0bc3a9 --- /dev/null +++ b/packages/mcp/src/adapters/base.ts @@ -0,0 +1,58 @@ +import type { MCPServerDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile, writeFile, mkdir, rm } from "node:fs/promises"; +import { dirname, resolve } from "node:path"; + +export abstract class BaseMCPAdapter { + abstract readonly id: string; + abstract readonly name: string; + abstract readonly nativeSupport: boolean; + abstract readonly configPath: string; + + /** CLI binary name for detection (e.g., "claude", "cursor"). Override in subclass. */ + readonly command?: string; + + abstract generate(servers: MCPServerDefinition[]): Promise; + abstract import(cwd?: string): Promise; + + async detect(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const hasConfig = existsSync(resolve(dir, this.configPath)); + if (hasConfig) return true; + if (this.command) return this.commandExists(this.command); + return false; + } + + protected async commandExists(command: string): Promise { + const { exec } = await import("node:child_process"); + return new Promise((ok) => { + exec(`which ${command}`, (error: Error | null) => { + ok(!error); + }); + }); + } + + async install(files: GeneratedFile[], cwd?: string): Promise { + const dir = cwd ?? 
process.cwd(); + for (const file of files) { + const fullPath = resolve(dir, file.path); + await mkdir(dirname(fullPath), { recursive: true }); + await writeFile(fullPath, file.content, "utf-8"); + } + } + + async uninstall(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const fullPath = resolve(dir, this.configPath); + if (existsSync(fullPath)) { + await rm(fullPath); + } + } + + protected async readJsonFile(path: string): Promise { + const fullPath = resolve(process.cwd(), path); + if (!existsSync(fullPath)) return null; + const content = await readFile(fullPath, "utf-8"); + return JSON.parse(content) as T; + } +} diff --git a/packages/mcp/src/adapters/claude-code.test.ts b/packages/mcp/src/adapters/claude-code.test.ts new file mode 100644 index 0000000..089d167 --- /dev/null +++ b/packages/mcp/src/adapters/claude-code.test.ts @@ -0,0 +1,137 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { ClaudeCodeMCPAdapter } from "./claude-code.js"; +import type { MCPServerDefinition } from "../types/index.js"; + +describe("ClaudeCodeMCPAdapter", () => { + let adapter: ClaudeCodeMCPAdapter; + + const testServer: MCPServerDefinition = { + id: "test-server", + name: "Test Server", + transport: { type: "stdio", command: "npx", args: ["-y", "test-mcp"], env: { KEY: "val" } }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new ClaudeCodeMCPAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("claude-code")); + it("has correct name", () => expect(adapter.name).toBe("Claude Code")); + it("has native support", () => 
expect(adapter.nativeSupport).toBe(true)); + it("has correct config path", () => expect(adapter.configPath).toBe(".mcp.json")); + }); + + describe("generate", () => { + it("generates valid JSON config", async () => { + const files = await adapter.generate([testServer]); + expect(files).toHaveLength(1); + expect(files[0]!.path).toBe(".mcp.json"); + expect(files[0]!.format).toBe("json"); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["test-server"]).toBeDefined(); + expect(parsed.mcpServers["test-server"].command).toBe("npx"); + }); + + it("handles SSE transport", async () => { + const sseServer: MCPServerDefinition = { + id: "sse-server", + name: "SSE Server", + transport: { type: "sse", url: "http://localhost:3000" }, + }; + const files = await adapter.generate([sseServer]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["sse-server"].url).toBe("http://localhost:3000"); + }); + + it("handles empty servers array", async () => { + const files = await adapter.generate([]); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(0); + }); + + it("handles multiple servers", async () => { + const servers = [testServer, { ...testServer, id: "server-2", name: "Server 2" }]; + const files = await adapter.generate(servers); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(2); + }); + + it("generates with optional fields undefined", async () => { + const server: MCPServerDefinition = { + id: "minimal", + name: "Minimal", + transport: { type: "stdio", command: "node" }, + }; + const files = await adapter.generate([server]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["minimal"].args).toEqual([]); + expect(parsed.mcpServers["minimal"].env).toEqual({}); + }); + }); + + describe("import", () => { + it("returns empty array when file does not exist", async () => { + 
vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports stdio servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { "my-server": { command: "npx", args: ["-y", "test"], env: { A: "1" } } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.id).toBe("my-server"); + expect(result[0]!.transport.type).toBe("stdio"); + }); + + it("imports SSE servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { sse: { url: "http://localhost:3000" } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.transport.type).toBe("sse"); + }); + + it("imports with missing servers key", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue(JSON.stringify({})); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/mcp/src/adapters/claude-code.ts b/packages/mcp/src/adapters/claude-code.ts new file mode 100644 index 0000000..7b9aecf --- /dev/null +++ b/packages/mcp/src/adapters/claude-code.ts @@ -0,0 +1,79 @@ +import { BaseMCPAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { MCPServerDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { resolve } from "node:path"; + +class ClaudeCodeMCPAdapter extends BaseMCPAdapter { + readonly id = "claude-code"; + readonly name = 
"Claude Code"; + readonly nativeSupport = true; + readonly configPath = ".mcp.json"; + readonly command = "claude"; + + async generate(servers: MCPServerDefinition[]): Promise { + const mcpServers: Record = {}; + for (const server of servers) { + if (server.transport.type === "stdio") { + mcpServers[server.id] = { + command: server.transport.command, + args: server.transport.args ?? [], + env: server.transport.env ?? {}, + }; + } else { + mcpServers[server.id] = { + url: server.transport.url, + headers: server.transport.headers ?? {}, + }; + } + } + return [ + { + path: this.configPath, + content: JSON.stringify({ mcpServers }, null, 2) + "\n", + format: "json", + }, + ]; + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const fullPath = resolve(dir, this.configPath); + if (!existsSync(fullPath)) return []; + const raw = await readFile(fullPath, "utf-8"); + const data = JSON.parse(raw) as { mcpServers?: Record> }; + const servers: MCPServerDefinition[] = []; + for (const [id, config] of Object.entries(data.mcpServers ?? 
{})) { + if (config.command) { + servers.push({ + id, + name: id, + transport: { + type: "stdio", + command: config.command as string, + args: config.args as string[] | undefined, + env: config.env as Record | undefined, + }, + }); + } else if (config.url) { + servers.push({ + id, + name: id, + transport: { + type: "sse", + url: config.url as string, + headers: config.headers as Record | undefined, + }, + }); + } + } + return servers; + } +} + +const adapter = new ClaudeCodeMCPAdapter(); +registry.register(adapter); + +export { ClaudeCodeMCPAdapter }; +export default adapter; diff --git a/packages/mcp/src/adapters/cline.test.ts b/packages/mcp/src/adapters/cline.test.ts new file mode 100644 index 0000000..47a71c3 --- /dev/null +++ b/packages/mcp/src/adapters/cline.test.ts @@ -0,0 +1,138 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { ClineMCPAdapter } from "./cline.js"; +import type { MCPServerDefinition } from "../types/index.js"; + +describe("ClineMCPAdapter", () => { + let adapter: ClineMCPAdapter; + + const testServer: MCPServerDefinition = { + id: "test-server", + name: "Test Server", + transport: { type: "stdio", command: "npx", args: ["-y", "test-mcp"], env: { KEY: "val" } }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new ClineMCPAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("cline")); + it("has correct name", () => expect(adapter.name).toBe("Cline")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config path", () => + 
expect(adapter.configPath).toBe(".cline/mcp_settings.json")); + }); + + describe("generate", () => { + it("generates valid JSON config", async () => { + const files = await adapter.generate([testServer]); + expect(files).toHaveLength(1); + expect(files[0]!.path).toBe(".cline/mcp_settings.json"); + expect(files[0]!.format).toBe("json"); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["test-server"]).toBeDefined(); + expect(parsed.mcpServers["test-server"].command).toBe("npx"); + }); + + it("handles SSE transport", async () => { + const sseServer: MCPServerDefinition = { + id: "sse-server", + name: "SSE Server", + transport: { type: "sse", url: "http://localhost:3000" }, + }; + const files = await adapter.generate([sseServer]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["sse-server"].url).toBe("http://localhost:3000"); + }); + + it("handles empty servers array", async () => { + const files = await adapter.generate([]); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(0); + }); + + it("handles multiple servers", async () => { + const servers = [testServer, { ...testServer, id: "server-2", name: "Server 2" }]; + const files = await adapter.generate(servers); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(2); + }); + + it("generates with optional fields undefined", async () => { + const server: MCPServerDefinition = { + id: "minimal", + name: "Minimal", + transport: { type: "stdio", command: "node" }, + }; + const files = await adapter.generate([server]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["minimal"].args).toEqual([]); + expect(parsed.mcpServers["minimal"].env).toEqual({}); + }); + }); + + describe("import", () => { + it("returns empty array when file does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await 
adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports stdio servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { "my-server": { command: "npx", args: ["-y", "test"], env: { A: "1" } } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.id).toBe("my-server"); + expect(result[0]!.transport.type).toBe("stdio"); + }); + + it("imports SSE servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { sse: { url: "http://localhost:3000" } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.transport.type).toBe("sse"); + }); + + it("imports with missing servers key", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue(JSON.stringify({})); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/mcp/src/adapters/cline.ts b/packages/mcp/src/adapters/cline.ts new file mode 100644 index 0000000..bea8bcc --- /dev/null +++ b/packages/mcp/src/adapters/cline.ts @@ -0,0 +1,79 @@ +import { BaseMCPAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { MCPServerDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { resolve } from "node:path"; + +class ClineMCPAdapter extends BaseMCPAdapter { + readonly id = "cline"; + readonly name = "Cline"; + readonly nativeSupport = true; + readonly configPath = ".cline/mcp_settings.json"; + 
readonly command = "cline"; + + async generate(servers: MCPServerDefinition[]): Promise { + const mcpServers: Record = {}; + for (const server of servers) { + if (server.transport.type === "stdio") { + mcpServers[server.id] = { + command: server.transport.command, + args: server.transport.args ?? [], + env: server.transport.env ?? {}, + }; + } else { + mcpServers[server.id] = { + url: server.transport.url, + headers: server.transport.headers ?? {}, + }; + } + } + return [ + { + path: this.configPath, + content: JSON.stringify({ mcpServers }, null, 2) + "\n", + format: "json", + }, + ]; + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const fullPath = resolve(dir, this.configPath); + if (!existsSync(fullPath)) return []; + const raw = await readFile(fullPath, "utf-8"); + const data = JSON.parse(raw) as { mcpServers?: Record> }; + const servers: MCPServerDefinition[] = []; + for (const [id, config] of Object.entries(data.mcpServers ?? {})) { + if (config.command) { + servers.push({ + id, + name: id, + transport: { + type: "stdio", + command: config.command as string, + args: config.args as string[] | undefined, + env: config.env as Record | undefined, + }, + }); + } else if (config.url) { + servers.push({ + id, + name: id, + transport: { + type: "sse", + url: config.url as string, + headers: config.headers as Record | undefined, + }, + }); + } + } + return servers; + } +} + +const adapter = new ClineMCPAdapter(); +registry.register(adapter); + +export { ClineMCPAdapter }; +export default adapter; diff --git a/packages/mcp/src/adapters/codex.test.ts b/packages/mcp/src/adapters/codex.test.ts new file mode 100644 index 0000000..b2f8a63 --- /dev/null +++ b/packages/mcp/src/adapters/codex.test.ts @@ -0,0 +1,137 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + 
+vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { CodexMCPAdapter } from "./codex.js"; +import type { MCPServerDefinition } from "../types/index.js"; + +describe("CodexMCPAdapter", () => { + let adapter: CodexMCPAdapter; + + const testServer: MCPServerDefinition = { + id: "test-server", + name: "Test Server", + transport: { type: "stdio", command: "npx", args: ["-y", "test-mcp"], env: { KEY: "val" } }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new CodexMCPAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("codex")); + it("has correct name", () => expect(adapter.name).toBe("Codex")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config path", () => expect(adapter.configPath).toBe(".codex/mcp.json")); + }); + + describe("generate", () => { + it("generates valid JSON config", async () => { + const files = await adapter.generate([testServer]); + expect(files).toHaveLength(1); + expect(files[0]!.path).toBe(".codex/mcp.json"); + expect(files[0]!.format).toBe("json"); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["test-server"]).toBeDefined(); + expect(parsed.mcpServers["test-server"].command).toBe("npx"); + }); + + it("handles SSE transport", async () => { + const sseServer: MCPServerDefinition = { + id: "sse-server", + name: "SSE Server", + transport: { type: "sse", url: "http://localhost:3000" }, + }; + const files = await adapter.generate([sseServer]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["sse-server"].url).toBe("http://localhost:3000"); + }); + + it("handles empty servers array", async () => { + const files = await adapter.generate([]); + const parsed = JSON.parse(files[0]!.content); + 
expect(Object.keys(parsed.mcpServers)).toHaveLength(0); + }); + + it("handles multiple servers", async () => { + const servers = [testServer, { ...testServer, id: "server-2", name: "Server 2" }]; + const files = await adapter.generate(servers); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(2); + }); + + it("generates with optional fields undefined", async () => { + const server: MCPServerDefinition = { + id: "minimal", + name: "Minimal", + transport: { type: "stdio", command: "node" }, + }; + const files = await adapter.generate([server]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["minimal"].args).toEqual([]); + expect(parsed.mcpServers["minimal"].env).toEqual({}); + }); + }); + + describe("import", () => { + it("returns empty array when file does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports stdio servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { "my-server": { command: "npx", args: ["-y", "test"], env: { A: "1" } } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.id).toBe("my-server"); + expect(result[0]!.transport.type).toBe("stdio"); + }); + + it("imports SSE servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { sse: { url: "http://localhost:3000" } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.transport.type).toBe("sse"); + }); + + it("imports with missing servers key", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue(JSON.stringify({})); + const 
result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/mcp/src/adapters/codex.ts b/packages/mcp/src/adapters/codex.ts new file mode 100644 index 0000000..0b44607 --- /dev/null +++ b/packages/mcp/src/adapters/codex.ts @@ -0,0 +1,79 @@ +import { BaseMCPAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { MCPServerDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { resolve } from "node:path"; + +class CodexMCPAdapter extends BaseMCPAdapter { + readonly id = "codex"; + readonly name = "Codex"; + readonly nativeSupport = true; + readonly configPath = ".codex/mcp.json"; + readonly command = "codex"; + + async generate(servers: MCPServerDefinition[]): Promise { + const mcpServers: Record = {}; + for (const server of servers) { + if (server.transport.type === "stdio") { + mcpServers[server.id] = { + command: server.transport.command, + args: server.transport.args ?? [], + env: server.transport.env ?? {}, + }; + } else { + mcpServers[server.id] = { + url: server.transport.url, + headers: server.transport.headers ?? {}, + }; + } + } + return [ + { + path: this.configPath, + content: JSON.stringify({ mcpServers }, null, 2) + "\n", + format: "json", + }, + ]; + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const fullPath = resolve(dir, this.configPath); + if (!existsSync(fullPath)) return []; + const raw = await readFile(fullPath, "utf-8"); + const data = JSON.parse(raw) as { mcpServers?: Record> }; + const servers: MCPServerDefinition[] = []; + for (const [id, config] of Object.entries(data.mcpServers ?? 
{})) { + if (config.command) { + servers.push({ + id, + name: id, + transport: { + type: "stdio", + command: config.command as string, + args: config.args as string[] | undefined, + env: config.env as Record | undefined, + }, + }); + } else if (config.url) { + servers.push({ + id, + name: id, + transport: { + type: "sse", + url: config.url as string, + headers: config.headers as Record | undefined, + }, + }); + } + } + return servers; + } +} + +const adapter = new CodexMCPAdapter(); +registry.register(adapter); + +export { CodexMCPAdapter }; +export default adapter; diff --git a/packages/mcp/src/adapters/copilot.test.ts b/packages/mcp/src/adapters/copilot.test.ts new file mode 100644 index 0000000..9b7c43a --- /dev/null +++ b/packages/mcp/src/adapters/copilot.test.ts @@ -0,0 +1,153 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { CopilotMCPAdapter } from "./copilot.js"; +import type { MCPServerDefinition } from "../types/index.js"; + +describe("CopilotMCPAdapter", () => { + let adapter: CopilotMCPAdapter; + + const testServer: MCPServerDefinition = { + id: "test-server", + name: "Test Server", + transport: { type: "stdio", command: "npx", args: ["-y", "test-mcp"], env: { KEY: "val" } }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new CopilotMCPAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("copilot")); + it("has correct name", () => expect(adapter.name).toBe("Copilot")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config path", () => 
expect(adapter.configPath).toBe(".vscode/mcp.json")); + }); + + describe("generate", () => { + it("generates valid JSON config with servers key (not mcpServers)", async () => { + const files = await adapter.generate([testServer]); + expect(files).toHaveLength(1); + expect(files[0]!.path).toBe(".vscode/mcp.json"); + expect(files[0]!.format).toBe("json"); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.servers["test-server"]).toBeDefined(); + expect(parsed.servers["test-server"].command).toBe("npx"); + expect(parsed.servers["test-server"].type).toBe("stdio"); + }); + + it("handles SSE transport", async () => { + const sseServer: MCPServerDefinition = { + id: "sse-server", + name: "SSE Server", + transport: { type: "sse", url: "http://localhost:3000" }, + }; + const files = await adapter.generate([sseServer]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.servers["sse-server"].url).toBe("http://localhost:3000"); + expect(parsed.servers["sse-server"].type).toBe("sse"); + }); + + it("handles empty servers array", async () => { + const files = await adapter.generate([]); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.servers)).toHaveLength(0); + }); + + it("handles multiple servers", async () => { + const servers = [testServer, { ...testServer, id: "server-2", name: "Server 2" }]; + const files = await adapter.generate(servers); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.servers)).toHaveLength(2); + }); + + it("generates with optional fields undefined", async () => { + const server: MCPServerDefinition = { + id: "minimal", + name: "Minimal", + transport: { type: "stdio", command: "node" }, + }; + const files = await adapter.generate([server]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.servers["minimal"].args).toEqual([]); + expect(parsed.servers["minimal"].env).toEqual({}); + }); + }); + + describe("import", () => { + it("returns empty array when file 
does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports stdio servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + servers: { + "my-server": { type: "stdio", command: "npx", args: ["-y", "test"], env: { A: "1" } }, + }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.id).toBe("my-server"); + expect(result[0]!.transport.type).toBe("stdio"); + }); + + it("imports SSE servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + servers: { sse: { type: "sse", url: "http://localhost:3000" } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.transport.type).toBe("sse"); + }); + + it("imports SSE servers by url without type field", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + servers: { sse: { url: "http://localhost:3000" } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.transport.type).toBe("sse"); + }); + + it("imports with missing servers key", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue(JSON.stringify({})); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/mcp/src/adapters/copilot.ts b/packages/mcp/src/adapters/copilot.ts new file mode 100644 index 0000000..2304331 --- /dev/null +++ 
b/packages/mcp/src/adapters/copilot.ts @@ -0,0 +1,80 @@ +import { BaseMCPAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { MCPServerDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { resolve } from "node:path"; + +class CopilotMCPAdapter extends BaseMCPAdapter { + readonly id = "copilot"; + readonly name = "Copilot"; + readonly nativeSupport = true; + readonly configPath = ".vscode/mcp.json"; + + async generate(servers: MCPServerDefinition[]): Promise { + const serversObj: Record = {}; + for (const server of servers) { + if (server.transport.type === "stdio") { + serversObj[server.id] = { + type: "stdio", + command: server.transport.command, + args: server.transport.args ?? [], + env: server.transport.env ?? {}, + }; + } else { + serversObj[server.id] = { + type: "sse", + url: server.transport.url, + headers: server.transport.headers ?? {}, + }; + } + } + return [ + { + path: this.configPath, + content: JSON.stringify({ servers: serversObj }, null, 2) + "\n", + format: "json", + }, + ]; + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const fullPath = resolve(dir, this.configPath); + if (!existsSync(fullPath)) return []; + const raw = await readFile(fullPath, "utf-8"); + const data = JSON.parse(raw) as { servers?: Record> }; + const servers: MCPServerDefinition[] = []; + for (const [id, config] of Object.entries(data.servers ?? 
{})) { + if (config.type === "stdio" || config.command) { + servers.push({ + id, + name: id, + transport: { + type: "stdio", + command: config.command as string, + args: config.args as string[] | undefined, + env: config.env as Record | undefined, + }, + }); + } else if (config.type === "sse" || config.url) { + servers.push({ + id, + name: id, + transport: { + type: "sse", + url: config.url as string, + headers: config.headers as Record | undefined, + }, + }); + } + } + return servers; + } +} + +const adapter = new CopilotMCPAdapter(); +registry.register(adapter); + +export { CopilotMCPAdapter }; +export default adapter; diff --git a/packages/mcp/src/adapters/cursor.test.ts b/packages/mcp/src/adapters/cursor.test.ts new file mode 100644 index 0000000..9db78ab --- /dev/null +++ b/packages/mcp/src/adapters/cursor.test.ts @@ -0,0 +1,137 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { CursorMCPAdapter } from "./cursor.js"; +import type { MCPServerDefinition } from "../types/index.js"; + +describe("CursorMCPAdapter", () => { + let adapter: CursorMCPAdapter; + + const testServer: MCPServerDefinition = { + id: "test-server", + name: "Test Server", + transport: { type: "stdio", command: "npx", args: ["-y", "test-mcp"], env: { KEY: "val" } }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new CursorMCPAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("cursor")); + it("has correct name", () => expect(adapter.name).toBe("Cursor")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct 
config path", () => expect(adapter.configPath).toBe(".cursor/mcp.json")); + }); + + describe("generate", () => { + it("generates valid JSON config", async () => { + const files = await adapter.generate([testServer]); + expect(files).toHaveLength(1); + expect(files[0]!.path).toBe(".cursor/mcp.json"); + expect(files[0]!.format).toBe("json"); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["test-server"]).toBeDefined(); + expect(parsed.mcpServers["test-server"].command).toBe("npx"); + }); + + it("handles SSE transport", async () => { + const sseServer: MCPServerDefinition = { + id: "sse-server", + name: "SSE Server", + transport: { type: "sse", url: "http://localhost:3000" }, + }; + const files = await adapter.generate([sseServer]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["sse-server"].url).toBe("http://localhost:3000"); + }); + + it("handles empty servers array", async () => { + const files = await adapter.generate([]); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(0); + }); + + it("handles multiple servers", async () => { + const servers = [testServer, { ...testServer, id: "server-2", name: "Server 2" }]; + const files = await adapter.generate(servers); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(2); + }); + + it("generates with optional fields undefined", async () => { + const server: MCPServerDefinition = { + id: "minimal", + name: "Minimal", + transport: { type: "stdio", command: "node" }, + }; + const files = await adapter.generate([server]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["minimal"].args).toEqual([]); + expect(parsed.mcpServers["minimal"].env).toEqual({}); + }); + }); + + describe("import", () => { + it("returns empty array when file does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await 
adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports stdio servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { "my-server": { command: "npx", args: ["-y", "test"], env: { A: "1" } } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.id).toBe("my-server"); + expect(result[0]!.transport.type).toBe("stdio"); + }); + + it("imports SSE servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { sse: { url: "http://localhost:3000" } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.transport.type).toBe("sse"); + }); + + it("imports with missing servers key", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue(JSON.stringify({})); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/mcp/src/adapters/cursor.ts b/packages/mcp/src/adapters/cursor.ts new file mode 100644 index 0000000..5d4e791 --- /dev/null +++ b/packages/mcp/src/adapters/cursor.ts @@ -0,0 +1,79 @@ +import { BaseMCPAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { MCPServerDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { resolve } from "node:path"; + +class CursorMCPAdapter extends BaseMCPAdapter { + readonly id = "cursor"; + readonly name = "Cursor"; + readonly nativeSupport = true; + readonly configPath = ".cursor/mcp.json"; + 
readonly command = "cursor"; + + async generate(servers: MCPServerDefinition[]): Promise { + const mcpServers: Record = {}; + for (const server of servers) { + if (server.transport.type === "stdio") { + mcpServers[server.id] = { + command: server.transport.command, + args: server.transport.args ?? [], + env: server.transport.env ?? {}, + }; + } else { + mcpServers[server.id] = { + url: server.transport.url, + headers: server.transport.headers ?? {}, + }; + } + } + return [ + { + path: this.configPath, + content: JSON.stringify({ mcpServers }, null, 2) + "\n", + format: "json", + }, + ]; + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const fullPath = resolve(dir, this.configPath); + if (!existsSync(fullPath)) return []; + const raw = await readFile(fullPath, "utf-8"); + const data = JSON.parse(raw) as { mcpServers?: Record> }; + const servers: MCPServerDefinition[] = []; + for (const [id, config] of Object.entries(data.mcpServers ?? {})) { + if (config.command) { + servers.push({ + id, + name: id, + transport: { + type: "stdio", + command: config.command as string, + args: config.args as string[] | undefined, + env: config.env as Record | undefined, + }, + }); + } else if (config.url) { + servers.push({ + id, + name: id, + transport: { + type: "sse", + url: config.url as string, + headers: config.headers as Record | undefined, + }, + }); + } + } + return servers; + } +} + +const adapter = new CursorMCPAdapter(); +registry.register(adapter); + +export { CursorMCPAdapter }; +export default adapter; diff --git a/packages/mcp/src/adapters/droid.test.ts b/packages/mcp/src/adapters/droid.test.ts new file mode 100644 index 0000000..5c2d541 --- /dev/null +++ b/packages/mcp/src/adapters/droid.test.ts @@ -0,0 +1,137 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + 
+vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { DroidMCPAdapter } from "./droid.js"; +import type { MCPServerDefinition } from "../types/index.js"; + +describe("DroidMCPAdapter", () => { + let adapter: DroidMCPAdapter; + + const testServer: MCPServerDefinition = { + id: "test-server", + name: "Test Server", + transport: { type: "stdio", command: "npx", args: ["-y", "test-mcp"], env: { KEY: "val" } }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new DroidMCPAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("droid")); + it("has correct name", () => expect(adapter.name).toBe("Droid")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config path", () => expect(adapter.configPath).toBe(".factory/mcp.json")); + }); + + describe("generate", () => { + it("generates valid JSON config", async () => { + const files = await adapter.generate([testServer]); + expect(files).toHaveLength(1); + expect(files[0]!.path).toBe(".factory/mcp.json"); + expect(files[0]!.format).toBe("json"); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["test-server"]).toBeDefined(); + expect(parsed.mcpServers["test-server"].command).toBe("npx"); + }); + + it("handles SSE transport", async () => { + const sseServer: MCPServerDefinition = { + id: "sse-server", + name: "SSE Server", + transport: { type: "sse", url: "http://localhost:3000" }, + }; + const files = await adapter.generate([sseServer]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["sse-server"].url).toBe("http://localhost:3000"); + }); + + it("handles empty servers array", async () => { + const files = await adapter.generate([]); + const parsed = JSON.parse(files[0]!.content); + 
expect(Object.keys(parsed.mcpServers)).toHaveLength(0); + }); + + it("handles multiple servers", async () => { + const servers = [testServer, { ...testServer, id: "server-2", name: "Server 2" }]; + const files = await adapter.generate(servers); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(2); + }); + + it("generates with optional fields undefined", async () => { + const server: MCPServerDefinition = { + id: "minimal", + name: "Minimal", + transport: { type: "stdio", command: "node" }, + }; + const files = await adapter.generate([server]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["minimal"].args).toEqual([]); + expect(parsed.mcpServers["minimal"].env).toEqual({}); + }); + }); + + describe("import", () => { + it("returns empty array when file does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports stdio servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { "my-server": { command: "npx", args: ["-y", "test"], env: { A: "1" } } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.id).toBe("my-server"); + expect(result[0]!.transport.type).toBe("stdio"); + }); + + it("imports SSE servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { sse: { url: "http://localhost:3000" } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.transport.type).toBe("sse"); + }); + + it("imports with missing servers key", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue(JSON.stringify({})); + const 
result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/mcp/src/adapters/droid.ts b/packages/mcp/src/adapters/droid.ts new file mode 100644 index 0000000..accb0ae --- /dev/null +++ b/packages/mcp/src/adapters/droid.ts @@ -0,0 +1,79 @@ +import { BaseMCPAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { MCPServerDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { resolve } from "node:path"; + +class DroidMCPAdapter extends BaseMCPAdapter { + readonly id = "droid"; + readonly name = "Droid"; + readonly nativeSupport = true; + readonly configPath = ".factory/mcp.json"; + readonly command = "droid"; + + async generate(servers: MCPServerDefinition[]): Promise { + const mcpServers: Record = {}; + for (const server of servers) { + if (server.transport.type === "stdio") { + mcpServers[server.id] = { + command: server.transport.command, + args: server.transport.args ?? [], + env: server.transport.env ?? {}, + }; + } else { + mcpServers[server.id] = { + url: server.transport.url, + headers: server.transport.headers ?? {}, + }; + } + } + return [ + { + path: this.configPath, + content: JSON.stringify({ mcpServers }, null, 2) + "\n", + format: "json", + }, + ]; + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const fullPath = resolve(dir, this.configPath); + if (!existsSync(fullPath)) return []; + const raw = await readFile(fullPath, "utf-8"); + const data = JSON.parse(raw) as { mcpServers?: Record> }; + const servers: MCPServerDefinition[] = []; + for (const [id, config] of Object.entries(data.mcpServers ?? 
{})) { + if (config.command) { + servers.push({ + id, + name: id, + transport: { + type: "stdio", + command: config.command as string, + args: config.args as string[] | undefined, + env: config.env as Record | undefined, + }, + }); + } else if (config.url) { + servers.push({ + id, + name: id, + transport: { + type: "sse", + url: config.url as string, + headers: config.headers as Record | undefined, + }, + }); + } + } + return servers; + } +} + +const adapter = new DroidMCPAdapter(); +registry.register(adapter); + +export { DroidMCPAdapter }; +export default adapter; diff --git a/packages/mcp/src/adapters/gemini-cli.test.ts b/packages/mcp/src/adapters/gemini-cli.test.ts new file mode 100644 index 0000000..1df5e7d --- /dev/null +++ b/packages/mcp/src/adapters/gemini-cli.test.ts @@ -0,0 +1,140 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { GeminiCliMCPAdapter } from "./gemini-cli.js"; +import type { MCPServerDefinition } from "../types/index.js"; + +describe("GeminiCliMCPAdapter", () => { + let adapter: GeminiCliMCPAdapter; + + const testServer: MCPServerDefinition = { + id: "test-server", + name: "Test Server", + transport: { type: "stdio", command: "npx", args: ["-y", "test-mcp"], env: { KEY: "val" } }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new GeminiCliMCPAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("gemini-cli")); + it("has correct name", () => expect(adapter.name).toBe("Gemini CLI")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config path", () => 
expect(adapter.configPath).toBe(".gemini/settings.json")); + }); + + describe("generate", () => { + it("generates valid JSON config without env", async () => { + const files = await adapter.generate([testServer]); + expect(files).toHaveLength(1); + expect(files[0]!.path).toBe(".gemini/settings.json"); + expect(files[0]!.format).toBe("json"); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["test-server"]).toBeDefined(); + expect(parsed.mcpServers["test-server"].command).toBe("npx"); + expect(parsed.mcpServers["test-server"].args).toEqual(["-y", "test-mcp"]); + // Gemini CLI does not support env in its config + expect(parsed.mcpServers["test-server"].env).toBeUndefined(); + }); + + it("handles SSE transport", async () => { + const sseServer: MCPServerDefinition = { + id: "sse-server", + name: "SSE Server", + transport: { type: "sse", url: "http://localhost:3000" }, + }; + const files = await adapter.generate([sseServer]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["sse-server"].url).toBe("http://localhost:3000"); + }); + + it("handles empty servers array", async () => { + const files = await adapter.generate([]); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(0); + }); + + it("handles multiple servers", async () => { + const servers = [testServer, { ...testServer, id: "server-2", name: "Server 2" }]; + const files = await adapter.generate(servers); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(2); + }); + + it("generates with optional fields undefined", async () => { + const server: MCPServerDefinition = { + id: "minimal", + name: "Minimal", + transport: { type: "stdio", command: "node" }, + }; + const files = await adapter.generate([server]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["minimal"].args).toEqual([]); + 
expect(parsed.mcpServers["minimal"].env).toBeUndefined(); + }); + }); + + describe("import", () => { + it("returns empty array when file does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports stdio servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { "my-server": { command: "npx", args: ["-y", "test"] } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.id).toBe("my-server"); + expect(result[0]!.transport.type).toBe("stdio"); + }); + + it("imports SSE servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { sse: { url: "http://localhost:3000" } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.transport.type).toBe("sse"); + }); + + it("imports with missing servers key", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue(JSON.stringify({})); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/mcp/src/adapters/gemini-cli.ts b/packages/mcp/src/adapters/gemini-cli.ts new file mode 100644 index 0000000..fa51bb9 --- /dev/null +++ b/packages/mcp/src/adapters/gemini-cli.ts @@ -0,0 +1,78 @@ +import { BaseMCPAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { MCPServerDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile } from 
"node:fs/promises"; +import { resolve } from "node:path"; + +class GeminiCliMCPAdapter extends BaseMCPAdapter { + readonly id = "gemini-cli"; + readonly name = "Gemini CLI"; + readonly nativeSupport = true; + readonly configPath = ".gemini/settings.json"; + readonly command = "gemini"; + + async generate(servers: MCPServerDefinition[]): Promise { + const mcpServers: Record = {}; + for (const server of servers) { + if (server.transport.type === "stdio") { + // Gemini CLI does not support env in its config + mcpServers[server.id] = { + command: server.transport.command, + args: server.transport.args ?? [], + }; + } else { + mcpServers[server.id] = { + url: server.transport.url, + headers: server.transport.headers ?? {}, + }; + } + } + return [ + { + path: this.configPath, + content: JSON.stringify({ mcpServers }, null, 2) + "\n", + format: "json", + }, + ]; + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const fullPath = resolve(dir, this.configPath); + if (!existsSync(fullPath)) return []; + const raw = await readFile(fullPath, "utf-8"); + const data = JSON.parse(raw) as { mcpServers?: Record> }; + const servers: MCPServerDefinition[] = []; + for (const [id, config] of Object.entries(data.mcpServers ?? 
{})) { + if (config.command) { + servers.push({ + id, + name: id, + transport: { + type: "stdio", + command: config.command as string, + args: config.args as string[] | undefined, + }, + }); + } else if (config.url) { + servers.push({ + id, + name: id, + transport: { + type: "sse", + url: config.url as string, + headers: config.headers as Record | undefined, + }, + }); + } + } + return servers; + } +} + +const adapter = new GeminiCliMCPAdapter(); +registry.register(adapter); + +export { GeminiCliMCPAdapter }; +export default adapter; diff --git a/packages/mcp/src/adapters/index.ts b/packages/mcp/src/adapters/index.ts new file mode 100644 index 0000000..f6c9c78 --- /dev/null +++ b/packages/mcp/src/adapters/index.ts @@ -0,0 +1,2 @@ +export { registry } from "./registry.js"; +export { BaseMCPAdapter } from "./base.js"; diff --git a/packages/mcp/src/adapters/kiro.test.ts b/packages/mcp/src/adapters/kiro.test.ts new file mode 100644 index 0000000..9f28f08 --- /dev/null +++ b/packages/mcp/src/adapters/kiro.test.ts @@ -0,0 +1,137 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { KiroMCPAdapter } from "./kiro.js"; +import type { MCPServerDefinition } from "../types/index.js"; + +describe("KiroMCPAdapter", () => { + let adapter: KiroMCPAdapter; + + const testServer: MCPServerDefinition = { + id: "test-server", + name: "Test Server", + transport: { type: "stdio", command: "npx", args: ["-y", "test-mcp"], env: { KEY: "val" } }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new KiroMCPAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => 
expect(adapter.id).toBe("kiro")); + it("has correct name", () => expect(adapter.name).toBe("Kiro")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config path", () => expect(adapter.configPath).toBe(".kiro/mcp.json")); + }); + + describe("generate", () => { + it("generates valid JSON config", async () => { + const files = await adapter.generate([testServer]); + expect(files).toHaveLength(1); + expect(files[0]!.path).toBe(".kiro/mcp.json"); + expect(files[0]!.format).toBe("json"); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["test-server"]).toBeDefined(); + expect(parsed.mcpServers["test-server"].command).toBe("npx"); + }); + + it("handles SSE transport", async () => { + const sseServer: MCPServerDefinition = { + id: "sse-server", + name: "SSE Server", + transport: { type: "sse", url: "http://localhost:3000" }, + }; + const files = await adapter.generate([sseServer]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["sse-server"].url).toBe("http://localhost:3000"); + }); + + it("handles empty servers array", async () => { + const files = await adapter.generate([]); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(0); + }); + + it("handles multiple servers", async () => { + const servers = [testServer, { ...testServer, id: "server-2", name: "Server 2" }]; + const files = await adapter.generate(servers); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(2); + }); + + it("generates with optional fields undefined", async () => { + const server: MCPServerDefinition = { + id: "minimal", + name: "Minimal", + transport: { type: "stdio", command: "node" }, + }; + const files = await adapter.generate([server]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["minimal"].args).toEqual([]); + expect(parsed.mcpServers["minimal"].env).toEqual({}); + 
}); + }); + + describe("import", () => { + it("returns empty array when file does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports stdio servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { "my-server": { command: "npx", args: ["-y", "test"], env: { A: "1" } } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.id).toBe("my-server"); + expect(result[0]!.transport.type).toBe("stdio"); + }); + + it("imports SSE servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { sse: { url: "http://localhost:3000" } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.transport.type).toBe("sse"); + }); + + it("imports with missing servers key", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue(JSON.stringify({})); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/mcp/src/adapters/kiro.ts b/packages/mcp/src/adapters/kiro.ts new file mode 100644 index 0000000..bce5e94 --- /dev/null +++ b/packages/mcp/src/adapters/kiro.ts @@ -0,0 +1,79 @@ +import { BaseMCPAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { MCPServerDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { resolve } from "node:path"; + +class 
KiroMCPAdapter extends BaseMCPAdapter { + readonly id = "kiro"; + readonly name = "Kiro"; + readonly nativeSupport = true; + readonly configPath = ".kiro/mcp.json"; + readonly command = "kiro"; + + async generate(servers: MCPServerDefinition[]): Promise<GeneratedFile[]> { + const mcpServers: Record<string, unknown> = {}; + for (const server of servers) { + if (server.transport.type === "stdio") { + mcpServers[server.id] = { + command: server.transport.command, + args: server.transport.args ?? [], + env: server.transport.env ?? {}, + }; + } else { + mcpServers[server.id] = { + url: server.transport.url, + headers: server.transport.headers ?? {}, + }; + } + } + return [ + { + path: this.configPath, + content: JSON.stringify({ mcpServers }, null, 2) + "\n", + format: "json", + }, + ]; + } + + async import(cwd?: string): Promise<MCPServerDefinition[]> { + const dir = cwd ?? process.cwd(); + const fullPath = resolve(dir, this.configPath); + if (!existsSync(fullPath)) return []; + const raw = await readFile(fullPath, "utf-8"); + const data = JSON.parse(raw) as { mcpServers?: Record<string, Record<string, unknown>> }; + const servers: MCPServerDefinition[] = []; + for (const [id, config] of Object.entries(data.mcpServers ?? 
{})) { + if (config.command) { + servers.push({ + id, + name: id, + transport: { + type: "stdio", + command: config.command as string, + args: config.args as string[] | undefined, + env: config.env as Record | undefined, + }, + }); + } else if (config.url) { + servers.push({ + id, + name: id, + transport: { + type: "sse", + url: config.url as string, + headers: config.headers as Record | undefined, + }, + }); + } + } + return servers; + } +} + +const adapter = new KiroMCPAdapter(); +registry.register(adapter); + +export { KiroMCPAdapter }; +export default adapter; diff --git a/packages/mcp/src/adapters/opencode.test.ts b/packages/mcp/src/adapters/opencode.test.ts new file mode 100644 index 0000000..0a3c14b --- /dev/null +++ b/packages/mcp/src/adapters/opencode.test.ts @@ -0,0 +1,137 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { OpenCodeMCPAdapter } from "./opencode.js"; +import type { MCPServerDefinition } from "../types/index.js"; + +describe("OpenCodeMCPAdapter", () => { + let adapter: OpenCodeMCPAdapter; + + const testServer: MCPServerDefinition = { + id: "test-server", + name: "Test Server", + transport: { type: "stdio", command: "npx", args: ["-y", "test-mcp"], env: { KEY: "val" } }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new OpenCodeMCPAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("opencode")); + it("has correct name", () => expect(adapter.name).toBe("OpenCode")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config path", () => 
expect(adapter.configPath).toBe(".opencode/mcp.json")); + }); + + describe("generate", () => { + it("generates valid JSON config", async () => { + const files = await adapter.generate([testServer]); + expect(files).toHaveLength(1); + expect(files[0]!.path).toBe(".opencode/mcp.json"); + expect(files[0]!.format).toBe("json"); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["test-server"]).toBeDefined(); + expect(parsed.mcpServers["test-server"].command).toBe("npx"); + }); + + it("handles SSE transport", async () => { + const sseServer: MCPServerDefinition = { + id: "sse-server", + name: "SSE Server", + transport: { type: "sse", url: "http://localhost:3000" }, + }; + const files = await adapter.generate([sseServer]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["sse-server"].url).toBe("http://localhost:3000"); + }); + + it("handles empty servers array", async () => { + const files = await adapter.generate([]); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(0); + }); + + it("handles multiple servers", async () => { + const servers = [testServer, { ...testServer, id: "server-2", name: "Server 2" }]; + const files = await adapter.generate(servers); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(2); + }); + + it("generates with optional fields undefined", async () => { + const server: MCPServerDefinition = { + id: "minimal", + name: "Minimal", + transport: { type: "stdio", command: "node" }, + }; + const files = await adapter.generate([server]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["minimal"].args).toEqual([]); + expect(parsed.mcpServers["minimal"].env).toEqual({}); + }); + }); + + describe("import", () => { + it("returns empty array when file does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + 
expect(result).toEqual([]); + }); + + it("imports stdio servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { "my-server": { command: "npx", args: ["-y", "test"], env: { A: "1" } } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.id).toBe("my-server"); + expect(result[0]!.transport.type).toBe("stdio"); + }); + + it("imports SSE servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { sse: { url: "http://localhost:3000" } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.transport.type).toBe("sse"); + }); + + it("imports with missing servers key", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue(JSON.stringify({})); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/mcp/src/adapters/opencode.ts b/packages/mcp/src/adapters/opencode.ts new file mode 100644 index 0000000..cfa1365 --- /dev/null +++ b/packages/mcp/src/adapters/opencode.ts @@ -0,0 +1,79 @@ +import { BaseMCPAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { MCPServerDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { resolve } from "node:path"; + +class OpenCodeMCPAdapter extends BaseMCPAdapter { + readonly id = "opencode"; + readonly name = "OpenCode"; + readonly nativeSupport = true; + readonly configPath = ".opencode/mcp.json"; + readonly command = 
"opencode"; + + async generate(servers: MCPServerDefinition[]): Promise { + const mcpServers: Record = {}; + for (const server of servers) { + if (server.transport.type === "stdio") { + mcpServers[server.id] = { + command: server.transport.command, + args: server.transport.args ?? [], + env: server.transport.env ?? {}, + }; + } else { + mcpServers[server.id] = { + url: server.transport.url, + headers: server.transport.headers ?? {}, + }; + } + } + return [ + { + path: this.configPath, + content: JSON.stringify({ mcpServers }, null, 2) + "\n", + format: "json", + }, + ]; + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const fullPath = resolve(dir, this.configPath); + if (!existsSync(fullPath)) return []; + const raw = await readFile(fullPath, "utf-8"); + const data = JSON.parse(raw) as { mcpServers?: Record> }; + const servers: MCPServerDefinition[] = []; + for (const [id, config] of Object.entries(data.mcpServers ?? {})) { + if (config.command) { + servers.push({ + id, + name: id, + transport: { + type: "stdio", + command: config.command as string, + args: config.args as string[] | undefined, + env: config.env as Record | undefined, + }, + }); + } else if (config.url) { + servers.push({ + id, + name: id, + transport: { + type: "sse", + url: config.url as string, + headers: config.headers as Record | undefined, + }, + }); + } + } + return servers; + } +} + +const adapter = new OpenCodeMCPAdapter(); +registry.register(adapter); + +export { OpenCodeMCPAdapter }; +export default adapter; diff --git a/packages/mcp/src/adapters/registry.test.ts b/packages/mcp/src/adapters/registry.test.ts new file mode 100644 index 0000000..b61bf4f --- /dev/null +++ b/packages/mcp/src/adapters/registry.test.ts @@ -0,0 +1,154 @@ +import { describe, it, expect, beforeEach, vi } from "vitest"; +import { registry } from "./registry.js"; +import type { BaseMCPAdapter } from "./base.js"; +import type { MCPServerDefinition, GeneratedFile } from 
"../types/index.js"; + +function makeFakeAdapter(id: string, detects: boolean = true): BaseMCPAdapter { + return { + id, + name: `${id} Adapter`, + nativeSupport: true, + configPath: `.${id}/mcp.json`, + detect: async () => detects, + generate: async (_servers: MCPServerDefinition[]) => [] as GeneratedFile[], + import: async () => [] as MCPServerDefinition[], + install: async () => {}, + uninstall: async () => {}, + } as unknown as BaseMCPAdapter; +} + +describe("Real MCPAdapterRegistry singleton", () => { + beforeEach(() => { + registry.clear(); + }); + + describe("register / get", () => { + it("registers and retrieves an adapter by id", () => { + const adapter = makeFakeAdapter("claude-code"); + registry.register(adapter); + expect(registry.get("claude-code")).toBe(adapter); + }); + + it("returns undefined for unknown adapter id", () => { + expect(registry.get("nonexistent")).toBeUndefined(); + }); + + it("overwrites a previously registered adapter with the same id", () => { + const first = makeFakeAdapter("dupe"); + const second = makeFakeAdapter("dupe"); + registry.register(first); + registry.register(second); + expect(registry.get("dupe")).toBe(second); + }); + }); + + describe("list", () => { + it("returns empty array when nothing registered", () => { + expect(registry.list()).toEqual([]); + }); + + it("lists all registered adapter IDs", () => { + registry.register(makeFakeAdapter("a")); + registry.register(makeFakeAdapter("b")); + expect(registry.list().toSorted()).toEqual(["a", "b"]); + }); + }); + + describe("getAll", () => { + it("returns empty array when nothing registered", () => { + expect(registry.getAll()).toEqual([]); + }); + + it("returns all registered adapters", () => { + const a = makeFakeAdapter("a"); + const b = makeFakeAdapter("b"); + registry.register(a); + registry.register(b); + const all = registry.getAll(); + expect(all).toHaveLength(2); + expect(all).toContain(a); + expect(all).toContain(b); + }); + }); + + describe("detectAll", () => { 
+ it("returns adapters that detect successfully", async () => { + registry.register(makeFakeAdapter("found", true)); + registry.register(makeFakeAdapter("missing", false)); + + const detected = await registry.detectAll(); + expect(detected).toHaveLength(1); + expect(detected[0]?.id).toBe("found"); + }); + + it("skips adapters that throw during detection", async () => { + const throwingAdapter = makeFakeAdapter("broken"); + throwingAdapter.detect = async () => { + throw new Error("detection crashed"); + }; + registry.register(throwingAdapter); + registry.register(makeFakeAdapter("stable", true)); + + const detected = await registry.detectAll(); + expect(detected).toHaveLength(1); + expect(detected[0]?.id).toBe("stable"); + }); + + it("returns empty array when no adapters detect", async () => { + registry.register(makeFakeAdapter("a", false)); + registry.register(makeFakeAdapter("b", false)); + + const detected = await registry.detectAll(); + expect(detected).toEqual([]); + }); + + it("passes cwd to adapter.detect", async () => { + const detectFn = vi.fn().mockResolvedValue(true); + const adapter = makeFakeAdapter("with-cwd"); + adapter.detect = detectFn; + registry.register(adapter); + + await registry.detectAll("/custom/dir"); + expect(detectFn).toHaveBeenCalledWith("/custom/dir"); + }); + + it("handles multiple throwing adapters gracefully", async () => { + const spy = vi.fn(); + for (let i = 0; i < 3; i++) { + const adapter = makeFakeAdapter(`throw-${i}`); + adapter.detect = async () => { + spy(); + throw new Error(`fail-${i}`); + }; + registry.register(adapter); + } + + const detected = await registry.detectAll(); + expect(detected).toEqual([]); + expect(spy).toHaveBeenCalledTimes(3); + }); + }); + + describe("clear", () => { + it("removes all adapters", () => { + registry.register(makeFakeAdapter("a")); + registry.register(makeFakeAdapter("b")); + + registry.clear(); + + expect(registry.list()).toEqual([]); + expect(registry.get("a")).toBeUndefined(); + 
expect(registry.get("b")).toBeUndefined(); + }); + + it("allows re-registration after clear", () => { + registry.register(makeFakeAdapter("x")); + registry.clear(); + registry.register(makeFakeAdapter("y")); + + expect(registry.list()).toEqual(["y"]); + expect(registry.get("x")).toBeUndefined(); + expect(registry.get("y")?.id).toBe("y"); + }); + }); +}); diff --git a/packages/mcp/src/adapters/registry.ts b/packages/mcp/src/adapters/registry.ts new file mode 100644 index 0000000..b97f3d3 --- /dev/null +++ b/packages/mcp/src/adapters/registry.ts @@ -0,0 +1,44 @@ +import type { BaseMCPAdapter } from "./base.js"; + +class MCPAdapterRegistry { + private adapters: Map = new Map(); + + register(adapter: BaseMCPAdapter): void { + this.adapters.set(adapter.id, adapter); + } + + get(id: string): BaseMCPAdapter | undefined { + return this.adapters.get(id); + } + + list(): string[] { + return [...this.adapters.keys()]; + } + + getAll(): BaseMCPAdapter[] { + return [...this.adapters.values()]; + } + + async detectAll(cwd?: string): Promise { + const detected: BaseMCPAdapter[] = []; + for (const adapter of this.adapters.values()) { + try { + if (await adapter.detect(cwd)) detected.push(adapter); + } catch { + /* skip */ + } + } + return detected; + } + + clear(): void { + this.adapters.clear(); + } +} + +declare global { + // eslint-disable-next-line no-var + var __premierstudio_mcp_registry: MCPAdapterRegistry | undefined; +} + +export const registry = (globalThis.__premierstudio_mcp_registry ??= new MCPAdapterRegistry()); diff --git a/packages/mcp/src/adapters/roo-code.test.ts b/packages/mcp/src/adapters/roo-code.test.ts new file mode 100644 index 0000000..7515441 --- /dev/null +++ b/packages/mcp/src/adapters/roo-code.test.ts @@ -0,0 +1,137 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + 
+vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { RooCodeMCPAdapter } from "./roo-code.js"; +import type { MCPServerDefinition } from "../types/index.js"; + +describe("RooCodeMCPAdapter", () => { + let adapter: RooCodeMCPAdapter; + + const testServer: MCPServerDefinition = { + id: "test-server", + name: "Test Server", + transport: { type: "stdio", command: "npx", args: ["-y", "test-mcp"], env: { KEY: "val" } }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new RooCodeMCPAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("roo-code")); + it("has correct name", () => expect(adapter.name).toBe("Roo Code")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config path", () => expect(adapter.configPath).toBe(".roo/mcp.json")); + }); + + describe("generate", () => { + it("generates valid JSON config", async () => { + const files = await adapter.generate([testServer]); + expect(files).toHaveLength(1); + expect(files[0]!.path).toBe(".roo/mcp.json"); + expect(files[0]!.format).toBe("json"); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["test-server"]).toBeDefined(); + expect(parsed.mcpServers["test-server"].command).toBe("npx"); + }); + + it("handles SSE transport", async () => { + const sseServer: MCPServerDefinition = { + id: "sse-server", + name: "SSE Server", + transport: { type: "sse", url: "http://localhost:3000" }, + }; + const files = await adapter.generate([sseServer]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["sse-server"].url).toBe("http://localhost:3000"); + }); + + it("handles empty servers array", async () => { + const files = await adapter.generate([]); + const parsed = JSON.parse(files[0]!.content); + 
expect(Object.keys(parsed.mcpServers)).toHaveLength(0); + }); + + it("handles multiple servers", async () => { + const servers = [testServer, { ...testServer, id: "server-2", name: "Server 2" }]; + const files = await adapter.generate(servers); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(2); + }); + + it("generates with optional fields undefined", async () => { + const server: MCPServerDefinition = { + id: "minimal", + name: "Minimal", + transport: { type: "stdio", command: "node" }, + }; + const files = await adapter.generate([server]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["minimal"].args).toEqual([]); + expect(parsed.mcpServers["minimal"].env).toEqual({}); + }); + }); + + describe("import", () => { + it("returns empty array when file does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports stdio servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { "my-server": { command: "npx", args: ["-y", "test"], env: { A: "1" } } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.id).toBe("my-server"); + expect(result[0]!.transport.type).toBe("stdio"); + }); + + it("imports SSE servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { sse: { url: "http://localhost:3000" } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.transport.type).toBe("sse"); + }); + + it("imports with missing servers key", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue(JSON.stringify({})); + const 
result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/mcp/src/adapters/roo-code.ts b/packages/mcp/src/adapters/roo-code.ts new file mode 100644 index 0000000..4460788 --- /dev/null +++ b/packages/mcp/src/adapters/roo-code.ts @@ -0,0 +1,78 @@ +import { BaseMCPAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { MCPServerDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { resolve } from "node:path"; + +class RooCodeMCPAdapter extends BaseMCPAdapter { + readonly id = "roo-code"; + readonly name = "Roo Code"; + readonly nativeSupport = true; + readonly configPath = ".roo/mcp.json"; + + async generate(servers: MCPServerDefinition[]): Promise { + const mcpServers: Record = {}; + for (const server of servers) { + if (server.transport.type === "stdio") { + mcpServers[server.id] = { + command: server.transport.command, + args: server.transport.args ?? [], + env: server.transport.env ?? {}, + }; + } else { + mcpServers[server.id] = { + url: server.transport.url, + headers: server.transport.headers ?? {}, + }; + } + } + return [ + { + path: this.configPath, + content: JSON.stringify({ mcpServers }, null, 2) + "\n", + format: "json", + }, + ]; + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const fullPath = resolve(dir, this.configPath); + if (!existsSync(fullPath)) return []; + const raw = await readFile(fullPath, "utf-8"); + const data = JSON.parse(raw) as { mcpServers?: Record> }; + const servers: MCPServerDefinition[] = []; + for (const [id, config] of Object.entries(data.mcpServers ?? 
{})) { + if (config.command) { + servers.push({ + id, + name: id, + transport: { + type: "stdio", + command: config.command as string, + args: config.args as string[] | undefined, + env: config.env as Record | undefined, + }, + }); + } else if (config.url) { + servers.push({ + id, + name: id, + transport: { + type: "sse", + url: config.url as string, + headers: config.headers as Record | undefined, + }, + }); + } + } + return servers; + } +} + +const adapter = new RooCodeMCPAdapter(); +registry.register(adapter); + +export { RooCodeMCPAdapter }; +export default adapter; diff --git a/packages/mcp/src/adapters/windsurf.test.ts b/packages/mcp/src/adapters/windsurf.test.ts new file mode 100644 index 0000000..e0fddaa --- /dev/null +++ b/packages/mcp/src/adapters/windsurf.test.ts @@ -0,0 +1,137 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { WindsurfMCPAdapter } from "./windsurf.js"; +import type { MCPServerDefinition } from "../types/index.js"; + +describe("WindsurfMCPAdapter", () => { + let adapter: WindsurfMCPAdapter; + + const testServer: MCPServerDefinition = { + id: "test-server", + name: "Test Server", + transport: { type: "stdio", command: "npx", args: ["-y", "test-mcp"], env: { KEY: "val" } }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new WindsurfMCPAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("windsurf")); + it("has correct name", () => expect(adapter.name).toBe("Windsurf")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config path", () => 
expect(adapter.configPath).toBe("mcp_config.json")); + }); + + describe("generate", () => { + it("generates valid JSON config", async () => { + const files = await adapter.generate([testServer]); + expect(files).toHaveLength(1); + expect(files[0]!.path).toBe("mcp_config.json"); + expect(files[0]!.format).toBe("json"); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["test-server"]).toBeDefined(); + expect(parsed.mcpServers["test-server"].command).toBe("npx"); + }); + + it("handles SSE transport", async () => { + const sseServer: MCPServerDefinition = { + id: "sse-server", + name: "SSE Server", + transport: { type: "sse", url: "http://localhost:3000" }, + }; + const files = await adapter.generate([sseServer]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["sse-server"].url).toBe("http://localhost:3000"); + }); + + it("handles empty servers array", async () => { + const files = await adapter.generate([]); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(0); + }); + + it("handles multiple servers", async () => { + const servers = [testServer, { ...testServer, id: "server-2", name: "Server 2" }]; + const files = await adapter.generate(servers); + const parsed = JSON.parse(files[0]!.content); + expect(Object.keys(parsed.mcpServers)).toHaveLength(2); + }); + + it("generates with optional fields undefined", async () => { + const server: MCPServerDefinition = { + id: "minimal", + name: "Minimal", + transport: { type: "stdio", command: "node" }, + }; + const files = await adapter.generate([server]); + const parsed = JSON.parse(files[0]!.content); + expect(parsed.mcpServers["minimal"].args).toEqual([]); + expect(parsed.mcpServers["minimal"].env).toEqual({}); + }); + }); + + describe("import", () => { + it("returns empty array when file does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + 
expect(result).toEqual([]); + }); + + it("imports stdio servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { "my-server": { command: "npx", args: ["-y", "test"], env: { A: "1" } } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.id).toBe("my-server"); + expect(result[0]!.transport.type).toBe("stdio"); + }); + + it("imports SSE servers from config", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue( + JSON.stringify({ + mcpServers: { sse: { url: "http://localhost:3000" } }, + }), + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]!.transport.type).toBe("sse"); + }); + + it("imports with missing servers key", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFile).mockResolvedValue(JSON.stringify({})); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/mcp/src/adapters/windsurf.ts b/packages/mcp/src/adapters/windsurf.ts new file mode 100644 index 0000000..be026ed --- /dev/null +++ b/packages/mcp/src/adapters/windsurf.ts @@ -0,0 +1,79 @@ +import { BaseMCPAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { MCPServerDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile } from "node:fs/promises"; +import { resolve } from "node:path"; + +class WindsurfMCPAdapter extends BaseMCPAdapter { + readonly id = "windsurf"; + readonly name = "Windsurf"; + readonly nativeSupport = true; + readonly configPath = "mcp_config.json"; + readonly command = 
"windsurf"; + + async generate(servers: MCPServerDefinition[]): Promise { + const mcpServers: Record = {}; + for (const server of servers) { + if (server.transport.type === "stdio") { + mcpServers[server.id] = { + command: server.transport.command, + args: server.transport.args ?? [], + env: server.transport.env ?? {}, + }; + } else { + mcpServers[server.id] = { + url: server.transport.url, + headers: server.transport.headers ?? {}, + }; + } + } + return [ + { + path: this.configPath, + content: JSON.stringify({ mcpServers }, null, 2) + "\n", + format: "json", + }, + ]; + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const fullPath = resolve(dir, this.configPath); + if (!existsSync(fullPath)) return []; + const raw = await readFile(fullPath, "utf-8"); + const data = JSON.parse(raw) as { mcpServers?: Record> }; + const servers: MCPServerDefinition[] = []; + for (const [id, config] of Object.entries(data.mcpServers ?? {})) { + if (config.command) { + servers.push({ + id, + name: id, + transport: { + type: "stdio", + command: config.command as string, + args: config.args as string[] | undefined, + env: config.env as Record | undefined, + }, + }); + } else if (config.url) { + servers.push({ + id, + name: id, + transport: { + type: "sse", + url: config.url as string, + headers: config.headers as Record | undefined, + }, + }); + } + } + return servers; + } +} + +const adapter = new WindsurfMCPAdapter(); +registry.register(adapter); + +export { WindsurfMCPAdapter }; +export default adapter; diff --git a/packages/mcp/src/cli/bin.ts b/packages/mcp/src/cli/bin.ts new file mode 100644 index 0000000..3adf2a9 --- /dev/null +++ b/packages/mcp/src/cli/bin.ts @@ -0,0 +1,6 @@ +import { run } from "./index.js"; + +run(process.argv.slice(2)).catch((err) => { + console.error(err.message); + process.exit(1); +}); diff --git a/packages/mcp/src/cli/index.test.ts b/packages/mcp/src/cli/index.test.ts new file mode 100644 index 0000000..8585239 --- 
/dev/null +++ b/packages/mcp/src/cli/index.test.ts @@ -0,0 +1,537 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import type { BaseMCPAdapter } from "../adapters/base.js"; +import type { MCPServerDefinition, GeneratedFile } from "../types/index.js"; + +const { + mockRegistryDetectAll, + mockRegistryList, + mockRegistryGet, + mockRegistryGetAll, + mockWriteFile, + mockMkdir, +} = vi.hoisted(() => ({ + mockRegistryDetectAll: vi.fn(), + mockRegistryList: vi.fn(), + mockRegistryGet: vi.fn(), + mockRegistryGetAll: vi.fn(), + mockWriteFile: vi.fn(), + mockMkdir: vi.fn(), +})); + +vi.mock("../adapters/all.js", () => ({})); + +vi.mock("../adapters/registry.js", () => ({ + registry: { + detectAll: (...args: unknown[]) => mockRegistryDetectAll(...args), + list: () => mockRegistryList(), + get: (id: string) => mockRegistryGet(id), + getAll: () => mockRegistryGetAll(), + }, +})); + +vi.mock("node:fs/promises", () => ({ + writeFile: (...args: unknown[]) => mockWriteFile(...args), + mkdir: (...args: unknown[]) => mockMkdir(...args), +})); + +import { run } from "./index.js"; + +function makeAdapter(overrides: Partial = {}): BaseMCPAdapter { + return { + id: overrides.id ?? "test-tool", + name: overrides.name ?? "Test Tool", + nativeSupport: overrides.nativeSupport ?? true, + configPath: overrides.configPath ?? ".test/mcp.json", + generate: + overrides.generate ?? + vi + .fn<(servers: MCPServerDefinition[]) => Promise>() + .mockResolvedValue([{ path: ".test/mcp.json", content: "{}", format: "json" }]), + import: + overrides.import ?? + vi.fn<(cwd?: string) => Promise>().mockResolvedValue([]), + detect: overrides.detect ?? vi.fn<(cwd?: string) => Promise>().mockResolvedValue(true), + install: + overrides.install ?? + vi.fn<(files: GeneratedFile[], cwd?: string) => Promise>().mockResolvedValue(undefined), + uninstall: + overrides.uninstall ?? 
vi.fn<(cwd?: string) => Promise>().mockResolvedValue(undefined), + } as unknown as BaseMCPAdapter; +} + +let logOutput: string[]; +let errorOutput: string[]; +let warnOutput: string[]; +let exitCode: number | undefined; + +const originalLog = console.log; +const originalError = console.error; +const originalWarn = console.warn; +const originalExit = process.exit; + +beforeEach(() => { + logOutput = []; + errorOutput = []; + warnOutput = []; + exitCode = undefined; + + console.log = vi.fn((...args: unknown[]) => { + logOutput.push(args.map(String).join(" ")); + }); + console.error = vi.fn((...args: unknown[]) => { + errorOutput.push(args.map(String).join(" ")); + }); + console.warn = vi.fn((...args: unknown[]) => { + warnOutput.push(args.map(String).join(" ")); + }); + process.exit = vi.fn((code?: number) => { + exitCode = code ?? 0; + throw new Error(`process.exit(${code})`); + }) as never; + + vi.clearAllMocks(); + mockWriteFile.mockResolvedValue(undefined); + mockMkdir.mockResolvedValue(undefined); +}); + +afterEach(() => { + console.log = originalLog; + console.error = originalError; + console.warn = originalWarn; + process.exit = originalExit; +}); + +function allLog(): string { + return logOutput.join("\n"); +} + +function allError(): string { + return errorOutput.join("\n"); +} + +function allWarn(): string { + return warnOutput.join("\n"); +} + +describe("run() - help output", () => { + it('prints help text for "help" command', async () => { + await run(["help"]); + expect(allLog()).toContain("ai-mcp - Universal MCP server configuration"); + expect(allLog()).toContain("USAGE:"); + expect(allLog()).toContain("COMMANDS:"); + expect(allLog()).toContain("OPTIONS:"); + expect(allLog()).toContain("EXAMPLES:"); + }); + + it("prints help text for --help flag", async () => { + await run(["--help"]); + expect(allLog()).toContain("ai-mcp - Universal MCP server configuration"); + }); + + it("prints help text for -h flag", async () => { + await run(["-h"]); + 
expect(allLog()).toContain("ai-mcp - Universal MCP server configuration"); + }); + + it("prints help text when no arguments are provided", async () => { + await run([]); + expect(allLog()).toContain("ai-mcp - Universal MCP server configuration"); + }); + + it("includes all documented commands in help text", async () => { + await run(["help"]); + const output = allLog(); + for (const cmd of [ + "init", + "detect", + "generate", + "install", + "import", + "sync", + "export", + "help", + ]) { + expect(output).toContain(cmd); + } + }); +}); + +describe("run() - unknown command", () => { + it("prints error and help, then exits with code 1", async () => { + await expect(run(["foobar"])).rejects.toThrow("process.exit(1)"); + expect(allError()).toContain("Unknown command: foobar"); + expect(allLog()).toContain("USAGE:"); + expect(exitCode).toBe(1); + }); +}); + +describe("run() - detect command", () => { + it("shows detection header", async () => { + mockRegistryDetectAll.mockResolvedValue([]); + mockRegistryList.mockReturnValue([]); + await run(["detect"]); + expect(allLog()).toContain("Detecting AI coding tools with MCP support..."); + }); + + it("lists detected and missing tools", async () => { + const detected = makeAdapter({ id: "claude-code", name: "Claude Code" }); + const missing = makeAdapter({ id: "codex", name: "Codex" }); + + mockRegistryDetectAll.mockResolvedValue([detected]); + mockRegistryList.mockReturnValue(["claude-code", "codex"]); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return detected; + if (id === "codex") return missing; + return undefined; + }); + + await run(["detect"]); + const output = allLog(); + expect(output).toContain("\u2713"); + expect(output).toContain("Claude Code"); + expect(output).toContain("\u2717"); + expect(output).toContain("Codex"); + }); + + it("shows detection summary with counts", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + 
mockRegistryDetectAll.mockResolvedValue([adapter]); + mockRegistryList.mockReturnValue(["claude-code", "codex", "gemini-cli"]); + mockRegistryGet.mockReturnValue(adapter); + + await run(["detect"]); + expect(allLog()).toContain("Detected 1/3 tools"); + }); +}); + +describe("run() - generate command", () => { + it("prints message when no tools detected and no --tools flag", async () => { + mockRegistryDetectAll.mockResolvedValue([]); + + await run(["generate"]); + expect(allLog()).toContain("No AI tools detected"); + expect(allLog()).toContain("--tools"); + }); + + it("generates configs for detected adapters", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await run(["generate"]); + expect(allLog()).toContain("Generating MCP configs for 1 tool(s)"); + expect(allLog()).toContain("Generated: .test/mcp.json"); + expect(allLog()).toContain("Done!"); + }); + + it("respects --dry-run and does not write files", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await run(["generate", "--dry-run"]); + expect(allLog()).toContain("[dry-run] Would write: .test/mcp.json"); + expect(mockWriteFile).not.toHaveBeenCalled(); + expect(mockMkdir).not.toHaveBeenCalled(); + }); + + it("uses --tools flag to resolve specific adapters", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["generate", "--tools=claude-code"]); + expect(allLog()).toContain("Generating MCP configs for 1 tool(s)"); + expect(mockRegistryDetectAll).not.toHaveBeenCalled(); + }); + + it("warns for unknown adapter IDs in --tools flag", async () => { + mockRegistryGet.mockReturnValue(undefined); + + await run(["generate", "--tools=nonexistent"]); + 
expect(allWarn()).toContain('Warning: Unknown adapter "nonexistent"'); + expect(allLog()).toContain("No AI tools detected"); + }); +}); + +describe("run() - install command", () => { + it("prints message when no tools detected", async () => { + mockRegistryDetectAll.mockResolvedValue([]); + + await run(["install"]); + expect(allLog()).toContain("No AI tools detected"); + }); + + it("installs MCP servers into detected tools", async () => { + const installFn = vi + .fn<(files: GeneratedFile[], cwd?: string) => Promise>() + .mockResolvedValue(undefined); + const adapter = makeAdapter({ + id: "claude-code", + name: "Claude Code", + install: installFn, + }); + + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await run(["install"]); + expect(installFn).toHaveBeenCalled(); + expect(allLog()).toContain("Installing MCP servers into 1 tool(s)"); + expect(allLog()).toContain("\u2713 Claude Code"); + expect(allLog()).toContain("MCP servers installed!"); + }); + + it("respects --dry-run and does not call install", async () => { + const installFn = vi + .fn<(files: GeneratedFile[], cwd?: string) => Promise>() + .mockResolvedValue(undefined); + const adapter = makeAdapter({ + id: "claude-code", + name: "Claude Code", + install: installFn, + generate: vi + .fn<(servers: MCPServerDefinition[]) => Promise>() + .mockResolvedValue([{ path: ".mcp.json", content: "{}", format: "json" }]), + }); + + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await run(["install", "--dry-run"]); + expect(installFn).not.toHaveBeenCalled(); + expect(allLog()).toContain("[dry-run] Would install: .mcp.json"); + }); + + it("skips undetected tool in --tools and warns", async () => { + const adapter = makeAdapter({ + id: "kiro", + name: "Kiro", + detect: vi.fn<(cwd?: string) => Promise>().mockResolvedValue(false), + }); + + mockRegistryGet.mockImplementation((id: string) => { + if (id === "kiro") return adapter; + return undefined; + }); + + await run(["install", "--tools=kiro"]); + 
expect(allWarn()).toContain("Kiro not detected, skipping"); + expect(allWarn()).toContain("--force"); + expect(allLog()).toContain("No AI tools detected"); + }); + + it("--force bypasses detection check for --tools", async () => { + const installFn = vi + .fn<(files: GeneratedFile[], cwd?: string) => Promise>() + .mockResolvedValue(undefined); + const adapter = makeAdapter({ + id: "kiro", + name: "Kiro", + detect: vi.fn<(cwd?: string) => Promise>().mockResolvedValue(false), + install: installFn, + }); + + mockRegistryGet.mockImplementation((id: string) => { + if (id === "kiro") return adapter; + return undefined; + }); + + await run(["install", "--tools=kiro", "--force"]); + expect(installFn).toHaveBeenCalled(); + expect(allLog()).toContain("Installing MCP servers into 1 tool(s)"); + }); +}); + +describe("run() - export command", () => { + it("exports servers as JSON to stdout", async () => { + const servers: MCPServerDefinition[] = [ + { + id: "test", + name: "test", + transport: { type: "stdio", command: "npx", args: ["-y", "test"] }, + }, + ]; + const adapter = makeAdapter({ + id: "claude-code", + name: "Claude Code", + import: vi.fn<(cwd?: string) => Promise>().mockResolvedValue(servers), + }); + + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await run(["export"]); + const output = allLog(); + expect(output).toContain('"id": "test"'); + expect(output).toContain('"command": "npx"'); + }); + + it("uses --tools flag", async () => { + const adapter = makeAdapter({ + id: "claude-code", + name: "Claude Code", + import: vi.fn<(cwd?: string) => Promise>().mockResolvedValue([]), + }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["export", "--tools=claude-code"]); + expect(mockRegistryDetectAll).not.toHaveBeenCalled(); + }); +}); + +describe("run() - import command", () => { + it("imports servers from a detected tool", async () => { + const servers: 
MCPServerDefinition[] = [ + { + id: "test", + name: "test", + transport: { type: "stdio", command: "npx", args: ["-y", "test"] }, + }, + ]; + const adapter = makeAdapter({ + id: "claude-code", + name: "Claude Code", + import: vi.fn<(cwd?: string) => Promise>().mockResolvedValue(servers), + }); + + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await run(["import"]); + expect(allLog()).toContain("Imported 1 server(s) from Claude Code"); + }); + + it("prints message when no tools detected", async () => { + mockRegistryDetectAll.mockResolvedValue([]); + + await run(["import"]); + expect(allLog()).toContain("No AI tools detected"); + }); +}); + +describe("run() - sync command", () => { + it("syncs servers across detected tools", async () => { + const servers: MCPServerDefinition[] = [ + { + id: "test", + name: "test", + transport: { type: "stdio", command: "npx", args: ["-y", "test"] }, + }, + ]; + const importFn = vi + .fn<(cwd?: string) => Promise>() + .mockResolvedValue(servers); + const installFn = vi + .fn<(files: GeneratedFile[], cwd?: string) => Promise>() + .mockResolvedValue(undefined); + const adapter1 = makeAdapter({ + id: "claude-code", + name: "Claude Code", + import: importFn, + install: installFn, + }); + const adapter2 = makeAdapter({ + id: "cursor", + name: "Cursor", + install: installFn, + }); + + mockRegistryDetectAll.mockResolvedValue([adapter1, adapter2]); + + await run(["sync"]); + expect(allLog()).toContain("Syncing MCP servers across 2 tool(s)"); + expect(allLog()).toContain("Sync complete!"); + }); + + it("respects --dry-run", async () => { + const servers: MCPServerDefinition[] = [ + { + id: "test", + name: "test", + transport: { type: "stdio", command: "npx", args: ["-y", "test"] }, + }, + ]; + const importFn = vi + .fn<(cwd?: string) => Promise>() + .mockResolvedValue(servers); + const installFn = vi + .fn<(files: GeneratedFile[], cwd?: string) => Promise>() + .mockResolvedValue(undefined); + const adapter1 = makeAdapter({ + id: 
"claude-code", + name: "Claude Code", + import: importFn, + install: installFn, + }); + + mockRegistryDetectAll.mockResolvedValue([adapter1]); + + await run(["sync", "--dry-run"]); + expect(installFn).not.toHaveBeenCalled(); + }); +}); + +describe("run() - flag parsing", () => { + it("parses --tools flag with = syntax", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["generate", "--tools=claude-code"]); + expect(mockRegistryGet).toHaveBeenCalledWith("claude-code"); + expect(mockRegistryDetectAll).not.toHaveBeenCalled(); + }); + + it("parses --dry-run flag", async () => { + mockRegistryDetectAll.mockResolvedValue([]); + await run(["generate", "--dry-run"]); + // No error means flag was parsed + }); + + it("parses --force flag", async () => { + const adapter = makeAdapter({ + id: "kiro", + name: "Kiro", + detect: vi.fn<(cwd?: string) => Promise>().mockResolvedValue(false), + }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "kiro") return adapter; + return undefined; + }); + + await run(["generate", "--tools=kiro", "--force"]); + expect(allLog()).toContain("Generating MCP configs for 1 tool(s)"); + }); +}); + +describe("run() - error propagation", () => { + it("propagates adapter generate errors", async () => { + const adapter = makeAdapter({ + id: "broken", + name: "Broken", + generate: vi + .fn<(servers: MCPServerDefinition[]) => Promise>() + .mockRejectedValue(new Error("generate failed")), + }); + + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await expect(run(["generate"])).rejects.toThrow("generate failed"); + }); + + it("propagates adapter install errors", async () => { + const adapter = makeAdapter({ + id: "broken", + name: "Broken", + install: vi + .fn<(files: GeneratedFile[], cwd?: string) => Promise>() + .mockRejectedValue(new Error("install 
failed")), + }); + + mockRegistryDetectAll.mockResolvedValue([adapter]); + + await expect(run(["install"])).rejects.toThrow("install failed"); + }); +}); diff --git a/packages/mcp/src/cli/index.ts b/packages/mcp/src/cli/index.ts new file mode 100644 index 0000000..d6f0ba8 --- /dev/null +++ b/packages/mcp/src/cli/index.ts @@ -0,0 +1,343 @@ +import { registry } from "../adapters/registry.js"; +import type { BaseMCPAdapter } from "../adapters/base.js"; +import type { MCPServerDefinition, MCPConfig, GeneratedFile } from "../types/index.js"; + +// Import all adapters to register them +import "../adapters/all.js"; + +const HELP = ` +ai-mcp - Universal MCP server configuration for AI coding tools + +USAGE: + ai-mcp [options] + +COMMANDS: + init Create an mcp.config.ts in the current directory + detect Detect which AI tools support MCP + generate Generate MCP configs for detected/specified tools + install Generate and install MCP servers into detected tools + import Import MCP servers from an existing tool's config + sync Sync MCP servers across all detected tools + export Export MCP server definitions as JSON + help Show this help message + +OPTIONS: + --tools Comma-separated list of tools (e.g., --tools=claude-code,cursor) + --dry-run Show what would be generated without writing files + --force Skip detection checks for --tools (install even if tool not found) + +EXAMPLES: + ai-mcp detect # See which AI tools support MCP + ai-mcp generate # Generate configs for all detected tools + ai-mcp install --tools=claude-code # Install MCP servers for Claude Code only + ai-mcp sync # Sync MCP servers across all tools + ai-mcp import --tools=claude-code # Import servers from Claude Code config + ai-mcp export # Export all MCP servers as JSON +`; + +type Flags = { + tools?: string; + config?: string; + dryRun?: boolean; + force?: boolean; +}; + +export async function run(args: string[]): Promise { + const command = args[0]; + const flags = parseFlags(args.slice(1)); + + switch 
(command) { + case "init": + await cmdInit(flags); + break; + case "detect": + await cmdDetect(); + break; + case "generate": + await cmdGenerate(flags); + break; + case "install": + await cmdInstall(flags); + break; + case "import": + await cmdImport(flags); + break; + case "sync": + await cmdSync(flags); + break; + case "export": + await cmdExport(flags); + break; + case "help": + case "--help": + case "-h": + case undefined: + console.log(HELP); + break; + default: + console.error(`Unknown command: ${command}`); + console.log(HELP); + process.exit(1); + } +} + +// ── Commands ──────────────────────────────────────────────── + +async function cmdInit(flags: Flags): Promise { + const { writeFile } = await import("node:fs/promises"); + + const template = `import { defineConfig } from "@premierstudio/ai-mcp"; + +export default defineConfig({ + servers: [ + // Add your MCP servers here: + // { + // id: "my-server", + // name: "My MCP Server", + // transport: { + // type: "stdio", + // command: "npx", + // args: ["-y", "@my-org/mcp-server"], + // }, + // }, + ], +}); +`; + + if (flags.dryRun) { + console.log("[dry-run] Would create mcp.config.ts"); + return; + } + + await writeFile("mcp.config.ts", template, "utf-8"); + console.log("Created mcp.config.ts"); + console.log(""); + console.log("Next steps:"); + console.log(" 1. Edit mcp.config.ts to add your MCP servers"); + console.log(" 2. Run: ai-mcp detect (see which AI tools support MCP)"); + console.log(" 3. Run: ai-mcp install (install servers into your tools)"); +} + +async function cmdDetect(): Promise { + console.log("Detecting AI coding tools with MCP support...\n"); + + const detected = await registry.detectAll(); + const all = registry.list(); + + for (const id of all) { + const adapter = registry.get(id); + if (!adapter) continue; + + const isDetected = detected.some((d) => d.id === id); + const icon = isDetected ? "\u2713" : "\u2717"; + const color = isDetected ? 
"\x1b[32m" : "\x1b[90m"; + const reset = "\x1b[0m"; + + console.log(` ${color}${icon}${reset} ${adapter.name}`); + } + + console.log(`\nDetected ${detected.length}/${all.length} tools`); +} + +async function cmdGenerate(flags: Flags): Promise { + const adapters = await resolveAdapters(flags); + + if (adapters.length === 0) { + console.log("No AI tools detected. Use --tools to specify manually."); + return; + } + + const config = await loadConfig(flags.config); + + console.log(`Generating MCP configs for ${adapters.length} tool(s)...\n`); + + for (const adapter of adapters) { + const files = await adapter.generate(config.servers); + + for (const file of files) { + if (flags.dryRun) { + console.log(` [dry-run] Would write: ${file.path}`); + } else { + console.log(` Generated: ${file.path}`); + } + } + + if (!flags.dryRun) { + await writeFiles(files); + } + } + + console.log("\nDone!"); +} + +async function cmdInstall(flags: Flags): Promise { + const adapters = await resolveAdapters(flags); + + if (adapters.length === 0) { + console.log("No AI tools detected. Use --tools to specify manually."); + return; + } + + const config = await loadConfig(flags.config); + + console.log(`Installing MCP servers into ${adapters.length} tool(s)...\n`); + + for (const adapter of adapters) { + const files = await adapter.generate(config.servers); + + if (flags.dryRun) { + for (const file of files) { + console.log(` [dry-run] Would install: ${file.path}`); + } + } else { + await adapter.install(files); + console.log(` \u2713 ${adapter.name}`); + } + } + + console.log("\nMCP servers installed!"); +} + +async function cmdImport(flags: Flags): Promise { + const adapters = await resolveAdapters(flags); + + if (adapters.length === 0) { + console.log("No AI tools detected. 
Use --tools to specify manually."); + return; + } + + for (const adapter of adapters) { + const servers = await adapter.import(); + console.log(` Imported ${servers.length} server(s) from ${adapter.name}`); + } +} + +async function cmdSync(flags: Flags): Promise { + const adapters = await resolveAdapters(flags); + + if (adapters.length === 0) { + console.log("No AI tools detected. Use --tools to specify manually."); + return; + } + + console.log(`Syncing MCP servers across ${adapters.length} tool(s)...\n`); + + // Collect all unique servers from all detected tools + const allServers = new Map(); + for (const adapter of adapters) { + const servers = await adapter.import(); + for (const server of servers) { + if (!allServers.has(server.id)) { + allServers.set(server.id, server); + } + } + } + + const servers = [...allServers.values()]; + + // Write to all adapters + for (const adapter of adapters) { + const files = await adapter.generate(servers); + if (flags.dryRun) { + for (const file of files) { + console.log(` [dry-run] Would write: ${file.path}`); + } + } else { + await adapter.install(files); + console.log(` \u2713 ${adapter.name} (${servers.length} servers)`); + } + } + + console.log("\nSync complete!"); +} + +async function cmdExport(flags: Flags): Promise { + const adapters = await resolveAdapters(flags); + + if (adapters.length === 0) { + console.log("No AI tools detected. 
Use --tools to specify manually."); + return; + } + + const allServers = new Map(); + for (const adapter of adapters) { + const servers = await adapter.import(); + for (const server of servers) { + if (!allServers.has(server.id)) { + allServers.set(server.id, server); + } + } + } + + console.log(JSON.stringify([...allServers.values()], null, 2)); +} + +// ── Helpers ───────────────────────────────────────────────── + +function parseFlags(args: string[]): Flags { + const flags: Flags = {}; + + for (const arg of args) { + if (arg.startsWith("--tools=")) { + flags.tools = arg.slice(8); + } else if (arg.startsWith("--config=")) { + flags.config = arg.slice(9); + } else if (arg === "--dry-run") { + flags.dryRun = true; + } else if (arg === "--force") { + flags.force = true; + } + } + + return flags; +} + +async function resolveAdapters(flags: Flags): Promise { + if (flags.tools) { + const ids = flags.tools.split(",").map((t) => t.trim()); + const adapters: BaseMCPAdapter[] = []; + for (const id of ids) { + const adapter = registry.get(id); + if (!adapter) { + console.warn(` Warning: Unknown adapter "${id}"`); + continue; + } + if (!flags.force && !(await adapter.detect())) { + console.warn(` Warning: ${adapter.name} not detected, skipping (use --force to override)`); + continue; + } + adapters.push(adapter); + } + return adapters; + } + + return registry.detectAll(); +} + +async function loadConfig(configPath?: string): Promise { + const path = configPath ?? 
"mcp.config.ts"; + const { existsSync } = await import("node:fs"); + + if (!existsSync(path)) { + if (configPath) { + throw new Error(`Config file not found: ${path}`); + } + return { servers: [] }; + } + + const { resolve } = await import("node:path"); + const fullPath = resolve(process.cwd(), path); + const mod = await import(fullPath); + return mod.default as MCPConfig; +} + +async function writeFiles(files: GeneratedFile[]): Promise { + const { writeFile, mkdir } = await import("node:fs/promises"); + const { dirname, resolve } = await import("node:path"); + + for (const file of files) { + const fullPath = resolve(process.cwd(), file.path); + await mkdir(dirname(fullPath), { recursive: true }); + await writeFile(fullPath, file.content, "utf-8"); + } +} diff --git a/packages/mcp/src/config/define.test.ts b/packages/mcp/src/config/define.test.ts new file mode 100644 index 0000000..c55749c --- /dev/null +++ b/packages/mcp/src/config/define.test.ts @@ -0,0 +1,44 @@ +import { describe, it, expect } from "vitest"; +import { defineConfig } from "./define.js"; +import type { MCPConfig } from "../types/index.js"; + +describe("defineConfig", () => { + it("returns the same config object", () => { + const config: MCPConfig = { + servers: [ + { + id: "test", + name: "Test Server", + transport: { type: "stdio", command: "npx", args: ["-y", "test-mcp"] }, + }, + ], + }; + const result = defineConfig(config); + expect(result).toBe(config); + }); + + it("returns empty servers array unchanged", () => { + const config: MCPConfig = { servers: [] }; + const result = defineConfig(config); + expect(result).toEqual({ servers: [] }); + }); + + it("preserves all server fields", () => { + const config: MCPConfig = { + servers: [ + { + id: "my-server", + name: "My Server", + description: "A test server", + transport: { type: "stdio", command: "node", args: ["server.js"], env: { KEY: "val" } }, + enabled: true, + tags: ["dev", "test"], + }, + ], + }; + const result = defineConfig(config); + 
expect(result.servers[0]!.id).toBe("my-server"); + expect(result.servers[0]!.description).toBe("A test server"); + expect(result.servers[0]!.tags).toEqual(["dev", "test"]); + }); +}); diff --git a/packages/mcp/src/config/define.ts b/packages/mcp/src/config/define.ts new file mode 100644 index 0000000..72435a3 --- /dev/null +++ b/packages/mcp/src/config/define.ts @@ -0,0 +1,5 @@ +import type { MCPConfig } from "../types/index.js"; + +export function defineConfig(config: MCPConfig): MCPConfig { + return config; +} diff --git a/packages/mcp/src/config/index.ts b/packages/mcp/src/config/index.ts new file mode 100644 index 0000000..ee71dfd --- /dev/null +++ b/packages/mcp/src/config/index.ts @@ -0,0 +1 @@ +export { defineConfig } from "./define.js"; diff --git a/packages/mcp/src/index.ts b/packages/mcp/src/index.ts new file mode 100644 index 0000000..9e25416 --- /dev/null +++ b/packages/mcp/src/index.ts @@ -0,0 +1,4 @@ +export { defineConfig } from "./config/index.js"; +export { registry, BaseMCPAdapter } from "./adapters/index.js"; + +export type { MCPTransport, MCPServerDefinition, MCPConfig, GeneratedFile } from "./types/index.js"; diff --git a/packages/mcp/src/types/config.ts b/packages/mcp/src/types/config.ts new file mode 100644 index 0000000..d7f59f2 --- /dev/null +++ b/packages/mcp/src/types/config.ts @@ -0,0 +1,11 @@ +import type { MCPServerDefinition } from "./definition.js"; + +export type MCPConfig = { + servers: MCPServerDefinition[]; +}; + +export type GeneratedFile = { + path: string; + content: string; + format: "json" | "yaml" | "jsonc"; +}; diff --git a/packages/mcp/src/types/definition.ts b/packages/mcp/src/types/definition.ts new file mode 100644 index 0000000..e336fe3 --- /dev/null +++ b/packages/mcp/src/types/definition.ts @@ -0,0 +1,12 @@ +export type MCPTransport = + | { type: "stdio"; command: string; args?: string[]; env?: Record } + | { type: "sse"; url: string; headers?: Record }; + +export type MCPServerDefinition = { + id: string; + name: 
string; + description?: string; + transport: MCPTransport; + enabled?: boolean; + tags?: string[]; +}; diff --git a/packages/mcp/src/types/index.ts b/packages/mcp/src/types/index.ts new file mode 100644 index 0000000..d4014a8 --- /dev/null +++ b/packages/mcp/src/types/index.ts @@ -0,0 +1,2 @@ +export type { MCPTransport, MCPServerDefinition } from "./definition.js"; +export type { MCPConfig, GeneratedFile } from "./config.js"; diff --git a/packages/mcp/tsconfig.json b/packages/mcp/tsconfig.json new file mode 100644 index 0000000..329ab40 --- /dev/null +++ b/packages/mcp/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules", "dist", "**/*.test.ts"] +} diff --git a/packages/mcp/tsup.config.ts b/packages/mcp/tsup.config.ts new file mode 100644 index 0000000..d87215c --- /dev/null +++ b/packages/mcp/tsup.config.ts @@ -0,0 +1,29 @@ +import { defineConfig } from "tsup"; + +export default defineConfig([ + { + entry: ["src/index.ts", "src/adapters/index.ts", "src/adapters/all.ts", "src/cli/index.ts"], + format: ["esm"], + dts: true, + clean: true, + sourcemap: true, + target: "node22", + outDir: "dist", + splitting: true, + treeshake: true, + }, + { + entry: ["src/cli/bin.ts"], + format: ["esm"], + dts: false, + clean: false, + sourcemap: true, + target: "node22", + outDir: "dist/cli", + splitting: false, + treeshake: true, + banner: { + js: "#!/usr/bin/env node", + }, + }, +]); diff --git a/packages/plannable/README.md b/packages/plannable/README.md deleted file mode 100644 index 47b5d11..0000000 --- a/packages/plannable/README.md +++ /dev/null @@ -1,75 +0,0 @@ -# @premierstudio/plannable - -Connect your AI coding tools to [Plannable](https://plannable.ai) with a single command. Automatically detects your IDE, sets up MCP connections, and installs PM-AI hooks that keep your AI tools in sync with Plannable. 
- -## Quick Start - -```bash -npx @premierstudio/plannable -``` - -That's it. The CLI will: - -1. **Authenticate** via OAuth (opens browser) -2. **Detect** your AI tools (Claude Code, Cursor, Gemini CLI, Codex, and more) -3. **Configure** MCP server connections for each detected tool -4. **Install** PM-AI hooks for intelligent guardrails - -## Commands - -```bash -npx @premierstudio/plannable # Interactive setup (default) -npx @premierstudio/plannable status # Check connection status -npx @premierstudio/plannable remove # Clean removal -``` - -## Options - -``` ---server Plannable server URL (default: https://plannable.ai) - Also configurable via PLANNABLE_SERVER env var -``` - -## Supported AI Tools - -| Tool | Detection | MCP | Hooks | -| ----------- | ------------------------------------------------- | --- | ------- | -| Claude Code | `claude` CLI or `.claude/` directory | Yes | Yes | -| Cursor | `cursor` CLI or `.cursor/` directory | Yes | Yes | -| Gemini CLI | `gemini` CLI or `.gemini/` directory | Yes | Yes | -| Codex | `codex` CLI, `codex.json`, or `.codex/` directory | Yes | Yes | -| Cline | `cline` CLI or `.clinerules/` directory | Yes | Yes | -| Kiro | `kiro` CLI or `.kiro/` directory | Yes | Yes | -| OpenCode | `opencode` CLI or `.opencode/` directory | Yes | Yes | -| Droid | `droid` CLI or `.factory/` directory | Yes | Yes | -| Amp | `amp` CLI or `.amp/` directory | Yes | Planned | - -## What Are PM-AI Hooks? - -Hooks are lightweight middleware that run inside your AI tool. They let Plannable: - -- Guide your AI toward the right tasks and priorities -- Enforce project conventions automatically -- Signal file and shell activity back to Plannable for risk detection -- Block unsafe operations before they happen - -Hooks are installed via [ai-hooks](https://github.com/PremierStudio/ai-hooks), a universal hooks framework for AI coding tools. 
- -## Self-Hosted - -```bash -# Use a custom server -npx @premierstudio/plannable --server https://your-instance.example.com - -# Or set via environment variable -PLANNABLE_SERVER=https://your-instance.example.com npx @premierstudio/plannable -``` - -## Requirements - -- Node.js >= 22.0.0 -- At least one supported AI coding tool installed - -## License - -MIT diff --git a/packages/plannable/package.json b/packages/plannable/package.json deleted file mode 100644 index 667c0e4..0000000 --- a/packages/plannable/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "@premierstudio/plannable", - "version": "1.1.8", - "description": "CLI for connecting AI coding tools to Plannable's MCP server", - "license": "MIT", - "repository": { - "type": "git", - "url": "https://github.com/PremierStudio/ai-hooks" - }, - "bin": { - "plannable": "dist/bin.js" - }, - "files": [ - "dist" - ], - "type": "module", - "main": "./dist/index.js", - "types": "./dist/index.d.ts", - "exports": { - ".": { - "types": "./dist/index.d.ts", - "import": "./dist/index.js" - } - }, - "publishConfig": { - "access": "public", - "provenance": true - }, - "scripts": { - "build": "tsup", - "dev": "tsup --watch", - "typecheck": "tsc --noEmit", - "test": "vitest run", - "clean": "rm -rf dist" - }, - "dependencies": { - "@clack/prompts": "^0.10.0", - "@premierstudio/ai-hooks": "^1.1.8" - } -} diff --git a/packages/plannable/src/auth/oauth.ts b/packages/plannable/src/auth/oauth.ts deleted file mode 100644 index e11422b..0000000 --- a/packages/plannable/src/auth/oauth.ts +++ /dev/null @@ -1,421 +0,0 @@ -import { createServer, type IncomingMessage, type ServerResponse } from "node:http"; -import { createHash, randomBytes } from "node:crypto"; -import { exec } from "node:child_process"; -import { platform } from "node:os"; -import * as p from "@clack/prompts"; -import { saveAuth, loadAuth, clearAuth, isTokenExpired } from "./token-store.js"; -import type { StoredAuth } from "./token-store.js"; - -const 
REDIRECT_PORT = 21347; -const REDIRECT_URI = `http://localhost:${REDIRECT_PORT}/callback`; -const REDIRECT_URI_IP = `http://127.0.0.1:${REDIRECT_PORT}/callback`; -const LOGIN_TIMEOUT_MS = 120_000; - -type OAuthMetadata = { - authorization_endpoint: string; - token_endpoint: string; - registration_endpoint: string; - scopes_supported: string[]; -}; - -type RegisteredClient = { - client_id: string; - registration_access_token: string; -}; - -type TokenResponse = { - access_token: string; - token_type: string; - expires_in: number; - refresh_token: string; - scope: string; -}; - -function generateCodeVerifier(): string { - return randomBytes(32).toString("base64url"); -} - -function generateCodeChallenge(verifier: string): string { - return createHash("sha256").update(verifier).digest("base64url"); -} - -function generateState(): string { - return randomBytes(16).toString("base64url"); -} - -function openBrowser(url: string): void { - const os = platform(); - const command = - os === "darwin" ? `open "${url}"` : os === "win32" ? `start "${url}"` : `xdg-open "${url}"`; - - exec(command, (error) => { - if (error) { - p.log.warn(`Could not open browser automatically. 
Please visit:\n ${url}`); - } - }); -} - -async function discoverEndpoints(serverUrl: string): Promise { - const wellKnownUrl = `${serverUrl}/.well-known/oauth-authorization-server`; - const response = await fetch(wellKnownUrl); - - if (!response.ok) { - throw new Error(`Failed to discover OAuth endpoints at ${wellKnownUrl} (${response.status})`); - } - - return (await response.json()) as OAuthMetadata; -} - -async function registerClient(registrationEndpoint: string): Promise { - const response = await fetch(registrationEndpoint, { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ - client_name: "Plannable CLI", - redirect_uris: [REDIRECT_URI, REDIRECT_URI_IP], - grant_types: ["authorization_code", "refresh_token"], - response_types: ["code"], - token_endpoint_auth_method: "none", - }), - }); - - if (!response.ok) { - const error = (await response.json()) as { error_description?: string }; - throw new Error(error.error_description ?? `Client registration failed (${response.status})`); - } - - const result = (await response.json()) as { - client_id: string; - registration_access_token: string; - }; - - return { - client_id: result.client_id, - registration_access_token: result.registration_access_token, - }; -} - -async function exchangeCode( - tokenEndpoint: string, - clientId: string, - code: string, - codeVerifier: string, -): Promise { - const body = new URLSearchParams({ - grant_type: "authorization_code", - client_id: clientId, - code, - redirect_uri: REDIRECT_URI, - code_verifier: codeVerifier, - }); - - const response = await fetch(tokenEndpoint, { - method: "POST", - headers: { "Content-Type": "application/x-www-form-urlencoded" }, - body: body.toString(), - }); - - if (!response.ok) { - const error = (await response.json()) as { error_description?: string }; - throw new Error(error.error_description ?? 
`Token exchange failed (${response.status})`); - } - - return (await response.json()) as TokenResponse; -} - -async function refreshAccessToken( - serverUrl: string, - clientId: string, - refreshToken: string, -): Promise { - const metadata = await discoverEndpoints(serverUrl); - - const body = new URLSearchParams({ - grant_type: "refresh_token", - client_id: clientId, - refresh_token: refreshToken, - }); - - const response = await fetch(metadata.token_endpoint, { - method: "POST", - headers: { "Content-Type": "application/x-www-form-urlencoded" }, - body: body.toString(), - }); - - if (!response.ok) { - throw new Error("Token refresh failed. Please re-authenticate."); - } - - return (await response.json()) as TokenResponse; -} - -const CALLBACK_HTML = ` - - - Plannable - - - -
-
-
-
- - - -
-

Connected to Plannable

-

You can close this tab and return to your terminal.

-
- -`; - -function waitForCallback(state: string): Promise<{ code: string }> { - return new Promise((resolve, reject) => { - const timeout = setTimeout(() => { - server.close(); - reject(new Error("Login timed out after 2 minutes. Please try again.")); - }, LOGIN_TIMEOUT_MS); - - const server = createServer((req: IncomingMessage, res: ServerResponse) => { - const url = new URL(req.url ?? "/", `http://127.0.0.1:${REDIRECT_PORT}`); - - if (url.pathname !== "/callback") { - res.writeHead(404); - res.end("Not found"); - return; - } - - const error = url.searchParams.get("error"); - if (error) { - const description = url.searchParams.get("error_description") ?? error; - res.writeHead(200, { "Content-Type": "text/html" }); - res.end( - `

Authorization failed: ${description}

`, - ); - clearTimeout(timeout); - server.close(); - reject(new Error(`Authorization failed: ${description}`)); - return; - } - - const code = url.searchParams.get("code"); - const returnedState = url.searchParams.get("state"); - - if (!code) { - res.writeHead(400); - res.end("Missing authorization code"); - return; - } - - if (returnedState !== state) { - res.writeHead(400); - res.end("State mismatch"); - clearTimeout(timeout); - server.close(); - reject(new Error("OAuth state mismatch — possible CSRF attack.")); - return; - } - - res.writeHead(200, { "Content-Type": "text/html" }); - res.end(CALLBACK_HTML); - clearTimeout(timeout); - server.close(); - resolve({ code }); - }); - - server.listen(REDIRECT_PORT, "127.0.0.1", () => { - // Server ready - }); - - server.on("error", (err: NodeJS.ErrnoException) => { - clearTimeout(timeout); - if (err.code === "EADDRINUSE") { - reject( - new Error(`Port ${REDIRECT_PORT} is in use. Close the process using it and try again.`), - ); - } else { - reject(err); - } - }); - }); -} - -export async function login(serverUrl: string): Promise { - const spin = p.spinner(); - spin.start("Discovering OAuth endpoints..."); - - const metadata = await discoverEndpoints(serverUrl); - spin.stop("OAuth endpoints discovered"); - - // Check for existing client registration, or register a new one - const existing = await loadAuth(); - let clientId: string; - let registrationAccessToken: string; - - if (existing?.client_id && existing.server_url === serverUrl) { - const expired = isTokenExpired(existing); - const dim = "\x1b[2m"; - const r = "\x1b[0m"; - p.log.info( - [ - "Found existing credentials:", - ` ${dim}client${r} ${existing.client_id}`, - ` ${dim}server${r} ${existing.server_url}`, - ` ${dim}status${r} ${expired ? 
"expired" : "active"}`, - ].join("\n"), - ); - - const action = await p.select({ - message: "What would you like to do?", - options: [ - { - value: "reuse", - label: "Use existing credentials", - hint: "skip re-auth if still valid on server", - }, - { - value: "fresh", - label: "Start fresh", - hint: "clear cached credentials and re-register", - }, - ], - }); - - if (p.isCancel(action)) { - p.cancel("Setup cancelled."); - process.exit(0); - } - - if (action === "fresh") { - await clearAuth(); - p.log.info("Cleared cached credentials"); - const regSpin = p.spinner(); - regSpin.start("Registering new OAuth client..."); - const registered = await registerClient(metadata.registration_endpoint); - clientId = registered.client_id; - registrationAccessToken = registered.registration_access_token; - regSpin.stop("OAuth client registered"); - } else { - clientId = existing.client_id; - registrationAccessToken = existing.registration_access_token; - } - } else { - const regSpin = p.spinner(); - regSpin.start("Registering OAuth client..."); - const registered = await registerClient(metadata.registration_endpoint); - clientId = registered.client_id; - registrationAccessToken = registered.registration_access_token; - regSpin.stop("OAuth client registered"); - } - - const codeVerifier = generateCodeVerifier(); - const codeChallenge = generateCodeChallenge(codeVerifier); - const state = generateState(); - const scopes = metadata.scopes_supported.join(" "); - - const authUrl = new URL(metadata.authorization_endpoint); - authUrl.searchParams.set("response_type", "code"); - authUrl.searchParams.set("client_id", clientId); - authUrl.searchParams.set("redirect_uri", REDIRECT_URI); - authUrl.searchParams.set("code_challenge", codeChallenge); - authUrl.searchParams.set("code_challenge_method", "S256"); - authUrl.searchParams.set("state", state); - authUrl.searchParams.set("scope", scopes); - - p.log.info("Opening browser for authentication..."); - openBrowser(authUrl.toString()); - - 
const spin2 = p.spinner(); - spin2.start("Waiting for authorization (2 min timeout)..."); - - const { code } = await waitForCallback(state); - spin2.stop("Authorization received"); - - const spin3 = p.spinner(); - spin3.start("Exchanging authorization code..."); - - const tokenResponse = await exchangeCode(metadata.token_endpoint, clientId, code, codeVerifier); - - const auth: StoredAuth = { - server_url: serverUrl, - client_id: clientId, - registration_access_token: registrationAccessToken, - access_token: tokenResponse.access_token, - refresh_token: tokenResponse.refresh_token, - expires_at: Date.now() + tokenResponse.expires_in * 1000, - scopes: tokenResponse.scope ? tokenResponse.scope.split(" ") : [], - team_id: "", - }; - - await saveAuth(auth); - spin3.stop("Authenticated successfully"); - - return auth; -} - -export async function ensureAuthenticated(serverUrl: string): Promise { - const stored = await loadAuth(); - - if (!stored || stored.server_url !== serverUrl) { - return login(serverUrl); - } - - if (!isTokenExpired(stored)) { - return stored; - } - - // Try silent refresh - try { - const tokenResponse = await refreshAccessToken( - serverUrl, - stored.client_id, - stored.refresh_token, - ); - - const refreshed: StoredAuth = { - ...stored, - access_token: tokenResponse.access_token, - refresh_token: tokenResponse.refresh_token, - expires_at: Date.now() + tokenResponse.expires_in * 1000, - scopes: tokenResponse.scope ? tokenResponse.scope.split(" ") : [], - }; - - await saveAuth(refreshed); - return refreshed; - } catch { - // Refresh failed — client may have been revoked on the server - p.log.warn("Token refresh failed. 
The client may have been removed from the server."); - await clearAuth(); - return login(serverUrl); - } -} diff --git a/packages/plannable/src/auth/token-store.ts b/packages/plannable/src/auth/token-store.ts deleted file mode 100644 index 4c8b297..0000000 --- a/packages/plannable/src/auth/token-store.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { readFile, writeFile, mkdir, rm } from "node:fs/promises"; -import { existsSync } from "node:fs"; -import { homedir } from "node:os"; -import { join } from "node:path"; - -export type StoredAuth = { - server_url: string; - client_id: string; - registration_access_token: string; - access_token: string; - refresh_token: string; - expires_at: number; - scopes: string[]; - team_id: string; -}; - -const TOKEN_DIR = join(homedir(), ".plannable"); -const TOKEN_FILE = join(TOKEN_DIR, "auth.json"); - -export async function loadAuth(): Promise { - if (!existsSync(TOKEN_FILE)) return null; - - try { - const content = await readFile(TOKEN_FILE, "utf-8"); - return JSON.parse(content) as StoredAuth; - } catch { - return null; - } -} - -export async function saveAuth(auth: StoredAuth): Promise { - await mkdir(TOKEN_DIR, { recursive: true, mode: 0o700 }); - await writeFile(TOKEN_FILE, JSON.stringify(auth, null, 2) + "\n", { - encoding: "utf-8", - mode: 0o600, - }); -} - -export async function clearAuth(): Promise { - if (existsSync(TOKEN_FILE)) { - await rm(TOKEN_FILE); - } -} - -export function isTokenExpired(auth: StoredAuth): boolean { - // Consider expired if within 60 seconds of expiry - return Date.now() >= auth.expires_at - 60_000; -} - -export function getAuthFilePath(): string { - return TOKEN_FILE; -} diff --git a/packages/plannable/src/commands/remove.ts b/packages/plannable/src/commands/remove.ts deleted file mode 100644 index 8bd0196..0000000 --- a/packages/plannable/src/commands/remove.ts +++ /dev/null @@ -1,71 +0,0 @@ -import * as p from "@clack/prompts"; -import { registry } from "@premierstudio/ai-hooks/adapters"; -import { 
askConfirm } from "../ui/prompts.js"; -import { clearAuth, getAuthFilePath, loadAuth } from "../auth/token-store.js"; -import { removeMcpEntry } from "../config/mcp-config.js"; -import { removeConfig } from "../config/ai-hooks-config.js"; - -export async function removeCommand(): Promise { - p.intro("Plannable — Remove"); - - const confirmed = await askConfirm("Remove all Plannable configuration from this project?"); - - if (!confirmed) { - p.outro("Nothing removed."); - return; - } - - const spin = p.spinner(); - spin.start("Removing Plannable configuration..."); - - const removed: string[] = []; - - // Remove MCP entries from all detected tools (both scopes) - const detected = await registry.detectAll(); - for (const adapter of detected) { - for (const scope of ["project", "global"] as const) { - try { - const didRemove = await removeMcpEntry(adapter, scope); - if (didRemove) { - removed.push(`MCP entry from ${adapter.name} (${scope})`); - } - } catch { - // Skip if removal fails - } - } - - // Uninstall ai-hooks configs - try { - await adapter.uninstall(); - removed.push(`ai-hooks config for ${adapter.name}`); - } catch { - // Skip if uninstall fails - } - } - - // Remove ai-hooks.config.ts if generated by us - const configRemoved = await removeConfig(); - if (configRemoved) { - removed.push("ai-hooks.config.ts"); - } - - // Clear stored auth - const auth = await loadAuth(); - if (auth) { - await clearAuth(); - removed.push(getAuthFilePath()); - } - - spin.stop("Removal complete"); - - if (removed.length > 0) { - p.log.success("Removed:"); - for (const item of removed) { - p.log.info(` - ${item}`); - } - } else { - p.log.info("No Plannable configuration found to remove."); - } - - p.outro("Plannable has been removed from this project."); -} diff --git a/packages/plannable/src/commands/setup.ts b/packages/plannable/src/commands/setup.ts deleted file mode 100644 index 5b68a04..0000000 --- a/packages/plannable/src/commands/setup.ts +++ /dev/null @@ -1,139 +0,0 @@ 
-import { readFileSync } from "node:fs"; -import { dirname, join } from "node:path"; -import { fileURLToPath } from "node:url"; -import * as p from "@clack/prompts"; -import { registry } from "@premierstudio/ai-hooks/adapters"; -import type { Adapter } from "@premierstudio/ai-hooks"; -import { showBanner } from "../ui/brand.js"; -import { - askToolSelection, - askLanguageConfirm, - askFeatureToggles, - askMcpScope, -} from "../ui/prompts.js"; -import { login } from "../auth/oauth.js"; -import { detectLanguage } from "../detect/language.js"; -import { installMcpEntry } from "../config/mcp-config.js"; -import type { McpScope } from "../config/mcp-config.js"; -import { writeConfig } from "../config/ai-hooks-config.js"; - -function getVersion(): string { - try { - const dir = dirname(fileURLToPath(import.meta.url)); - const pkg = JSON.parse(readFileSync(join(dir, "..", "package.json"), "utf-8")) as { - version: string; - }; - return pkg.version; - } catch { - return "0.0.0"; - } -} - -export async function setupCommand(serverUrl: string): Promise { - showBanner(getVersion()); - - p.log.info(`Server: ${serverUrl}`); - - // Step 2: OAuth login - const auth = await login(serverUrl); - p.log.success("Authenticated to Plannable"); - - // Step 3: Detect AI tools - const spin = p.spinner(); - spin.start("Detecting AI coding tools..."); - - const detected = await registry.detectAll(); - const allIds = registry.list(); - const allAdapters: Array<{ id: string; name: string }> = []; - for (const id of allIds) { - const adapter = registry.get(id); - if (adapter) { - allAdapters.push({ id: adapter.id, name: adapter.name }); - } - } - - spin.stop(`Found ${detected.length} AI tool(s)`); - - // Step 4: Tool selection - const selectedIds = await askToolSelection( - detected.map((d) => ({ id: d.id, name: d.name })), - allAdapters, - ); - - // Step 5: MCP scope (global vs project) - const mcpScope: McpScope = await askMcpScope(); - - // Step 6: Language detection - const langResult = 
await detectLanguage(); - const language = await askLanguageConfirm(langResult.primaryLanguage); - - if (langResult.frameworks.length > 0) { - p.log.info(`Frameworks detected: ${langResult.frameworks.join(", ")}`); - } - - // Step 7: Feature toggles - const features = await askFeatureToggles(); - - // Step 8: Install - const installSpin = p.spinner(); - installSpin.start("Generating configuration..."); - - // Generate ai-hooks.config.ts (always project-level) - const configPath = await writeConfig(language, features); - - // Install MCP entries for each selected tool - const installedTools: string[] = []; - const mcpPaths: string[] = []; - - for (const id of selectedIds) { - const adapter: Adapter | undefined = registry.get(id); - if (!adapter) continue; - - // Generate and install ai-hooks configs - try { - const { loadConfig } = await import("@premierstudio/ai-hooks"); - const config = await loadConfig(configPath); - const configs = await adapter.generate(config.hooks); - await adapter.install(configs); - } catch { - // Some adapters may not support hook generation - } - - // Install MCP entry - const mcpPath = await installMcpEntry( - adapter, - `${serverUrl}/api/mcp`, - auth.access_token, - mcpScope, - ); - if (mcpPath) { - mcpPaths.push(mcpPath); - } - - installedTools.push(adapter.name); - } - - installSpin.stop("Configuration complete"); - - // Step 9: Summary - const dim = "\x1b[2m"; - const r = "\x1b[0m"; - const i4 = "\x1b[38;2;129;140;248m"; - const scopeLabel = mcpScope === "global" ? 
"global" : "project"; - - p.log.success(`${i4}Plannable is connected${r}`); - - const summary = [ - ` ${dim}tools${r} ${installedTools.join(", ")}`, - ` ${dim}scope${r} ${scopeLabel}`, - ` ${dim}language${r} ${language}`, - ` ${dim}config${r} ${configPath}`, - "", - ` ${dim}1.${r} Restart your AI tool to pick up the MCP connection`, - ` ${dim}2.${r} ${serverUrl}/dashboard`, - ` ${dim}3.${r} ${dim}npx @premierstudio/plannable status${r}`, - ].join("\n"); - p.log.message(summary); - - p.outro(""); -} diff --git a/packages/plannable/src/commands/status.ts b/packages/plannable/src/commands/status.ts deleted file mode 100644 index 8b31bc1..0000000 --- a/packages/plannable/src/commands/status.ts +++ /dev/null @@ -1,60 +0,0 @@ -import * as p from "@clack/prompts"; -import { registry } from "@premierstudio/ai-hooks/adapters"; -import { loadAuth, isTokenExpired } from "../auth/token-store.js"; -import { hasPlannableConfig } from "../config/ai-hooks-config.js"; -import { hasMcpEntry } from "../config/mcp-config.js"; - -export async function statusCommand(): Promise { - p.intro("Plannable — Status"); - - // Check authentication - const auth = await loadAuth(); - if (!auth) { - p.log.warn("Not authenticated. Run `npx @premierstudio/plannable setup` to connect."); - p.outro(""); - return; - } - - const expired = isTokenExpired(auth); - const authStatus = expired - ? "\x1b[33mexpired (will refresh on next use)\x1b[0m" - : "\x1b[32mactive\x1b[0m"; - - p.log.info(`Server: ${auth.server_url}`); - p.log.info(`Auth: ${authStatus}`); - p.log.info(`Scopes: ${auth.scopes.join(", ") || "none"}`); - - // Check config file - const hasConfig = hasPlannableConfig(); - p.log.info( - `Config: ${hasConfig ? 
"\x1b[32mai-hooks.config.ts (Plannable)\x1b[0m" : "\x1b[90mnot found\x1b[0m"}`, - ); - - // Check tools - p.log.info(""); - p.log.info("AI Tools:"); - - const detected = await registry.detectAll(); - const allIds = registry.list(); - - for (const id of allIds) { - const adapter = registry.get(id); - if (!adapter) continue; - - const isDetected = detected.some((d) => d.id === id); - const hasMcp = hasMcpEntry(adapter); - - let status: string; - if (isDetected && hasMcp) { - status = "\x1b[32mconnected\x1b[0m"; - } else if (isDetected) { - status = "\x1b[33mdetected (no MCP)\x1b[0m"; - } else { - status = "\x1b[90mnot detected\x1b[0m"; - } - - p.log.info(` ${adapter.name.padEnd(20)} ${status}`); - } - - p.outro(""); -} diff --git a/packages/plannable/src/config/ai-hooks-config.ts b/packages/plannable/src/config/ai-hooks-config.ts deleted file mode 100644 index 9aa38e7..0000000 --- a/packages/plannable/src/config/ai-hooks-config.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { readFile, writeFile, rm } from "node:fs/promises"; -import { existsSync, readFileSync } from "node:fs"; -import { resolve } from "node:path"; -import { generateHooksSource } from "./hooks-generator.js"; -import type { FeatureToggles } from "../ui/prompts.js"; - -const CONFIG_FILENAME = "ai-hooks.config.ts"; -const MARKER_COMMENT = "// Generated by plannable"; - -export function generateConfigSource(language: string, features: FeatureToggles): string { - const hooksSource = generateHooksSource(language, features); - - return `${MARKER_COMMENT} -// Do not edit manually — re-run \`npx @premierstudio/plannable setup\` to regenerate. 
- -import { defineConfig, hook, builtinHooks } from "@premierstudio/ai-hooks"; - -export default defineConfig({ - extends: [{ hooks: builtinHooks }], - - hooks: [ - ${hooksSource}, - ], - - settings: { - logLevel: "warn", - hookTimeout: 5000, - failMode: "open", - }, -}); -`; -} - -export async function writeConfig( - language: string, - features: FeatureToggles, - cwd: string = process.cwd(), -): Promise { - const configPath = resolve(cwd, CONFIG_FILENAME); - const source = generateConfigSource(language, features); - await writeFile(configPath, source, "utf-8"); - return configPath; -} - -export async function removeConfig(cwd: string = process.cwd()): Promise { - const configPath = resolve(cwd, CONFIG_FILENAME); - if (!existsSync(configPath)) return false; - - try { - const content = await readFile(configPath, "utf-8"); - if (!content.includes(MARKER_COMMENT)) { - return false; // Not generated by us, don't remove - } - await rm(configPath); - return true; - } catch { - return false; - } -} - -export function hasPlannableConfig(cwd: string = process.cwd()): boolean { - const configPath = resolve(cwd, CONFIG_FILENAME); - if (!existsSync(configPath)) return false; - - try { - const content = readFileSync(configPath, "utf-8"); - return content.includes(MARKER_COMMENT); - } catch { - return false; - } -} diff --git a/packages/plannable/src/config/hooks-generator.ts b/packages/plannable/src/config/hooks-generator.ts deleted file mode 100644 index 0d2cb78..0000000 --- a/packages/plannable/src/config/hooks-generator.ts +++ /dev/null @@ -1,418 +0,0 @@ -import type { FeatureToggles } from "../ui/prompts.js"; - -type HookSnippet = { - id: string; - name: string; - description: string; - events: string; - priority: number; - body: string; -}; - -function universalHooks(features: FeatureToggles): HookSnippet[] { - const hooks: HookSnippet[] = []; - - if (features.signals) { - hooks.push( - { - id: "plannable:signal-file-activity", - name: "Signal: File Activity", - 
description: "Collects file modification signals for Plannable", - events: '["tool:after"]', - priority: 900, - body: ` ctx.results.push({ - data: { - signal: { - type: "file_activity", - tool: ctx.tool.name, - event: ctx.event.type, - toolName: ctx.event.toolName, - timestamp: ctx.event.timestamp, - }, - }, - }); - await next();`, - }, - { - id: "plannable:signal-shell-activity", - name: "Signal: Shell Activity", - description: "Collects shell command signals for velocity tracking", - events: '["shell:after"]', - priority: 901, - body: ` ctx.results.push({ - data: { - signal: { - type: "shell_activity", - tool: ctx.tool.name, - command: ctx.event.command, - exitCode: ctx.event.exitCode, - duration: ctx.event.duration, - timestamp: ctx.event.timestamp, - }, - }, - }); - await next();`, - }, - { - id: "plannable:signal-tool-usage", - name: "Signal: Tool Usage", - description: "Collects tool usage signals for analytics", - events: '["tool:after", "mcp:after"]', - priority: 902, - body: ` ctx.results.push({ - data: { - signal: { - type: "tool_usage", - tool: ctx.tool.name, - toolName: ctx.event.type === "tool:after" ? 
ctx.event.toolName : "mcp", - duration: ctx.event.duration, - timestamp: ctx.event.timestamp, - }, - }, - }); - await next();`, - }, - ); - } - - // Work tracking hooks are always included - hooks.push( - { - id: "plannable:track-session-start", - name: "Track Session Start", - description: "Notifies Plannable when a developer starts an AI coding session", - events: '["session:start"]', - priority: 800, - body: ` ctx.results.push({ - data: { - plannable: { - event: "session_start", - tool: ctx.tool.name, - cwd: ctx.cwd, - timestamp: ctx.event.timestamp, - }, - }, - }); - await next();`, - }, - { - id: "plannable:track-session-end", - name: "Track Session End", - description: "Notifies Plannable when a developer ends an AI coding session", - events: '["session:end"]', - priority: 801, - body: ` ctx.results.push({ - data: { - plannable: { - event: "session_end", - tool: ctx.tool.name, - duration: ctx.event.duration, - timestamp: ctx.event.timestamp, - }, - }, - }); - await next();`, - }, - ); - - return hooks; -} - -function noTodosHook(): HookSnippet { - return { - id: "plannable:no-todos", - name: "No TODO Comments", - description: "Enforces no TODO/FIXME/HACK comments", - events: '["file:write", "file:edit"]', - priority: 20, - body: ` const content = ctx.event.type === "file:write" ? ctx.event.content : ctx.event.newContent; - if (/\\/\\/\\s*(TODO|FIXME|HACK|XXX)\\b/i.test(content)) { - ctx.results.push({ - blocked: true, - reason: "Code contains TODO/FIXME comments. Resolve the issue or create a work item instead.", - }); - return; - } - await next();`, - }; -} - -type LanguageHookSet = { - hooks: HookSnippet[]; -}; - -function typescriptHooks(): LanguageHookSet { - return { - hooks: [ - noTodosHook(), - { - id: "plannable:no-console-log", - name: "No Console.log", - description: "Enforces proper logging instead of console.log", - events: '["file:write", "file:edit"]', - priority: 21, - body: ` const content = ctx.event.type === "file:write" ? 
ctx.event.content : ctx.event.newContent; - const path = ctx.event.path; - if (path.includes(".test.") || path.includes(".spec.") || path.includes("__tests__") || path.endsWith(".config.ts") || path.endsWith(".config.js")) { - await next(); - return; - } - if (/console\\.(log|debug|info)\\s*\\(/.test(content)) { - ctx.results.push({ - blocked: true, - reason: "Production code should not contain console.log/debug/info. Use a proper logger.", - }); - return; - } - await next();`, - }, - { - id: "plannable:no-any", - name: "No Any Types", - description: "Enforces proper TypeScript types instead of 'any'", - events: '["file:write", "file:edit"]', - priority: 22, - body: ` const content = ctx.event.type === "file:write" ? ctx.event.content : ctx.event.newContent; - const path = ctx.event.path; - if (!path.endsWith(".ts") && !path.endsWith(".tsx")) { - await next(); - return; - } - if (/:s*any\\b/.test(content) || /as\\s+any\\b/.test(content)) { - ctx.results.push({ - blocked: true, - reason: "TypeScript 'any' type is not allowed. Use proper type annotations.", - }); - return; - } - await next();`, - }, - ], - }; -} - -function pythonHooks(): LanguageHookSet { - return { - hooks: [ - noTodosHook(), - { - id: "plannable:no-debug-print", - name: "No Debug Print", - description: "Enforces no print() statements in production code", - events: '["file:write", "file:edit"]', - priority: 21, - body: ` const content = ctx.event.type === "file:write" ? ctx.event.content : ctx.event.newContent; - const path = ctx.event.path; - if (path.includes("test_") || path.includes("_test.py") || path.includes("conftest")) { - await next(); - return; - } - if (/^\\s*print\\s*\\(/m.test(content)) { - ctx.results.push({ - blocked: true, - reason: "Production code should not contain print() statements. 
Use a proper logger.", - }); - return; - } - await next();`, - }, - { - id: "plannable:type-hints", - name: "Type Hints Check", - description: "Encourages type hints on function definitions", - events: '["file:write", "file:edit"]', - priority: 22, - body: ` const content = ctx.event.type === "file:write" ? ctx.event.content : ctx.event.newContent; - const path = ctx.event.path; - if (!path.endsWith(".py")) { - await next(); - return; - } - if (/^def\\s+\\w+\\([^)]*\\)\\s*:/m.test(content) && !/->/.test(content)) { - ctx.results.push({ - blocked: false, - reason: "Consider adding return type hints to function definitions.", - }); - } - await next();`, - }, - ], - }; -} - -function goHooks(): LanguageHookSet { - return { - hooks: [ - noTodosHook(), - { - id: "plannable:no-fmt-println", - name: "No fmt.Println", - description: "Enforces proper logging instead of fmt.Println", - events: '["file:write", "file:edit"]', - priority: 21, - body: ` const content = ctx.event.type === "file:write" ? ctx.event.content : ctx.event.newContent; - const path = ctx.event.path; - if (path.includes("_test.go")) { - await next(); - return; - } - if (/fmt\\.Print(ln|f)?\\s*\\(/.test(content)) { - ctx.results.push({ - blocked: true, - reason: "Production code should not use fmt.Print. Use a structured logger.", - }); - return; - } - await next();`, - }, - { - id: "plannable:error-handling", - name: "Error Handling Check", - description: "Checks for ignored error returns in Go", - events: '["file:write", "file:edit"]', - priority: 22, - body: ` const content = ctx.event.type === "file:write" ? 
ctx.event.content : ctx.event.newContent; - const path = ctx.event.path; - if (!path.endsWith(".go")) { - await next(); - return; - } - if (/\\b_\\s*=.*err/.test(content)) { - ctx.results.push({ - blocked: false, - reason: "Consider handling the error instead of ignoring it with _.", - }); - } - await next();`, - }, - ], - }; -} - -function rustHooks(): LanguageHookSet { - return { - hooks: [ - noTodosHook(), - { - id: "plannable:no-println", - name: "No println! Macro", - description: "Enforces proper logging instead of println!", - events: '["file:write", "file:edit"]', - priority: 21, - body: ` const content = ctx.event.type === "file:write" ? ctx.event.content : ctx.event.newContent; - const path = ctx.event.path; - if (path.includes("/tests/") || path.includes("/examples/")) { - await next(); - return; - } - if (/println!\\s*\\(/.test(content)) { - ctx.results.push({ - blocked: true, - reason: "Production code should not use println!. Use a logging crate (log, tracing).", - }); - return; - } - await next();`, - }, - { - id: "plannable:no-unwrap", - name: "No Unwrap", - description: "Discourages .unwrap() in production code", - events: '["file:write", "file:edit"]', - priority: 22, - body: ` const content = ctx.event.type === "file:write" ? ctx.event.content : ctx.event.newContent; - const path = ctx.event.path; - if (path.includes("/tests/") || path.includes("/examples/")) { - await next(); - return; - } - if (/\\.unwrap\\(\\)/.test(content)) { - ctx.results.push({ - blocked: false, - reason: "Consider using ? 
operator or .expect() with a descriptive message instead of .unwrap().", - }); - } - await next();`, - }, - ], - }; -} - -function csharpHooks(): LanguageHookSet { - return { - hooks: [ - noTodosHook(), - { - id: "plannable:no-console-writeline", - name: "No Console.WriteLine", - description: "Enforces proper logging instead of Console.WriteLine", - events: '["file:write", "file:edit"]', - priority: 21, - body: ` const content = ctx.event.type === "file:write" ? ctx.event.content : ctx.event.newContent; - const path = ctx.event.path; - if (path.includes("Test") || path.includes("test")) { - await next(); - return; - } - if (/Console\\.Write(Line)?\\s*\\(/.test(content)) { - ctx.results.push({ - blocked: true, - reason: "Production code should not use Console.WriteLine. Use ILogger.", - }); - return; - } - await next();`, - }, - ], - }; -} - -function getLanguageHooks(language: string): LanguageHookSet { - switch (language) { - case "typescript": - return typescriptHooks(); - case "javascript": - return typescriptHooks(); // Same console.log/no-any checks apply - case "python": - return pythonHooks(); - case "go": - return goHooks(); - case "rust": - return rustHooks(); - case "csharp": - return csharpHooks(); - default: - return { hooks: [noTodosHook()] }; - } -} - -function renderHook(snippet: HookSnippet, phase: "before" | "after"): string { - return `hook("${phase}", ${snippet.events}, async (ctx, next) => { -${snippet.body} -}) - .id("${snippet.id}") - .name("${snippet.name}") - .description("${snippet.description}") - .priority(${snippet.priority}) - .build()`; -} - -export function generateHooksSource(language: string, features: FeatureToggles): string { - const allHooks: string[] = []; - - // Language-specific guardrails - if (features.guardrails) { - const langHooks = getLanguageHooks(language); - for (const h of langHooks.hooks) { - allHooks.push(renderHook(h, "before")); - } - } - - // Universal hooks (signals, tracking) - const universal = 
universalHooks(features); - for (const h of universal) { - allHooks.push(renderHook(h, "after")); - } - - return allHooks.join(",\n\n "); -} diff --git a/packages/plannable/src/config/mcp-config.ts b/packages/plannable/src/config/mcp-config.ts deleted file mode 100644 index 19614a8..0000000 --- a/packages/plannable/src/config/mcp-config.ts +++ /dev/null @@ -1,327 +0,0 @@ -import { readFile, writeFile, mkdir } from "node:fs/promises"; -import { existsSync, readFileSync } from "node:fs"; -import { resolve, dirname, join } from "node:path"; -import { homedir } from "node:os"; -import { platform } from "node:os"; -import type { Adapter } from "@premierstudio/ai-hooks"; - -export type McpScope = "project" | "global"; - -type McpServerEntry = { - type?: string; - url: string; - headers?: Record; -}; - -type McpConfigFile = { - mcpServers?: Record; - [key: string]: unknown; -}; - -/** Claude Code's ~/.claude.json has a unique nested structure */ -type ClaudeCodeConfig = { - projects?: Record< - string, - { mcpServers?: Record; [key: string]: unknown } - >; - mcpServers?: Record; - [key: string]: unknown; -}; - -type ToolPaths = { - project: string; - global: string | null; -}; - -function getClaudeConfigPath(): string { - return join(homedir(), ".claude.json"); -} - -function getToolPaths(adapterId: string): ToolPaths | null { - const home = homedir(); - - switch (adapterId) { - case "claude-code": { - // Claude Code stores MCP servers in ~/.claude.json - // Project scope: projects[cwd].mcpServers - // Global scope: mcpServers (root) - const configPath = getClaudeConfigPath(); - return { project: configPath, global: configPath }; - } - case "cursor": - return { - project: ".cursor/mcp.json", - global: join(home, ".cursor", "mcp.json"), - }; - case "codex": - return { - project: ".codex/mcp.json", - global: join(home, ".codex", "mcp.json"), - }; - case "gemini-cli": - return { - project: ".gemini/settings.json", - global: join(home, ".gemini", "settings.json"), - }; - case 
"kiro": - return { - project: ".kiro/mcp.json", - global: join(home, ".kiro", "mcp.json"), - }; - case "amp": - return { - project: ".amp/mcp.json", - global: join(home, ".amp", "mcp.json"), - }; - case "goose": - return { - project: ".goose/mcp.json", - global: join(home, ".config", "goose", "mcp.json"), - }; - case "aider": - return { - project: ".aider/mcp.json", - global: join(home, ".aider", "mcp.json"), - }; - case "opencode": - return { - project: ".opencode/mcp.json", - global: join(home, ".opencode", "mcp.json"), - }; - case "continue": - return { - project: ".continue/mcp.json", - global: join(home, ".continue", "mcp.json"), - }; - case "roo-code": - return { - project: ".roo/mcp.json", - global: join(home, ".roo", "mcp.json"), - }; - case "warp": { - const os = platform(); - const globalDir = - os === "darwin" - ? join(home, "Library", "Application Support", "dev.warp.Warp") - : join(process.env.XDG_CONFIG_HOME ?? join(home, ".config"), "warp-terminal"); - return { - project: ".warp/mcp.json", - global: join(globalDir, "mcp.json"), - }; - } - case "droid": - return { - project: ".factory/mcp.json", - global: join(home, ".factory", "mcp.json"), - }; - default: - return null; - } -} - -function resolveConfigPath(adapterId: string, scope: McpScope): string | null { - const paths = getToolPaths(adapterId); - if (!paths) return null; - - // Claude Code always uses ~/.claude.json regardless of scope - if (adapterId === "claude-code") { - return getClaudeConfigPath(); - } - - if (scope === "global") { - return paths.global; - } - - return resolve(process.cwd(), paths.project); -} - -async function readJsonSafe(path: string): Promise { - if (!existsSync(path)) return null; - try { - const content = await readFile(path, "utf-8"); - return JSON.parse(content) as T; - } catch { - return null; - } -} - -export function supportsGlobal(adapterId: string): boolean { - const paths = getToolPaths(adapterId); - return paths?.global !== null; -} - -// ---- Claude Code 
helpers ---- - -async function installClaudeCodeEntry( - endpointUrl: string, - token: string, - scope: McpScope, -): Promise { - const configPath = getClaudeConfigPath(); - const config = (await readJsonSafe(configPath)) ?? {}; - - const entry: McpServerEntry = { - type: "http", - url: endpointUrl, - headers: { Authorization: `Bearer ${token}` }, - }; - - if (scope === "global") { - if (!config.mcpServers) config.mcpServers = {}; - config.mcpServers["plannable"] = entry; - } else { - const projectPath = process.cwd(); - if (!config.projects) config.projects = {}; - if (!config.projects[projectPath]) config.projects[projectPath] = {}; - const project = config.projects[projectPath]; - if (!project.mcpServers) project.mcpServers = {}; - project.mcpServers["plannable"] = entry; - } - - await writeFile(configPath, JSON.stringify(config, null, 2) + "\n", "utf-8"); - return configPath; -} - -async function removeClaudeCodeEntry(scope: McpScope): Promise { - const configPath = getClaudeConfigPath(); - const config = await readJsonSafe(configPath); - if (!config) return false; - - let removed = false; - - if (scope === "global") { - if (config.mcpServers?.["plannable"]) { - delete config.mcpServers["plannable"]; - removed = true; - } - } else { - const projectPath = process.cwd(); - const servers = config.projects?.[projectPath]?.mcpServers; - if (servers?.["plannable"]) { - delete servers["plannable"]; - removed = true; - } - } - - if (removed) { - await writeFile(configPath, JSON.stringify(config, null, 2) + "\n", "utf-8"); - } - return removed; -} - -function hasClaudeCodeEntry(scope?: McpScope): boolean { - const configPath = getClaudeConfigPath(); - if (!existsSync(configPath)) return false; - - try { - const content = readFileSync(configPath, "utf-8"); - const config = JSON.parse(content) as ClaudeCodeConfig; - - const scopes: McpScope[] = scope ? 
[scope] : ["project", "global"]; - - for (const s of scopes) { - if (s === "global") { - if (config.mcpServers?.["plannable"]) return true; - } else { - const projectPath = process.cwd(); - if (config.projects?.[projectPath]?.mcpServers?.["plannable"]) return true; - } - } - } catch { - return false; - } - - return false; -} - -// ---- Public API ---- - -export async function installMcpEntry( - adapter: Adapter, - endpointUrl: string, - token: string, - scope: McpScope = "project", -): Promise { - if (!getToolPaths(adapter.id)) return null; - - // Claude Code has a unique config structure - if (adapter.id === "claude-code") { - await installClaudeCodeEntry(endpointUrl, token, scope); - return scope === "project" - ? `~/.claude.json (project: ${process.cwd()})` - : "~/.claude.json (global)"; - } - - const fullPath = resolveConfigPath(adapter.id, scope); - if (!fullPath) return null; - - const existing = await readJsonSafe(fullPath); - const config: McpConfigFile = existing ?? {}; - - if (!config.mcpServers) { - config.mcpServers = {}; - } - - config.mcpServers["plannable"] = { - url: endpointUrl, - headers: { - Authorization: `Bearer ${token}`, - }, - }; - - await mkdir(dirname(fullPath), { recursive: true }); - await writeFile(fullPath, JSON.stringify(config, null, 2) + "\n", "utf-8"); - - // Return a display-friendly path - const paths = getToolPaths(adapter.id); - if (scope === "project" && paths) { - return paths.project; - } - return fullPath; -} - -export async function removeMcpEntry( - adapter: Adapter, - scope: McpScope = "project", -): Promise { - if (!getToolPaths(adapter.id)) return false; - - if (adapter.id === "claude-code") { - return removeClaudeCodeEntry(scope); - } - - const fullPath = resolveConfigPath(adapter.id, scope); - if (!fullPath || !existsSync(fullPath)) return false; - - const config = await readJsonSafe(fullPath); - if (!config?.mcpServers?.["plannable"]) return false; - - delete config.mcpServers["plannable"]; - await 
writeFile(fullPath, JSON.stringify(config, null, 2) + "\n", "utf-8"); - return true; -} - -export function hasMcpEntry(adapter: Adapter, scope?: McpScope): boolean { - if (!getToolPaths(adapter.id)) return false; - - if (adapter.id === "claude-code") { - return hasClaudeCodeEntry(scope); - } - - const scopes: McpScope[] = scope ? [scope] : ["project", "global"]; - - for (const s of scopes) { - const fullPath = resolveConfigPath(adapter.id, s); - if (!fullPath || !existsSync(fullPath)) continue; - - try { - const content = readFileSync(fullPath, "utf-8"); - const config = JSON.parse(content) as McpConfigFile; - if (config.mcpServers?.["plannable"]) return true; - } catch { - continue; - } - } - - return false; -} diff --git a/packages/plannable/src/detect/language.ts b/packages/plannable/src/detect/language.ts deleted file mode 100644 index f360781..0000000 --- a/packages/plannable/src/detect/language.ts +++ /dev/null @@ -1,187 +0,0 @@ -import { existsSync, readdirSync } from "node:fs"; -import { readFile } from "node:fs/promises"; -import { resolve } from "node:path"; - -export type DetectedLanguage = { - primaryLanguage: string; - frameworks: string[]; -}; - -type MarkerCheck = { - language: string; - markers: string[]; - frameworkDetectors?: Array<{ - file: string; - key: string; - frameworks: Record; - }>; -}; - -const MARKER_CHECKS: MarkerCheck[] = [ - { - language: "typescript", - markers: ["tsconfig.json"], - frameworkDetectors: [ - { - file: "package.json", - key: "dependencies", - frameworks: { - next: "Next.js", - react: "React", - vue: "Vue", - angular: "Angular", - svelte: "Svelte", - express: "Express", - fastify: "Fastify", - hono: "Hono", - nestjs: "NestJS", - }, - }, - ], - }, - { - language: "javascript", - markers: ["package.json", "jsconfig.json"], - frameworkDetectors: [ - { - file: "package.json", - key: "dependencies", - frameworks: { - next: "Next.js", - react: "React", - vue: "Vue", - express: "Express", - }, - }, - ], - }, - { - language: 
"python", - markers: ["pyproject.toml", "setup.py", "requirements.txt"], - frameworkDetectors: [ - { - file: "requirements.txt", - key: "__raw__", - frameworks: { - django: "Django", - flask: "Flask", - fastapi: "FastAPI", - }, - }, - ], - }, - { language: "go", markers: ["go.mod"] }, - { language: "rust", markers: ["Cargo.toml"] }, - { language: "csharp", markers: ["*.csproj", "*.sln"] }, - { language: "java", markers: ["build.gradle", "pom.xml"] }, - { language: "ruby", markers: ["Gemfile"] }, - { language: "php", markers: ["composer.json"] }, -]; - -async function detectFrameworks( - cwd: string, - detectors: MarkerCheck["frameworkDetectors"], -): Promise { - if (!detectors) return []; - - const frameworks: string[] = []; - - for (const detector of detectors) { - const filePath = resolve(cwd, detector.file); - if (!existsSync(filePath)) continue; - - try { - const content = await readFile(filePath, "utf-8"); - - if (detector.key === "__raw__") { - // Plain text search (e.g., requirements.txt) - const lower = content.toLowerCase(); - for (const [pkg, name] of Object.entries(detector.frameworks)) { - if (lower.includes(pkg)) { - frameworks.push(name); - } - } - } else { - // JSON key search (e.g., package.json dependencies) - const json = JSON.parse(content) as Record>; - const section = json[detector.key]; - if (section && typeof section === "object") { - for (const [pkg, name] of Object.entries(detector.frameworks)) { - if (pkg in section) { - frameworks.push(name); - } - } - } - - // Also check devDependencies for package.json - if (detector.key === "dependencies") { - const devSection = json["devDependencies"]; - if (devSection && typeof devSection === "object") { - for (const [pkg, name] of Object.entries(detector.frameworks)) { - if (pkg in devSection && !frameworks.includes(name)) { - frameworks.push(name); - } - } - } - } - } - } catch { - // Ignore parse errors - } - } - - return frameworks; -} - -function hasGlobMatch(cwd: string, pattern: string): 
boolean { - // Simple glob: just check if any file matches *.ext - if (pattern.startsWith("*")) { - const ext = pattern.slice(1); // e.g., ".csproj" - try { - const entries = readdirSync(cwd); - return entries.some((e) => e.endsWith(ext)); - } catch { - return false; - } - } - return existsSync(resolve(cwd, pattern)); -} - -export async function detectLanguage(cwd: string = process.cwd()): Promise { - // Check for TypeScript specifically: must have tsconfig.json OR typescript in package.json - const hasTsConfig = existsSync(resolve(cwd, "tsconfig.json")); - if (!hasTsConfig) { - // Check if package.json has typescript as a dependency - const pkgPath = resolve(cwd, "package.json"); - if (existsSync(pkgPath)) { - try { - const pkg = JSON.parse(await readFile(pkgPath, "utf-8")) as Record< - string, - Record - >; - const deps = pkg["dependencies"] ?? {}; - const devDeps = pkg["devDependencies"] ?? {}; - if ("typescript" in deps || "typescript" in devDeps) { - const tsCheck = MARKER_CHECKS.find((c) => c.language === "typescript"); - const frameworks = await detectFrameworks(cwd, tsCheck?.frameworkDetectors); - return { primaryLanguage: "typescript", frameworks }; - } - } catch { - // Ignore - } - } - } - - for (const check of MARKER_CHECKS) { - // For JavaScript, skip if we already matched TypeScript - if (check.language === "javascript" && hasTsConfig) continue; - - const found = check.markers.some((marker) => hasGlobMatch(cwd, marker)); - if (found) { - const frameworks = await detectFrameworks(cwd, check.frameworkDetectors); - return { primaryLanguage: check.language, frameworks }; - } - } - - return { primaryLanguage: "unknown", frameworks: [] }; -} diff --git a/packages/plannable/src/index.ts b/packages/plannable/src/index.ts deleted file mode 100644 index df0d3ce..0000000 --- a/packages/plannable/src/index.ts +++ /dev/null @@ -1,101 +0,0 @@ -import { registry } from "@premierstudio/ai-hooks/adapters"; - -// Import all adapters to register them -import 
"@premierstudio/ai-hooks/adapters/all"; - -const DEFAULT_SERVER = "https://plannable.ai"; - -const HELP = ` -plannable - Connect AI coding tools to Plannable - -USAGE: - plannable [command] [options] - -COMMANDS: - setup Interactive setup: login, detect tools, configure hooks (default) - remove Remove all Plannable configuration from this project - status Show current connection status - help Show this help message - -OPTIONS: - --server Plannable server URL (default: ${DEFAULT_SERVER}) - Also configurable via PLANNABLE_SERVER env var - -EXAMPLES: - npx @premierstudio/plannable # Setup with production server - npx @premierstudio/plannable --server https://your-instance.example.com - npx @premierstudio/plannable status # Check connection - npx @premierstudio/plannable remove # Clean removal -`; - -export { registry }; - -// Re-export preset (merged from @premierstudio/preset-plannable) -export { - plannablePreset, - enforceNoTodos, - enforceNoConsoleLog, - enforceTypeAnnotations, - signalFileActivity, - signalShellActivity, - signalToolUsage, - trackSessionStart, - trackSessionEnd, - createProtectedFilesHook, - createBranchNamingHook, -} from "./preset.js"; -export type { PlannablePresetOptions } from "./preset.js"; - -function resolveServerUrl(args: string[]): string { - const serverIdx = args.indexOf("--server"); - const serverValue = serverIdx !== -1 ? args[serverIdx + 1] : undefined; - if (serverValue) return serverValue; - return process.env.PLANNABLE_SERVER ?? 
DEFAULT_SERVER; -} - -function stripFlags(args: string[]): string[] { - const result: string[] = []; - for (let i = 0; i < args.length; i++) { - const arg = args[i]; - if (arg === "--server") { - i++; // skip value - } else if (arg) { - result.push(arg); - } - } - return result; -} - -export async function run(args: string[]): Promise { - const serverUrl = resolveServerUrl(args); - const cleanArgs = stripFlags(args); - const command = cleanArgs[0]; - - switch (command) { - case "setup": - case undefined: { - const { setupCommand } = await import("./commands/setup.js"); - await setupCommand(serverUrl); - break; - } - case "remove": { - const { removeCommand } = await import("./commands/remove.js"); - await removeCommand(); - break; - } - case "status": { - const { statusCommand } = await import("./commands/status.js"); - await statusCommand(); - break; - } - case "help": - case "--help": - case "-h": - console.log(HELP); - break; - default: - console.error(`Unknown command: ${command}`); - console.log(HELP); - process.exit(1); - } -} diff --git a/packages/plannable/src/preset.ts b/packages/plannable/src/preset.ts deleted file mode 100644 index 603f3a9..0000000 --- a/packages/plannable/src/preset.ts +++ /dev/null @@ -1,342 +0,0 @@ -/** - * Opinionated hooks preset for Plannable's autonomous PM-AI. - * When a developer installs this preset, Plannable can: - * - * 1. Assign and track work items via MCP - * 2. Enforce coding standards via before hooks - * 3. Validate work before it's marked complete - * 4. Coordinate across multiple developers' AI tools - * 5. 
Collect signals and metrics from development activity - * - * Usage in ai-hooks.config.ts: - * import { plannablePreset } from "@premierstudio/plannable"; - * - * export default defineConfig({ - * extends: [plannablePreset()], - * hooks: [ - * // Your additional hooks here - * ], - * }); - */ -import { hook, builtinHooks } from "@premierstudio/ai-hooks"; -import type { AiHooksConfig, HookDefinition } from "@premierstudio/ai-hooks"; - -export type PlannablePresetOptions = { - /** Plannable server URL for MCP communication. */ - serverUrl?: string; - /** Project ID in Plannable. */ - projectId?: string; - /** Whether to enforce coding standards. Default: true. */ - enforceStandards?: boolean; - /** Whether to collect activity signals. Default: true. */ - collectSignals?: boolean; - /** Whether to include built-in security hooks. Default: true. */ - includeSecurityHooks?: boolean; - /** File patterns to protect from modification. */ - protectedPatterns?: string[]; - /** Branch naming convention regex. */ - branchPattern?: RegExp; -}; - -/** - * Create a Plannable preset configuration. 
- */ -export function plannablePreset(options: PlannablePresetOptions = {}): AiHooksConfig { - const { - enforceStandards = true, - collectSignals = true, - includeSecurityHooks = true, - protectedPatterns = [], - branchPattern, - } = options; - - const hooks: HookDefinition[] = []; - - // Include built-in security hooks - if (includeSecurityHooks) { - hooks.push(...builtinHooks); - } - - // Standards enforcement hooks - if (enforceStandards) { - hooks.push(enforceNoTodos); - hooks.push(enforceNoConsoleLog); - hooks.push(enforceTypeAnnotations); - } - - // Signal collection hooks - if (collectSignals) { - hooks.push(signalFileActivity); - hooks.push(signalShellActivity); - hooks.push(signalToolUsage); - } - - // Protected file patterns - if (protectedPatterns.length > 0) { - hooks.push(createProtectedFilesHook(protectedPatterns)); - } - - // Branch naming convention - if (branchPattern) { - hooks.push(createBranchNamingHook(branchPattern)); - } - - // Work tracking hooks - hooks.push(trackSessionStart); - hooks.push(trackSessionEnd); - - return { hooks }; -} - -// ── Standards Enforcement ────────────────────────────────── - -const enforceNoTodos = hook("before", ["file:write", "file:edit"], async (ctx, next) => { - const content = ctx.event.type === "file:write" ? ctx.event.content : ctx.event.newContent; - - // Check for TODO/FIXME/HACK comments - if (/\/\/\s*(TODO|FIXME|HACK|XXX)\b/i.test(content)) { - ctx.results.push({ - blocked: true, - reason: "Code contains TODO/FIXME comments. Resolve the issue or create a work item instead.", - }); - return; - } - - await next(); -}) - .id("plannable:no-todos") - .name("No TODO Comments") - .description("Enforces no TODO/FIXME/HACK comments - create work items instead") - .priority(20) - .build(); - -const enforceNoConsoleLog = hook("before", ["file:write", "file:edit"], async (ctx, next) => { - const content = ctx.event.type === "file:write" ? 
ctx.event.content : ctx.event.newContent; - - const path = ctx.event.path; - - // Skip test files and config files - if ( - path.includes(".test.") || - path.includes(".spec.") || - path.includes("__tests__") || - path.endsWith(".config.ts") || - path.endsWith(".config.js") - ) { - await next(); - return; - } - - if (/console\.(log|debug|info)\s*\(/.test(content)) { - ctx.results.push({ - blocked: true, - reason: "Production code should not contain console.log/debug/info. Use a proper logger.", - }); - return; - } - - await next(); -}) - .id("plannable:no-console-log") - .name("No Console.log") - .description("Enforces proper logging instead of console.log in production code") - .priority(21) - .build(); - -const enforceTypeAnnotations = hook("before", ["file:write", "file:edit"], async (ctx, next) => { - const content = ctx.event.type === "file:write" ? ctx.event.content : ctx.event.newContent; - - const path = ctx.event.path; - - // Only check TypeScript files - if (!path.endsWith(".ts") && !path.endsWith(".tsx")) { - await next(); - return; - } - - // Check for `any` type usage - if (/:\s*any\b/.test(content) || /as\s+any\b/.test(content)) { - ctx.results.push({ - blocked: true, - reason: "TypeScript 'any' type is not allowed. 
Use proper type annotations.", - }); - return; - } - - await next(); -}) - .id("plannable:no-any") - .name("No Any Types") - .description("Enforces proper TypeScript types instead of 'any'") - .priority(22) - .build(); - -// ── Signal Collection ────────────────────────────────────── - -const signalFileActivity = hook("after", ["tool:after"], async (ctx, next) => { - ctx.results.push({ - data: { - signal: { - type: "file_activity", - tool: ctx.tool.name, - event: ctx.event.type, - toolName: ctx.event.toolName, - timestamp: ctx.event.timestamp, - }, - }, - }); - await next(); -}) - .id("plannable:signal-file-activity") - .name("Signal: File Activity") - .description("Collects file modification signals for Plannable's risk radar") - .priority(900) - .build(); - -const signalShellActivity = hook("after", ["shell:after"], async (ctx, next) => { - ctx.results.push({ - data: { - signal: { - type: "shell_activity", - tool: ctx.tool.name, - command: ctx.event.command, - exitCode: ctx.event.exitCode, - duration: ctx.event.duration, - timestamp: ctx.event.timestamp, - }, - }, - }); - await next(); -}) - .id("plannable:signal-shell-activity") - .name("Signal: Shell Activity") - .description("Collects shell command signals for velocity tracking") - .priority(901) - .build(); - -const signalToolUsage = hook("after", ["tool:after", "mcp:after"], async (ctx, next) => { - ctx.results.push({ - data: { - signal: { - type: "tool_usage", - tool: ctx.tool.name, - toolName: ctx.event.type === "tool:after" ? 
ctx.event.toolName : "mcp", - duration: ctx.event.duration, - timestamp: ctx.event.timestamp, - }, - }, - }); - await next(); -}) - .id("plannable:signal-tool-usage") - .name("Signal: Tool Usage") - .description("Collects tool usage signals for Plannable's analytics") - .priority(902) - .build(); - -// ── Work Tracking ────────────────────────────────────────── - -const trackSessionStart = hook("after", ["session:start"], async (ctx, next) => { - ctx.results.push({ - data: { - plannable: { - event: "session_start", - tool: ctx.tool.name, - cwd: ctx.cwd, - timestamp: ctx.event.timestamp, - }, - }, - }); - await next(); -}) - .id("plannable:track-session-start") - .name("Track Session Start") - .description("Notifies Plannable when a developer starts an AI coding session") - .priority(800) - .build(); - -const trackSessionEnd = hook("after", ["session:end"], async (ctx, next) => { - ctx.results.push({ - data: { - plannable: { - event: "session_end", - tool: ctx.tool.name, - duration: ctx.event.duration, - timestamp: ctx.event.timestamp, - }, - }, - }); - await next(); -}) - .id("plannable:track-session-end") - .name("Track Session End") - .description("Notifies Plannable when a developer ends an AI coding session") - .priority(801) - .build(); - -// ── Dynamic Hook Factories ───────────────────────────────── - -function createProtectedFilesHook(patterns: string[]): HookDefinition { - const regexes = patterns.map((p) => new RegExp(p)); - - return hook("before", ["file:write", "file:edit", "file:delete"], async (ctx, next) => { - const path = ctx.event.path; - const matched = regexes.find((r) => r.test(path)); - - if (matched) { - ctx.results.push({ - blocked: true, - reason: `File "${path}" is protected by Plannable policy. 
Pattern: ${matched.source}`, - }); - return; - } - - await next(); - }) - .id("plannable:protected-files") - .name("Protected Files") - .description("Prevents modification of files matching protected patterns") - .priority(5) - .build(); -} - -function createBranchNamingHook(pattern: RegExp): HookDefinition { - return hook("before", ["shell:before"], async (ctx, next) => { - const command = ctx.event.command; - - // Check git checkout -b and git branch commands - const branchMatch = command.match(/git\s+(?:checkout\s+-b|branch)\s+(\S+)/); - if (branchMatch) { - const branchName = branchMatch[1]; - if (branchName && !pattern.test(branchName)) { - ctx.results.push({ - blocked: true, - reason: `Branch name "${branchName}" doesn't match required pattern: ${pattern.source}`, - }); - return; - } - } - - await next(); - }) - .id("plannable:branch-naming") - .name("Branch Naming Convention") - .description("Enforces branch naming conventions") - .priority(15) - .build(); -} - -// ── Exports ──────────────────────────────────────────────── - -export { - enforceNoTodos, - enforceNoConsoleLog, - enforceTypeAnnotations, - signalFileActivity, - signalShellActivity, - signalToolUsage, - trackSessionStart, - trackSessionEnd, - createProtectedFilesHook, - createBranchNamingHook, -}; diff --git a/packages/plannable/src/ui/brand.ts b/packages/plannable/src/ui/brand.ts deleted file mode 100644 index 9d67498..0000000 --- a/packages/plannable/src/ui/brand.ts +++ /dev/null @@ -1,30 +0,0 @@ -import * as p from "@clack/prompts"; - -// Brand indigo palette — true color ANSI -const I = { - 100: "\x1b[38;2;224;231;255m", - 300: "\x1b[38;2;165;180;252m", - 400: "\x1b[38;2;129;140;248m", - 500: "\x1b[38;2;99;102;241m", - 600: "\x1b[38;2;79;70;229m", -} as const; - -const DIM = "\x1b[2m"; -const R = "\x1b[0m"; - -function shimmer(text: string): string { - const gradient = [I[600], I[500], I[400], I[300], I[100], I[300], I[400], I[500], I[600]]; - return ( - text - .split("") - .map((ch, i) => { 
- const ci = Math.round((i / Math.max(text.length - 1, 1)) * (gradient.length - 1)); - return `${gradient[ci] ?? ""}${ch}`; - }) - .join("") + R - ); -} - -export function showBanner(version: string): void { - p.intro(`${I[400]}◆${R} ${shimmer("plannable")} ${DIM}v${version}${R}`); -} diff --git a/packages/plannable/src/ui/prompts.ts b/packages/plannable/src/ui/prompts.ts deleted file mode 100644 index db982e2..0000000 --- a/packages/plannable/src/ui/prompts.ts +++ /dev/null @@ -1,124 +0,0 @@ -import * as p from "@clack/prompts"; - -function ensureNotCancelled(value: T | symbol): T { - if (p.isCancel(value)) { - p.cancel("Setup cancelled."); - process.exit(0); - } - return value; -} - -export async function askToolSelection( - detected: Array<{ id: string; name: string }>, - all: Array<{ id: string; name: string }>, -): Promise { - const detectedIds = new Set(detected.map((d) => d.id)); - const options = all.map((tool) => ({ - value: tool.id, - label: tool.name, - hint: detectedIds.has(tool.id) ? "detected" : undefined, - })); - - return ensureNotCancelled( - await p.multiselect({ - message: "Which AI tools should connect to Plannable?", - options, - initialValues: detected.map((d) => d.id), - required: true, - }), - ); -} - -export async function askLanguageConfirm(language: string): Promise { - const confirmed = ensureNotCancelled( - await p.confirm({ - message: `Detected project language: ${language}. 
Correct?`, - }), - ); - - if (confirmed) return language; - - return ensureNotCancelled( - await p.select({ - message: "Select your primary project language", - options: [ - { value: "typescript", label: "TypeScript" }, - { value: "javascript", label: "JavaScript" }, - { value: "python", label: "Python" }, - { value: "go", label: "Go" }, - { value: "rust", label: "Rust" }, - { value: "csharp", label: ".NET / C#" }, - { value: "java", label: "Java / Kotlin" }, - { value: "ruby", label: "Ruby" }, - { value: "php", label: "PHP" }, - ], - }), - ); -} - -export type FeatureToggles = { - guardrails: boolean; - signals: boolean; - protectedFiles: boolean; -}; - -type FeatureKey = "guardrails" | "signals" | "protectedFiles"; - -export async function askFeatureToggles(): Promise { - const features = ensureNotCancelled( - await p.multiselect({ - message: "Which features do you want to enable?", - options: [ - { - value: "guardrails", - label: "Code guardrails", - hint: "language-specific linting hooks", - }, - { - value: "signals", - label: "Activity signals", - hint: "file, shell, tool usage tracking", - }, - { - value: "protectedFiles", - label: "Protected files", - hint: "block modifications to critical files", - }, - ], - initialValues: ["guardrails", "signals"], - required: false, - }), - ); - - return { - guardrails: features.includes("guardrails"), - signals: features.includes("signals"), - protectedFiles: features.includes("protectedFiles"), - }; -} - -export type McpScopeChoice = "global" | "project"; - -export async function askMcpScope(): Promise { - return ensureNotCancelled( - await p.select({ - message: "Where should the Plannable MCP connection be installed?", - options: [ - { - value: "global", - label: "Global (all projects)", - hint: "writes to user-level config (~/.claude/, ~/.cursor/, etc.)", - }, - { - value: "project", - label: "This project only", - hint: "writes to project-level config (.claude/, .cursor/, etc.)", - }, - ], - }), - ); -} - -export 
async function askConfirm(message: string): Promise { - return ensureNotCancelled(await p.confirm({ message })); -} diff --git a/packages/rules/CLAUDE.md b/packages/rules/CLAUDE.md new file mode 100644 index 0000000..901cc4f --- /dev/null +++ b/packages/rules/CLAUDE.md @@ -0,0 +1,26 @@ +# packages/rules + +Project rules configuration. Distinguished by its scoping system and priority. + +## Key Types (`types/definition.ts`) + +`RuleDefinition`: `id`, `name`, `description?`, `content`, `scope`, `priority?`, `tags?`, `enabled?` + +`RuleScope` is the unique feature — four scoping modes: + +- `{ type: "always" }` — rule applies everywhere +- `{ type: "glob", patterns: string[] }` — rule applies to matching file paths +- `{ type: "manual" }` — user-invoked only +- `{ type: "agent", agentId: string }` — scoped to a specific agent + +## Naming Exception + +This package exports `defineRulesConfig()` — NOT `defineConfig()` like every other package. This is intentional (avoids ambiguity with the more generic name). + +## Markdown Format + +YAML frontmatter with `description` and `globs` (if glob scope); `priority` lives on the rule definition but is not written to frontmatter. Claude Code adapter generates `.claude/rules/*.md`. + +## CLI Commands + +`detect`, `generate`, `install`, `import`, `sync`, `export`, `help` (no `init`). 
diff --git a/packages/rules/package.json b/packages/rules/package.json new file mode 100644 index 0000000..d46a820 --- /dev/null +++ b/packages/rules/package.json @@ -0,0 +1,52 @@ +{ + "name": "@premierstudio/ai-rules", + "version": "1.1.8", + "description": "Universal project rules configuration for AI coding tools", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/PremierStudio/ai-tools" + }, + "bin": { + "ai-rules": "dist/cli/bin.js" + }, + "files": [ + "dist", + "README.md", + "LICENSE" + ], + "type": "module", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.js" + }, + "./adapters": { + "types": "./dist/adapters/index.d.ts", + "import": "./dist/adapters/index.js" + }, + "./adapters/all": { + "types": "./dist/adapters/all.d.ts", + "import": "./dist/adapters/all.js" + }, + "./cli": { + "types": "./dist/cli/index.d.ts", + "import": "./dist/cli/index.js" + } + }, + "publishConfig": { + "access": "public" + }, + "scripts": { + "build": "tsup", + "dev": "tsup --watch", + "typecheck": "tsc --noEmit", + "test": "vitest run", + "clean": "rm -rf dist" + }, + "engines": { + "node": ">=22.0.0" + } +} diff --git a/packages/rules/src/adapters/all.ts b/packages/rules/src/adapters/all.ts new file mode 100644 index 0000000..982629d --- /dev/null +++ b/packages/rules/src/adapters/all.ts @@ -0,0 +1,13 @@ +import "./claude-code.js"; +import "./cursor.js"; +import "./cline.js"; +import "./codex.js"; +import "./gemini-cli.js"; +import "./amp.js"; +import "./kiro.js"; +import "./opencode.js"; +import "./droid.js"; +import "./copilot.js"; +import "./windsurf.js"; +import "./roo-code.js"; +import "./continue.js"; diff --git a/packages/rules/src/adapters/amp.test.ts b/packages/rules/src/adapters/amp.test.ts new file mode 100644 index 0000000..68c9f4c --- /dev/null +++ b/packages/rules/src/adapters/amp.test.ts @@ -0,0 +1,81 @@ +import { describe, it, expect, 
vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ registry: { register: vi.fn() } })); +vi.mock("node:fs", () => ({ existsSync: vi.fn(() => false) })); +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { AmpRuleAdapter } from "./amp.js"; + +describe("AmpRuleAdapter", () => { + let adapter: AmpRuleAdapter; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new AmpRuleAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("amp")); + it("has correct name", () => expect(adapter.name).toBe("Amp")); + it("has correct configDir", () => expect(adapter.configDir).toBe(".amp/rules")); + }); + + describe("generate", () => { + it("generates files at correct path", async () => { + const files = await adapter.generate([ + { id: "test", name: "Test", content: "Content", scope: { type: "always" } }, + ]); + expect(files[0].path).toBe(".amp/rules/test.md"); + }); + + it("handles empty rules", async () => { + expect(await adapter.generate([])).toHaveLength(0); + }); + }); + + describe("import", () => { + it("returns empty when dir missing", async () => { + vi.mocked(existsSync).mockReturnValue(false); + expect(await adapter.import("/test")).toEqual([]); + }); + + it("imports rules", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# My Rule\n\nContent here."); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("My Rule"); + }); + + it("uses process.cwd() when cwd is not provided", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + + it("skips non-markdown files", async () => { + 
vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports rule without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("plain"); + expect(result[0].content).toBe("Just content, no heading"); + }); + }); +}); diff --git a/packages/rules/src/adapters/amp.ts b/packages/rules/src/adapters/amp.ts new file mode 100644 index 0000000..b95a149 --- /dev/null +++ b/packages/rules/src/adapters/amp.ts @@ -0,0 +1,14 @@ +import { SimpleMarkdownRuleAdapter } from "./simple-adapter.js"; +import { registry } from "./registry.js"; + +export class AmpRuleAdapter extends SimpleMarkdownRuleAdapter { + readonly id = "amp"; + readonly name = "Amp"; + readonly nativeSupport = true; + readonly configDir = ".amp/rules"; + readonly command = "amp"; +} + +const adapter = new AmpRuleAdapter(); +registry.register(adapter); +export default adapter; diff --git a/packages/rules/src/adapters/base.test.ts b/packages/rules/src/adapters/base.test.ts new file mode 100644 index 0000000..4e32fa5 --- /dev/null +++ b/packages/rules/src/adapters/base.test.ts @@ -0,0 +1,177 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { BaseRuleAdapter } from "./base.js"; +import type { RuleDefinition, GeneratedFile } from "../types/index.js"; + +// Concrete subclass for testing the abstract BaseRuleAdapter +class TestRuleAdapter extends BaseRuleAdapter { + readonly id = "test-rules"; + readonly name = "Test Rules"; + readonly nativeSupport = true; + readonly configDir = ".test/rules"; + + async generate(rules: 
RuleDefinition[]): Promise { + return rules.map((r) => ({ + path: `.test/rules/${r.id}.md`, + content: `# ${r.name}\n\n${r.content}\n`, + format: "md" as const, + })); + } + + async import(_cwd?: string): Promise { + return []; + } +} + +// Mock node:fs and node:fs/promises +vi.mock("node:fs", () => ({ + existsSync: vi.fn(), +})); + +vi.mock("node:fs/promises", () => ({ + writeFile: vi.fn(), + mkdir: vi.fn(), +})); + +// Import mocked modules so we can control them +import { existsSync } from "node:fs"; +import { writeFile, mkdir } from "node:fs/promises"; + +const mockedExistsSync = vi.mocked(existsSync); +const mockedWriteFile = vi.mocked(writeFile); +const mockedMkdir = vi.mocked(mkdir); + +describe("BaseRuleAdapter", () => { + let adapter: TestRuleAdapter; + + beforeEach(() => { + adapter = new TestRuleAdapter(); + vi.clearAllMocks(); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + describe("abstract properties", () => { + it("exposes id, name, nativeSupport, and configDir", () => { + expect(adapter.id).toBe("test-rules"); + expect(adapter.name).toBe("Test Rules"); + expect(adapter.nativeSupport).toBe(true); + expect(adapter.configDir).toBe(".test/rules"); + }); + }); + + describe("detect()", () => { + it("returns true when configDir exists", async () => { + mockedExistsSync.mockReturnValue(true); + const result = await adapter.detect(); + expect(result).toBe(true); + }); + + it("returns false when configDir does not exist", async () => { + mockedExistsSync.mockReturnValue(false); + const result = await adapter.detect(); + expect(result).toBe(false); + }); + + it("uses provided cwd to resolve configDir", async () => { + mockedExistsSync.mockReturnValue(true); + await adapter.detect("/custom/dir"); + expect(mockedExistsSync).toHaveBeenCalledWith(expect.stringContaining("custom/dir")); + }); + + it("uses process.cwd() when no cwd is provided", async () => { + mockedExistsSync.mockReturnValue(false); + await adapter.detect(); + 
expect(mockedExistsSync).toHaveBeenCalledWith(expect.stringContaining(adapter.configDir)); + }); + }); + + describe("generate()", () => { + it("calls the subclass implementation with rules", async () => { + const rules: RuleDefinition[] = [ + { + id: "typescript", + name: "TypeScript Standards", + content: "Use strict TypeScript", + scope: { type: "always" }, + }, + ]; + const files = await adapter.generate(rules); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".test/rules/typescript.md"); + expect(files[0]?.content).toContain("TypeScript Standards"); + }); + + it("handles empty rules array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + }); + + describe("install()", () => { + it("writes all files to disk", async () => { + mockedMkdir.mockResolvedValue(undefined); + mockedWriteFile.mockResolvedValue(undefined); + + const files: GeneratedFile[] = [ + { path: ".rules/rule1.md", content: "# Rule 1", format: "md" }, + { path: ".rules/rule2.md", content: "# Rule 2", format: "md" }, + ]; + + await adapter.install(files); + + expect(mockedMkdir).toHaveBeenCalledTimes(2); + expect(mockedWriteFile).toHaveBeenCalledTimes(2); + expect(mockedWriteFile).toHaveBeenCalledWith( + expect.stringContaining("rule1.md"), + "# Rule 1", + "utf-8", + ); + }); + + it("creates parent directories recursively", async () => { + mockedMkdir.mockResolvedValue(undefined); + mockedWriteFile.mockResolvedValue(undefined); + + const files: GeneratedFile[] = [ + { path: "deep/nested/dir/rule.md", content: "test", format: "md" }, + ]; + + await adapter.install(files); + + expect(mockedMkdir).toHaveBeenCalledWith(expect.stringContaining("deep/nested/dir"), { + recursive: true, + }); + }); + + it("handles empty file array", async () => { + await adapter.install([]); + expect(mockedMkdir).not.toHaveBeenCalled(); + expect(mockedWriteFile).not.toHaveBeenCalled(); + }); + + it("uses provided cwd to resolve file paths", async () => { + 
mockedMkdir.mockResolvedValue(undefined); + mockedWriteFile.mockResolvedValue(undefined); + + const files: GeneratedFile[] = [{ path: "rule.md", content: "test", format: "md" }]; + + await adapter.install(files, "/custom/project"); + + expect(mockedWriteFile).toHaveBeenCalledWith( + expect.stringContaining("/custom/project"), + "test", + "utf-8", + ); + }); + }); + + describe("uninstall()", () => { + it("is a no-op by default", async () => { + // Default implementation does nothing; subclasses override + await adapter.uninstall(); + // No error thrown means it worked + }); + }); +}); diff --git a/packages/rules/src/adapters/base.ts b/packages/rules/src/adapters/base.ts new file mode 100644 index 0000000..5ec7002 --- /dev/null +++ b/packages/rules/src/adapters/base.ts @@ -0,0 +1,45 @@ +import type { RuleDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { writeFile, mkdir } from "node:fs/promises"; +import { dirname, resolve } from "node:path"; + +export abstract class BaseRuleAdapter { + abstract readonly id: string; + abstract readonly name: string; + abstract readonly nativeSupport: boolean; + abstract readonly configDir: string; + + /** CLI binary name for detection (e.g., "claude", "cursor"). Override in subclass. */ + readonly command?: string; + + abstract generate(rules: RuleDefinition[]): Promise; + abstract import(cwd?: string): Promise; + + async detect(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const hasDir = existsSync(resolve(dir, this.configDir)); + if (hasDir) return true; + if (this.command) return this.commandExists(this.command); + return false; + } + + protected async commandExists(command: string): Promise { + const { exec } = await import("node:child_process"); + return new Promise((ok) => { + exec(`which ${command}`, (error: Error | null) => { + ok(!error); + }); + }); + } + + async install(files: GeneratedFile[], cwd?: string): Promise { + const dir = cwd ?? 
process.cwd(); + for (const file of files) { + const fullPath = resolve(dir, file.path); + await mkdir(dirname(fullPath), { recursive: true }); + await writeFile(fullPath, file.content, "utf-8"); + } + } + + async uninstall(_cwd?: string): Promise {} +} diff --git a/packages/rules/src/adapters/claude-code.test.ts b/packages/rules/src/adapters/claude-code.test.ts new file mode 100644 index 0000000..1445c49 --- /dev/null +++ b/packages/rules/src/adapters/claude-code.test.ts @@ -0,0 +1,202 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +vi.mock("node:child_process", () => ({ + exec: vi.fn((_cmd: string, callback: (error: Error | null) => void) => { + callback(new Error("not found")); + }), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { ClaudeCodeRuleAdapter } from "./claude-code.js"; +import type { RuleDefinition } from "../types/index.js"; + +describe("ClaudeCodeRuleAdapter", () => { + let adapter: ClaudeCodeRuleAdapter; + + const testRule: RuleDefinition = { + id: "typescript", + name: "TypeScript Standards", + description: "TypeScript coding standards", + content: "Always use strict TypeScript.\nNo any types.", + scope: { type: "glob", patterns: ["*.ts", "*.tsx"] }, + }; + + const alwaysRule: RuleDefinition = { + id: "general", + name: "General", + description: "General rules", + content: "Be concise.", + scope: { type: "always" }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new ClaudeCodeRuleAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("claude-code")); + it("has correct name", () => expect(adapter.name).toBe("Claude 
Code")); + it("has correct configDir", () => expect(adapter.configDir).toBe(".claude/rules")); + it("has nativeSupport true", () => expect(adapter.nativeSupport).toBe(true)); + }); + + describe("generate", () => { + it("generates one file per rule", async () => { + const files = await adapter.generate([testRule]); + expect(files).toHaveLength(1); + expect(files[0].path).toBe(".claude/rules/typescript.md"); + expect(files[0].format).toBe("md"); + }); + + it("includes frontmatter with description and globs", async () => { + const files = await adapter.generate([testRule]); + expect(files[0].content).toContain("---"); + expect(files[0].content).toContain("description: TypeScript coding standards"); + expect(files[0].content).toContain('- "*.ts"'); + expect(files[0].content).toContain('- "*.tsx"'); + }); + + it("includes rule content after frontmatter", async () => { + const files = await adapter.generate([testRule]); + expect(files[0].content).toContain("Always use strict TypeScript."); + expect(files[0].content).toContain("No any types."); + }); + + it("handles always scope without globs", async () => { + const files = await adapter.generate([alwaysRule]); + expect(files[0].content).toContain("description: General rules"); + expect(files[0].content).not.toContain("globs:"); + }); + + it("generates rule without description", async () => { + const noDescRule: RuleDefinition = { + id: "nodesc", + name: "No Description", + content: "Content only.", + scope: { type: "always" }, + }; + const files = await adapter.generate([noDescRule]); + expect(files[0].content).not.toContain("description:"); + expect(files[0].content).toContain("Content only."); + }); + + it("generates rule with glob scope patterns", async () => { + const files = await adapter.generate([testRule]); + expect(files[0].content).toContain("globs:"); + expect(files[0].content).toContain('- "*.ts"'); + expect(files[0].content).toContain('- "*.tsx"'); + }); + + it("handles empty rules array", async () => { + 
const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + + it("generates multiple files for multiple rules", async () => { + const files = await adapter.generate([testRule, alwaysRule]); + expect(files).toHaveLength(2); + expect(files[0].path).toBe(".claude/rules/typescript.md"); + expect(files[1].path).toBe(".claude/rules/general.md"); + }); + }); + + describe("import", () => { + it("returns empty when dir missing", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports rules with frontmatter", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["ts.md"] as unknown); + vi.mocked(readFile).mockResolvedValue( + '---\ndescription: TS rules\nglobs:\n - "*.ts"\n---\n\nUse strict TS.', + ); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0].id).toBe("ts"); + expect(result[0].description).toBe("TS rules"); + expect(result[0].scope).toEqual({ type: "glob", patterns: ["*.ts"] }); + expect(result[0].content).toBe("Use strict TS."); + }); + + it("imports rules without frontmatter", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["simple.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just plain content."); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0].id).toBe("simple"); + expect(result[0].content).toBe("Just plain content."); + expect(result[0].scope).toEqual({ type: "always" }); + }); + + it("uses process.cwd() when cwd is not provided", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + + it("imports rules with frontmatter but no globs", async () => { + vi.mocked(existsSync).mockReturnValue(true); + 
vi.mocked(readdir).mockResolvedValue(["general.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("---\ndescription: General rules\n---\n\nBe concise."); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0].id).toBe("general"); + expect(result[0].description).toBe("General rules"); + expect(result[0].scope).toEqual({ type: "always" }); + expect(result[0].content).toBe("Be concise."); + }); + + it("skips non-md files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Content"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0].id).toBe("rule"); + }); + }); + + describe("detect", () => { + it("returns true when configDir exists", async () => { + vi.mocked(existsSync).mockReturnValue(true); + expect(await adapter.detect("/test")).toBe(true); + }); + + it("returns false when configDir missing and command not found", async () => { + vi.mocked(existsSync).mockReturnValue(false); + expect(await adapter.detect("/test")).toBe(false); + }); + + it("returns true when configDir missing but command exists", async () => { + const { exec } = await import("node:child_process"); + vi.mocked(exec).mockImplementation((_cmd: unknown, callback: unknown) => { + (callback as (error: Error | null) => void)(null); + return undefined as never; + }); + vi.mocked(existsSync).mockReturnValue(false); + expect(await adapter.detect("/test")).toBe(true); + }); + }); +}); diff --git a/packages/rules/src/adapters/claude-code.ts b/packages/rules/src/adapters/claude-code.ts new file mode 100644 index 0000000..d0ffde2 --- /dev/null +++ b/packages/rules/src/adapters/claude-code.ts @@ -0,0 +1,94 @@ +import { BaseRuleAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { RuleDefinition, GeneratedFile } from "../types/index.js"; +import { 
existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { resolve, basename } from "node:path"; + +export class ClaudeCodeRuleAdapter extends BaseRuleAdapter { + readonly id = "claude-code"; + readonly name = "Claude Code"; + readonly nativeSupport = true; + readonly configDir = ".claude/rules"; + readonly command = "claude"; + + async generate(rules: RuleDefinition[]): Promise { + return rules.map((rule) => ({ + path: `${this.configDir}/${rule.id}.md`, + content: this.formatRule(rule), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const rulesDir = resolve(dir, this.configDir); + if (!existsSync(rulesDir)) return []; + + const files = await readdir(rulesDir); + const rules: RuleDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(rulesDir, file), "utf-8"); + const id = basename(file, ".md"); + rules.push(this.parseRule(id, content)); + } + + return rules; + } + + private formatRule(rule: RuleDefinition): string { + let md = "---\n"; + if (rule.description) md += `description: ${rule.description}\n`; + if (rule.scope.type === "glob" && rule.scope.patterns.length > 0) { + md += "globs:\n"; + for (const pattern of rule.scope.patterns) { + md += ` - "${pattern}"\n`; + } + } + md += "---\n\n"; + md += rule.content + "\n"; + return md; + } + + private parseRule(id: string, raw: string): RuleDefinition { + const rule: RuleDefinition = { id, name: id, content: "", scope: { type: "always" } }; + + if (raw.startsWith("---")) { + const endIdx = raw.indexOf("---", 3); + if (endIdx !== -1) { + const fm = raw.slice(3, endIdx).trim(); + const body = raw.slice(endIdx + 3).trim(); + + const globs: string[] = []; + const lines = fm.split("\n"); + for (const line of lines) { + if (line.startsWith("description:")) { + rule.description = line.slice(12).trim(); + } else if (line.trim().startsWith("- ")) { 
+ const pattern = line + .trim() + .slice(2) + .replace(/^["']|["']$/g, ""); + globs.push(pattern); + } + } + + if (globs.length > 0) { + rule.scope = { type: "glob", patterns: globs }; + } + + rule.content = body; + } + } else { + rule.content = raw.trim(); + } + + return rule; + } +} + +const adapter = new ClaudeCodeRuleAdapter(); +registry.register(adapter); +export default adapter; diff --git a/packages/rules/src/adapters/cline.test.ts b/packages/rules/src/adapters/cline.test.ts new file mode 100644 index 0000000..f9e7f7a --- /dev/null +++ b/packages/rules/src/adapters/cline.test.ts @@ -0,0 +1,107 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { ClineRuleAdapter } from "./cline.js"; +import type { RuleDefinition } from "../types/index.js"; + +describe("ClineRuleAdapter", () => { + let adapter: ClineRuleAdapter; + + const testRule: RuleDefinition = { + id: "typescript", + name: "TypeScript Standards", + content: "Always use strict TypeScript.", + scope: { type: "always" }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new ClineRuleAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("cline")); + it("has correct name", () => expect(adapter.name).toBe("Cline")); + it("has correct configDir", () => expect(adapter.configDir).toBe(".clinerules")); + }); + + describe("generate", () => { + it("generates one file per rule", async () => { + const files = await adapter.generate([testRule]); + expect(files).toHaveLength(1); + expect(files[0].path).toBe(".clinerules/typescript.md"); + 
expect(files[0].format).toBe("md"); + }); + + it("uses simple markdown format with heading", async () => { + const files = await adapter.generate([testRule]); + expect(files[0].content).toBe("# TypeScript Standards\n\nAlways use strict TypeScript.\n"); + }); + + it("handles empty rules array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + }); + + describe("import", () => { + it("returns empty when dir missing", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports rules with heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["ts.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# TypeScript Rules\n\nUse strict TS."); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0].id).toBe("ts"); + expect(result[0].name).toBe("TypeScript Rules"); + expect(result[0].content).toBe("Use strict TS."); + }); + + it("imports rules without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content."); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0].id).toBe("plain"); + expect(result[0].name).toBe("plain"); + expect(result[0].content).toBe("Just content."); + }); + + it("skips non-md files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Content"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + 
expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/rules/src/adapters/cline.ts b/packages/rules/src/adapters/cline.ts new file mode 100644 index 0000000..b99e04d --- /dev/null +++ b/packages/rules/src/adapters/cline.ts @@ -0,0 +1,14 @@ +import { SimpleMarkdownRuleAdapter } from "./simple-adapter.js"; +import { registry } from "./registry.js"; + +export class ClineRuleAdapter extends SimpleMarkdownRuleAdapter { + readonly id = "cline"; + readonly name = "Cline"; + readonly nativeSupport = true; + readonly configDir = ".clinerules"; + readonly command = "cline"; +} + +const adapter = new ClineRuleAdapter(); +registry.register(adapter); +export default adapter; diff --git a/packages/rules/src/adapters/codex.test.ts b/packages/rules/src/adapters/codex.test.ts new file mode 100644 index 0000000..041e81c --- /dev/null +++ b/packages/rules/src/adapters/codex.test.ts @@ -0,0 +1,83 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ registry: { register: vi.fn() } })); +vi.mock("node:fs", () => ({ existsSync: vi.fn(() => false) })); +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { CodexRuleAdapter } from "./codex.js"; + +describe("CodexRuleAdapter", () => { + let adapter: CodexRuleAdapter; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new CodexRuleAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("codex")); + it("has correct name", () => expect(adapter.name).toBe("Codex")); + it("has correct configDir", () => expect(adapter.configDir).toBe(".codex/instructions")); + }); + + describe("generate", () => { + it("generates files at correct path", async () => { + const files = await adapter.generate([ + { id: "test", name: "Test", content: "Content", scope: 
{ type: "always" } }, + ]); + expect(files[0].path).toBe(".codex/instructions/test.md"); + expect(files[0].content).toBe("# Test\n\nContent\n"); + }); + + it("handles empty rules", async () => { + expect(await adapter.generate([])).toHaveLength(0); + }); + }); + + describe("import", () => { + it("returns empty when dir missing", async () => { + vi.mocked(existsSync).mockReturnValue(false); + expect(await adapter.import("/test")).toEqual([]); + }); + + it("imports rules", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# My Rule\n\nContent here."); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("My Rule"); + expect(result[0].content).toBe("Content here."); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports rule without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("plain"); + expect(result[0].content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/rules/src/adapters/codex.ts b/packages/rules/src/adapters/codex.ts new file mode 100644 index 0000000..afc48fc --- /dev/null +++ b/packages/rules/src/adapters/codex.ts @@ -0,0 +1,14 @@ +import { SimpleMarkdownRuleAdapter } from "./simple-adapter.js"; 
+import { registry } from "./registry.js"; + +export class CodexRuleAdapter extends SimpleMarkdownRuleAdapter { + readonly id = "codex"; + readonly name = "Codex"; + readonly nativeSupport = true; + readonly configDir = ".codex/instructions"; + readonly command = "codex"; +} + +const adapter = new CodexRuleAdapter(); +registry.register(adapter); +export default adapter; diff --git a/packages/rules/src/adapters/continue.test.ts b/packages/rules/src/adapters/continue.test.ts new file mode 100644 index 0000000..3e9147b --- /dev/null +++ b/packages/rules/src/adapters/continue.test.ts @@ -0,0 +1,81 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ registry: { register: vi.fn() } })); +vi.mock("node:fs", () => ({ existsSync: vi.fn(() => false) })); +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { ContinueRuleAdapter } from "./continue.js"; + +describe("ContinueRuleAdapter", () => { + let adapter: ContinueRuleAdapter; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new ContinueRuleAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("continue")); + it("has correct name", () => expect(adapter.name).toBe("Continue")); + it("has correct configDir", () => expect(adapter.configDir).toBe(".continue/rules")); + }); + + describe("generate", () => { + it("generates files at correct path", async () => { + const files = await adapter.generate([ + { id: "test", name: "Test", content: "Content", scope: { type: "always" } }, + ]); + expect(files[0].path).toBe(".continue/rules/test.md"); + }); + + it("handles empty rules", async () => { + expect(await adapter.generate([])).toHaveLength(0); + }); + }); + + describe("import", () => { + it("returns empty when dir missing", async () => 
{ + vi.mocked(existsSync).mockReturnValue(false); + expect(await adapter.import("/test")).toEqual([]); + }); + + it("imports rules", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# My Rule\n\nContent here."); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("My Rule"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports rule without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("plain"); + expect(result[0].content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/rules/src/adapters/continue.ts b/packages/rules/src/adapters/continue.ts new file mode 100644 index 0000000..f932fe4 --- /dev/null +++ b/packages/rules/src/adapters/continue.ts @@ -0,0 +1,13 @@ +import { SimpleMarkdownRuleAdapter } from "./simple-adapter.js"; +import { registry } from "./registry.js"; + +export class ContinueRuleAdapter extends SimpleMarkdownRuleAdapter { + readonly id = "continue"; + readonly name = "Continue"; + readonly nativeSupport = true; + readonly configDir = ".continue/rules"; +} + +const adapter = new ContinueRuleAdapter(); +registry.register(adapter); +export default adapter; diff --git 
a/packages/rules/src/adapters/copilot.test.ts b/packages/rules/src/adapters/copilot.test.ts new file mode 100644 index 0000000..ae64e42 --- /dev/null +++ b/packages/rules/src/adapters/copilot.test.ts @@ -0,0 +1,81 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ registry: { register: vi.fn() } })); +vi.mock("node:fs", () => ({ existsSync: vi.fn(() => false) })); +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { CopilotRuleAdapter } from "./copilot.js"; + +describe("CopilotRuleAdapter", () => { + let adapter: CopilotRuleAdapter; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new CopilotRuleAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("copilot")); + it("has correct name", () => expect(adapter.name).toBe("Copilot")); + it("has correct configDir", () => expect(adapter.configDir).toBe(".github/instructions")); + }); + + describe("generate", () => { + it("generates files at correct path", async () => { + const files = await adapter.generate([ + { id: "test", name: "Test", content: "Content", scope: { type: "always" } }, + ]); + expect(files[0].path).toBe(".github/instructions/test.md"); + }); + + it("handles empty rules", async () => { + expect(await adapter.generate([])).toHaveLength(0); + }); + }); + + describe("import", () => { + it("returns empty when dir missing", async () => { + vi.mocked(existsSync).mockReturnValue(false); + expect(await adapter.import("/test")).toEqual([]); + }); + + it("imports rules", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# My Rule\n\nContent here."); + const result = await adapter.import("/test"); + 
expect(result[0].name).toBe("My Rule"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports rule without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("plain"); + expect(result[0].content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/rules/src/adapters/copilot.ts b/packages/rules/src/adapters/copilot.ts new file mode 100644 index 0000000..e73a036 --- /dev/null +++ b/packages/rules/src/adapters/copilot.ts @@ -0,0 +1,13 @@ +import { SimpleMarkdownRuleAdapter } from "./simple-adapter.js"; +import { registry } from "./registry.js"; + +export class CopilotRuleAdapter extends SimpleMarkdownRuleAdapter { + readonly id = "copilot"; + readonly name = "Copilot"; + readonly nativeSupport = true; + readonly configDir = ".github/instructions"; +} + +const adapter = new CopilotRuleAdapter(); +registry.register(adapter); +export default adapter; diff --git a/packages/rules/src/adapters/cursor.test.ts b/packages/rules/src/adapters/cursor.test.ts new file mode 100644 index 0000000..aafe554 --- /dev/null +++ b/packages/rules/src/adapters/cursor.test.ts @@ -0,0 +1,192 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: 
vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { CursorRuleAdapter } from "./cursor.js"; +import type { RuleDefinition } from "../types/index.js"; + +describe("CursorRuleAdapter", () => { + let adapter: CursorRuleAdapter; + + const testRule: RuleDefinition = { + id: "typescript", + name: "TypeScript Standards", + description: "TypeScript coding standards", + content: "Always use strict TypeScript.", + scope: { type: "glob", patterns: ["*.ts", "*.tsx"] }, + }; + + const alwaysRule: RuleDefinition = { + id: "general", + name: "General", + description: "General rules", + content: "Be concise.", + scope: { type: "always" }, + }; + + const manualRule: RuleDefinition = { + id: "manual", + name: "Manual Rule", + description: "A manual rule", + content: "Invoke when needed.", + scope: { type: "manual" }, + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new CursorRuleAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("cursor")); + it("has correct name", () => expect(adapter.name).toBe("Cursor")); + it("has correct configDir", () => expect(adapter.configDir).toBe(".cursor/rules")); + it("has nativeSupport true", () => expect(adapter.nativeSupport).toBe(true)); + }); + + describe("generate", () => { + it("uses subdirectory pattern with RULE.md", async () => { + const files = await adapter.generate([testRule]); + expect(files).toHaveLength(1); + expect(files[0].path).toBe(".cursor/rules/typescript/RULE.md"); + expect(files[0].format).toBe("md"); + }); + + it("includes frontmatter with alwaysApply false for glob scope", async () => { + const files = await adapter.generate([testRule]); + expect(files[0].content).toContain("alwaysApply: false"); + expect(files[0].content).toContain('- "*.ts"'); 
+ }); + + it("includes alwaysApply true for always scope", async () => { + const files = await adapter.generate([alwaysRule]); + expect(files[0].content).toContain("alwaysApply: true"); + }); + + it("includes alwaysApply false for manual scope", async () => { + const files = await adapter.generate([manualRule]); + expect(files[0].content).toContain("alwaysApply: false"); + expect(files[0].content).not.toContain("globs:"); + }); + + it("generates rule without description", async () => { + const noDescRule: RuleDefinition = { + id: "nodesc", + name: "No Description", + content: "Content only.", + scope: { type: "always" }, + }; + const files = await adapter.generate([noDescRule]); + expect(files[0].content).not.toContain("description:"); + expect(files[0].content).toContain("alwaysApply: true"); + expect(files[0].content).toContain("Content only."); + }); + + it("handles empty rules array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + + it("generates multiple files for multiple rules", async () => { + const files = await adapter.generate([testRule, alwaysRule]); + expect(files).toHaveLength(2); + expect(files[0].path).toBe(".cursor/rules/typescript/RULE.md"); + expect(files[1].path).toBe(".cursor/rules/general/RULE.md"); + }); + }); + + describe("import", () => { + it("returns empty when dir missing", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports rules from subdirectories", async () => { + vi.mocked(existsSync).mockImplementation((p) => { + const path = String(p); + return path.includes(".cursor/rules") || path.includes("RULE.md"); + }); + vi.mocked(readdir).mockResolvedValue(["typescript"] as unknown); + vi.mocked(readFile).mockResolvedValue( + '---\ndescription: TS rules\nalwaysApply: false\nglobs:\n - "*.ts"\n---\n\nUse strict TS.', + ); + + const result = await adapter.import("/test"); + 
expect(result).toHaveLength(1); + expect(result[0].id).toBe("typescript"); + expect(result[0].description).toBe("TS rules"); + expect(result[0].scope).toEqual({ type: "glob", patterns: ["*.ts"] }); + }); + + it("parses alwaysApply true as always scope", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["general"] as unknown); + vi.mocked(readFile).mockResolvedValue( + "---\ndescription: General\nalwaysApply: true\n---\n\nBe concise.", + ); + + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0].scope).toEqual({ type: "always" }); + }); + + it("parses alwaysApply false without globs as manual scope", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["manual"] as unknown); + vi.mocked(readFile).mockResolvedValue( + "---\ndescription: Manual\nalwaysApply: false\n---\n\nManual rule.", + ); + + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0].scope).toEqual({ type: "manual" }); + }); + + it("uses process.cwd() when cwd is not provided", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + + it("imports rule without frontmatter", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just plain content."); + + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0].id).toBe("plain"); + expect(result[0].content).toBe(""); + expect(result[0].scope).toEqual({ type: "always" }); + }); + + it("skips entries without RULE.md", async () => { + vi.mocked(existsSync).mockImplementation((p) => { + const path = String(p); + if (path.includes("RULE.md")) return false; + return path.includes(".cursor/rules"); + }); + 
vi.mocked(readdir).mockResolvedValue(["norule"] as unknown); + + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/rules/src/adapters/cursor.ts b/packages/rules/src/adapters/cursor.ts new file mode 100644 index 0000000..e64ab43 --- /dev/null +++ b/packages/rules/src/adapters/cursor.ts @@ -0,0 +1,101 @@ +import { BaseRuleAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { RuleDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { resolve } from "node:path"; + +export class CursorRuleAdapter extends BaseRuleAdapter { + readonly id = "cursor"; + readonly name = "Cursor"; + readonly nativeSupport = true; + readonly configDir = ".cursor/rules"; + readonly command = "cursor"; + + async generate(rules: RuleDefinition[]): Promise<GeneratedFile[]> { + return rules.map((rule) => ({ + path: `${this.configDir}/${rule.id}/RULE.md`, + content: this.formatRule(rule), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise<RuleDefinition[]> { + const dir = cwd ?? 
process.cwd(); + const rulesDir = resolve(dir, this.configDir); + if (!existsSync(rulesDir)) return []; + + const entries = await readdir(rulesDir); + const rules: RuleDefinition[] = []; + + for (const entry of entries) { + const rulePath = resolve(rulesDir, entry, "RULE.md"); + if (!existsSync(rulePath)) continue; + const content = await readFile(rulePath, "utf-8"); + rules.push(this.parseRule(entry, content)); + } + + return rules; + } + + private formatRule(rule: RuleDefinition): string { + let md = "---\n"; + if (rule.description) md += `description: ${rule.description}\n`; + md += `alwaysApply: ${rule.scope.type === "always"}\n`; + if (rule.scope.type === "glob" && rule.scope.patterns.length > 0) { + md += "globs:\n"; + for (const pattern of rule.scope.patterns) { + md += ` - "${pattern}"\n`; + } + } + md += "---\n\n"; + md += rule.content + "\n"; + return md; + } + + private parseRule(id: string, raw: string): RuleDefinition { + const rule: RuleDefinition = { id, name: id, content: "", scope: { type: "always" } }; + + if (raw.startsWith("---")) { + const endIdx = raw.indexOf("---", 3); + if (endIdx !== -1) { + const fm = raw.slice(3, endIdx).trim(); + const body = raw.slice(endIdx + 3).trim(); + + const globs: string[] = []; + let alwaysApply = false; + + const lines = fm.split("\n"); + for (const line of lines) { + if (line.startsWith("description:")) { + rule.description = line.slice(12).trim(); + } else if (line.startsWith("alwaysApply:")) { + alwaysApply = line.slice(12).trim() === "true"; + } else if (line.trim().startsWith("- ")) { + const pattern = line + .trim() + .slice(2) + .replace(/^["']|["']$/g, ""); + globs.push(pattern); + } + } + + if (globs.length > 0) { + rule.scope = { type: "glob", patterns: globs }; + } else if (alwaysApply) { + rule.scope = { type: "always" }; + } else { + rule.scope = { type: "manual" }; + } + + rule.content = body; + } + } + + return rule; + } +} + +const adapter = new CursorRuleAdapter(); +registry.register(adapter); 
+export default adapter; diff --git a/packages/rules/src/adapters/droid.test.ts b/packages/rules/src/adapters/droid.test.ts new file mode 100644 index 0000000..051112c --- /dev/null +++ b/packages/rules/src/adapters/droid.test.ts @@ -0,0 +1,81 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ registry: { register: vi.fn() } })); +vi.mock("node:fs", () => ({ existsSync: vi.fn(() => false) })); +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { DroidRuleAdapter } from "./droid.js"; + +describe("DroidRuleAdapter", () => { + let adapter: DroidRuleAdapter; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new DroidRuleAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("droid")); + it("has correct name", () => expect(adapter.name).toBe("Droid")); + it("has correct configDir", () => expect(adapter.configDir).toBe(".factory/instructions")); + }); + + describe("generate", () => { + it("generates files at correct path", async () => { + const files = await adapter.generate([ + { id: "test", name: "Test", content: "Content", scope: { type: "always" } }, + ]); + expect(files[0].path).toBe(".factory/instructions/test.md"); + }); + + it("handles empty rules", async () => { + expect(await adapter.generate([])).toHaveLength(0); + }); + }); + + describe("import", () => { + it("returns empty when dir missing", async () => { + vi.mocked(existsSync).mockReturnValue(false); + expect(await adapter.import("/test")).toEqual([]); + }); + + it("imports rules", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# My Rule\n\nContent here."); + const result = await 
adapter.import("/test"); + expect(result[0].name).toBe("My Rule"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports rule without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("plain"); + expect(result[0].content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/rules/src/adapters/droid.ts b/packages/rules/src/adapters/droid.ts new file mode 100644 index 0000000..fa3e503 --- /dev/null +++ b/packages/rules/src/adapters/droid.ts @@ -0,0 +1,14 @@ +import { SimpleMarkdownRuleAdapter } from "./simple-adapter.js"; +import { registry } from "./registry.js"; + +export class DroidRuleAdapter extends SimpleMarkdownRuleAdapter { + readonly id = "droid"; + readonly name = "Droid"; + readonly nativeSupport = true; + readonly configDir = ".factory/instructions"; + readonly command = "droid"; +} + +const adapter = new DroidRuleAdapter(); +registry.register(adapter); +export default adapter; diff --git a/packages/rules/src/adapters/gemini-cli.test.ts b/packages/rules/src/adapters/gemini-cli.test.ts new file mode 100644 index 0000000..81dc64c --- /dev/null +++ b/packages/rules/src/adapters/gemini-cli.test.ts @@ -0,0 +1,81 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ registry: { register: vi.fn() } })); 
+vi.mock("node:fs", () => ({ existsSync: vi.fn(() => false) })); +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { GeminiCliRuleAdapter } from "./gemini-cli.js"; + +describe("GeminiCliRuleAdapter", () => { + let adapter: GeminiCliRuleAdapter; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new GeminiCliRuleAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("gemini-cli")); + it("has correct name", () => expect(adapter.name).toBe("Gemini CLI")); + it("has correct configDir", () => expect(adapter.configDir).toBe(".gemini/rules")); + }); + + describe("generate", () => { + it("generates files at correct path", async () => { + const files = await adapter.generate([ + { id: "test", name: "Test", content: "Content", scope: { type: "always" } }, + ]); + expect(files[0].path).toBe(".gemini/rules/test.md"); + }); + + it("handles empty rules", async () => { + expect(await adapter.generate([])).toHaveLength(0); + }); + }); + + describe("import", () => { + it("returns empty when dir missing", async () => { + vi.mocked(existsSync).mockReturnValue(false); + expect(await adapter.import("/test")).toEqual([]); + }); + + it("imports rules", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# My Rule\n\nContent here."); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("My Rule"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + 
expect(result).toHaveLength(1); + }); + + it("imports rule without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("plain"); + expect(result[0].content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/rules/src/adapters/gemini-cli.ts b/packages/rules/src/adapters/gemini-cli.ts new file mode 100644 index 0000000..ebeb85e --- /dev/null +++ b/packages/rules/src/adapters/gemini-cli.ts @@ -0,0 +1,14 @@ +import { SimpleMarkdownRuleAdapter } from "./simple-adapter.js"; +import { registry } from "./registry.js"; + +export class GeminiCliRuleAdapter extends SimpleMarkdownRuleAdapter { + readonly id = "gemini-cli"; + readonly name = "Gemini CLI"; + readonly nativeSupport = true; + readonly configDir = ".gemini/rules"; + readonly command = "gemini"; +} + +const adapter = new GeminiCliRuleAdapter(); +registry.register(adapter); +export default adapter; diff --git a/packages/rules/src/adapters/index.ts b/packages/rules/src/adapters/index.ts new file mode 100644 index 0000000..34d44f4 --- /dev/null +++ b/packages/rules/src/adapters/index.ts @@ -0,0 +1,2 @@ +export { BaseRuleAdapter } from "./base.js"; +export { registry } from "./registry.js"; diff --git a/packages/rules/src/adapters/kiro.test.ts b/packages/rules/src/adapters/kiro.test.ts new file mode 100644 index 0000000..0ab759e --- /dev/null +++ b/packages/rules/src/adapters/kiro.test.ts @@ -0,0 +1,81 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ registry: { register: vi.fn() } })); +vi.mock("node:fs", () => ({ existsSync: vi.fn(() => 
false) })); +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { KiroRuleAdapter } from "./kiro.js"; + +describe("KiroRuleAdapter", () => { + let adapter: KiroRuleAdapter; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new KiroRuleAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("kiro")); + it("has correct name", () => expect(adapter.name).toBe("Kiro")); + it("has correct configDir", () => expect(adapter.configDir).toBe(".kiro/steering")); + }); + + describe("generate", () => { + it("generates files at correct path", async () => { + const files = await adapter.generate([ + { id: "test", name: "Test", content: "Content", scope: { type: "always" } }, + ]); + expect(files[0].path).toBe(".kiro/steering/test.md"); + }); + + it("handles empty rules", async () => { + expect(await adapter.generate([])).toHaveLength(0); + }); + }); + + describe("import", () => { + it("returns empty when dir missing", async () => { + vi.mocked(existsSync).mockReturnValue(false); + expect(await adapter.import("/test")).toEqual([]); + }); + + it("imports rules", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# My Rule\n\nContent here."); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("My Rule"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports rule without heading", async () => { + 
vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("plain"); + expect(result[0].content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/rules/src/adapters/kiro.ts b/packages/rules/src/adapters/kiro.ts new file mode 100644 index 0000000..db97cca --- /dev/null +++ b/packages/rules/src/adapters/kiro.ts @@ -0,0 +1,14 @@ +import { SimpleMarkdownRuleAdapter } from "./simple-adapter.js"; +import { registry } from "./registry.js"; + +export class KiroRuleAdapter extends SimpleMarkdownRuleAdapter { + readonly id = "kiro"; + readonly name = "Kiro"; + readonly nativeSupport = true; + readonly configDir = ".kiro/steering"; + readonly command = "kiro"; +} + +const adapter = new KiroRuleAdapter(); +registry.register(adapter); +export default adapter; diff --git a/packages/rules/src/adapters/opencode.test.ts b/packages/rules/src/adapters/opencode.test.ts new file mode 100644 index 0000000..4f77994 --- /dev/null +++ b/packages/rules/src/adapters/opencode.test.ts @@ -0,0 +1,81 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ registry: { register: vi.fn() } })); +vi.mock("node:fs", () => ({ existsSync: vi.fn(() => false) })); +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { OpenCodeRuleAdapter } from "./opencode.js"; + +describe("OpenCodeRuleAdapter", () => { + let adapter: OpenCodeRuleAdapter; + + beforeEach(() => { + 
vi.clearAllMocks(); + adapter = new OpenCodeRuleAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("opencode")); + it("has correct name", () => expect(adapter.name).toBe("OpenCode")); + it("has correct configDir", () => expect(adapter.configDir).toBe(".opencode/instructions")); + }); + + describe("generate", () => { + it("generates files at correct path", async () => { + const files = await adapter.generate([ + { id: "test", name: "Test", content: "Content", scope: { type: "always" } }, + ]); + expect(files[0].path).toBe(".opencode/instructions/test.md"); + }); + + it("handles empty rules", async () => { + expect(await adapter.generate([])).toHaveLength(0); + }); + }); + + describe("import", () => { + it("returns empty when dir missing", async () => { + vi.mocked(existsSync).mockReturnValue(false); + expect(await adapter.import("/test")).toEqual([]); + }); + + it("imports rules", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# My Rule\n\nContent here."); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("My Rule"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports rule without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("plain"); + expect(result[0].content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () 
=> { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/rules/src/adapters/opencode.ts b/packages/rules/src/adapters/opencode.ts new file mode 100644 index 0000000..2ad826a --- /dev/null +++ b/packages/rules/src/adapters/opencode.ts @@ -0,0 +1,14 @@ +import { SimpleMarkdownRuleAdapter } from "./simple-adapter.js"; +import { registry } from "./registry.js"; + +export class OpenCodeRuleAdapter extends SimpleMarkdownRuleAdapter { + readonly id = "opencode"; + readonly name = "OpenCode"; + readonly nativeSupport = true; + readonly configDir = ".opencode/instructions"; + readonly command = "opencode"; +} + +const adapter = new OpenCodeRuleAdapter(); +registry.register(adapter); +export default adapter; diff --git a/packages/rules/src/adapters/registry.test.ts b/packages/rules/src/adapters/registry.test.ts new file mode 100644 index 0000000..7ce2d19 --- /dev/null +++ b/packages/rules/src/adapters/registry.test.ts @@ -0,0 +1,154 @@ +import { describe, it, expect, beforeEach, vi } from "vitest"; +import { registry } from "./registry.js"; +import type { BaseRuleAdapter } from "./base.js"; +import type { RuleDefinition, GeneratedFile } from "../types/index.js"; + +function makeFakeAdapter(id: string, detects: boolean = true): BaseRuleAdapter { + return { + id, + name: `${id} Adapter`, + nativeSupport: true, + configDir: `.${id}/rules`, + detect: async () => detects, + generate: async (_rules: RuleDefinition[]) => [] as GeneratedFile[], + import: async () => [] as RuleDefinition[], + install: async () => {}, + uninstall: async () => {}, + } as unknown as BaseRuleAdapter; +} + +describe("Real RuleAdapterRegistry singleton", () => { + beforeEach(() => { + registry.clear(); + }); + + describe("register / get", () => { + it("registers and retrieves an adapter by id", () => { + const adapter = makeFakeAdapter("claude-code"); + registry.register(adapter); + 
expect(registry.get("claude-code")).toBe(adapter); + }); + + it("returns undefined for unknown adapter id", () => { + expect(registry.get("nonexistent")).toBeUndefined(); + }); + + it("overwrites a previously registered adapter with the same id", () => { + const first = makeFakeAdapter("dupe"); + const second = makeFakeAdapter("dupe"); + registry.register(first); + registry.register(second); + expect(registry.get("dupe")).toBe(second); + }); + }); + + describe("list", () => { + it("returns empty array when nothing registered", () => { + expect(registry.list()).toEqual([]); + }); + + it("lists all registered adapter IDs", () => { + registry.register(makeFakeAdapter("a")); + registry.register(makeFakeAdapter("b")); + expect(registry.list().toSorted()).toEqual(["a", "b"]); + }); + }); + + describe("getAll", () => { + it("returns empty array when nothing registered", () => { + expect(registry.getAll()).toEqual([]); + }); + + it("returns all registered adapters", () => { + const a = makeFakeAdapter("a"); + const b = makeFakeAdapter("b"); + registry.register(a); + registry.register(b); + const all = registry.getAll(); + expect(all).toHaveLength(2); + expect(all).toContain(a); + expect(all).toContain(b); + }); + }); + + describe("detectAll", () => { + it("returns adapters that detect successfully", async () => { + registry.register(makeFakeAdapter("found", true)); + registry.register(makeFakeAdapter("missing", false)); + + const detected = await registry.detectAll(); + expect(detected).toHaveLength(1); + expect(detected[0]?.id).toBe("found"); + }); + + it("skips adapters that throw during detection", async () => { + const throwingAdapter = makeFakeAdapter("broken"); + throwingAdapter.detect = async () => { + throw new Error("detection crashed"); + }; + registry.register(throwingAdapter); + registry.register(makeFakeAdapter("stable", true)); + + const detected = await registry.detectAll(); + expect(detected).toHaveLength(1); + expect(detected[0]?.id).toBe("stable"); + }); 
+ + it("returns empty array when no adapters detect", async () => { + registry.register(makeFakeAdapter("a", false)); + registry.register(makeFakeAdapter("b", false)); + + const detected = await registry.detectAll(); + expect(detected).toEqual([]); + }); + + it("passes cwd to adapter.detect", async () => { + const detectFn = vi.fn().mockResolvedValue(true); + const adapter = makeFakeAdapter("with-cwd"); + adapter.detect = detectFn; + registry.register(adapter); + + await registry.detectAll("/custom/dir"); + expect(detectFn).toHaveBeenCalledWith("/custom/dir"); + }); + + it("handles multiple throwing adapters gracefully", async () => { + const spy = vi.fn(); + for (let i = 0; i < 3; i++) { + const adapter = makeFakeAdapter(`throw-${i}`); + adapter.detect = async () => { + spy(); + throw new Error(`fail-${i}`); + }; + registry.register(adapter); + } + + const detected = await registry.detectAll(); + expect(detected).toEqual([]); + expect(spy).toHaveBeenCalledTimes(3); + }); + }); + + describe("clear", () => { + it("removes all adapters", () => { + registry.register(makeFakeAdapter("a")); + registry.register(makeFakeAdapter("b")); + + registry.clear(); + + expect(registry.list()).toEqual([]); + expect(registry.get("a")).toBeUndefined(); + expect(registry.get("b")).toBeUndefined(); + }); + + it("allows re-registration after clear", () => { + registry.register(makeFakeAdapter("x")); + registry.clear(); + registry.register(makeFakeAdapter("y")); + + expect(registry.list()).toEqual(["y"]); + expect(registry.get("x")).toBeUndefined(); + expect(registry.get("y")?.id).toBe("y"); + }); + }); +}); diff --git a/packages/rules/src/adapters/registry.ts b/packages/rules/src/adapters/registry.ts new file mode 100644 index 0000000..ab4a8f2 --- /dev/null +++ b/packages/rules/src/adapters/registry.ts @@ -0,0 +1,47 @@ +import type { BaseRuleAdapter } from "./base.js"; + +class RuleAdapterRegistry { + private adapters: Map = new Map(); + + register(adapter: BaseRuleAdapter): void { + 
this.adapters.set(adapter.id, adapter); + } + + get(id: string): BaseRuleAdapter | undefined { + return this.adapters.get(id); + } + + list(): string[] { + return [...this.adapters.keys()]; + } + + getAll(): BaseRuleAdapter[] { + return [...this.adapters.values()]; + } + + async detectAll(cwd?: string): Promise { + const detected: BaseRuleAdapter[] = []; + for (const adapter of this.adapters.values()) { + try { + const found = await adapter.detect(cwd); + if (found) { + detected.push(adapter); + } + } catch { + // Detection failed, skip + } + } + return detected; + } + + clear(): void { + this.adapters.clear(); + } +} + +declare global { + // eslint-disable-next-line no-var + var __premierstudio_rules_registry: RuleAdapterRegistry | undefined; +} + +export const registry = (globalThis.__premierstudio_rules_registry ??= new RuleAdapterRegistry()); diff --git a/packages/rules/src/adapters/roo-code.test.ts b/packages/rules/src/adapters/roo-code.test.ts new file mode 100644 index 0000000..af42d33 --- /dev/null +++ b/packages/rules/src/adapters/roo-code.test.ts @@ -0,0 +1,81 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ registry: { register: vi.fn() } })); +vi.mock("node:fs", () => ({ existsSync: vi.fn(() => false) })); +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { RooCodeRuleAdapter } from "./roo-code.js"; + +describe("RooCodeRuleAdapter", () => { + let adapter: RooCodeRuleAdapter; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new RooCodeRuleAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("roo-code")); + it("has correct name", () => expect(adapter.name).toBe("Roo Code")); + it("has correct configDir", () => 
expect(adapter.configDir).toBe(".roo/rules")); + }); + + describe("generate", () => { + it("generates files at correct path", async () => { + const files = await adapter.generate([ + { id: "test", name: "Test", content: "Content", scope: { type: "always" } }, + ]); + expect(files[0].path).toBe(".roo/rules/test.md"); + }); + + it("handles empty rules", async () => { + expect(await adapter.generate([])).toHaveLength(0); + }); + }); + + describe("import", () => { + it("returns empty when dir missing", async () => { + vi.mocked(existsSync).mockReturnValue(false); + expect(await adapter.import("/test")).toEqual([]); + }); + + it("imports rules", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# My Rule\n\nContent here."); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("My Rule"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports rule without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("plain"); + expect(result[0].content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/rules/src/adapters/roo-code.ts b/packages/rules/src/adapters/roo-code.ts new file mode 100644 index 0000000..ca327a0 --- /dev/null 
+++ b/packages/rules/src/adapters/roo-code.ts @@ -0,0 +1,13 @@ +import { SimpleMarkdownRuleAdapter } from "./simple-adapter.js"; +import { registry } from "./registry.js"; + +export class RooCodeRuleAdapter extends SimpleMarkdownRuleAdapter { + readonly id = "roo-code"; + readonly name = "Roo Code"; + readonly nativeSupport = true; + readonly configDir = ".roo/rules"; +} + +const adapter = new RooCodeRuleAdapter(); +registry.register(adapter); +export default adapter; diff --git a/packages/rules/src/adapters/simple-adapter.ts b/packages/rules/src/adapters/simple-adapter.ts new file mode 100644 index 0000000..3024e06 --- /dev/null +++ b/packages/rules/src/adapters/simple-adapter.ts @@ -0,0 +1,48 @@ +import { BaseRuleAdapter } from "./base.js"; +import type { RuleDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { resolve, basename } from "node:path"; + +export abstract class SimpleMarkdownRuleAdapter extends BaseRuleAdapter { + async generate(rules: RuleDefinition[]): Promise { + return rules.map((rule) => ({ + path: `${this.configDir}/${rule.id}.md`, + content: `# ${rule.name}\n\n${rule.content}\n`, + format: "md" as const, + })); + } + + async import(cwd?: string): Promise { + const dir = cwd ?? 
process.cwd(); + const rulesDir = resolve(dir, this.configDir); + if (!existsSync(rulesDir)) return []; + + const files = await readdir(rulesDir); + const rules: RuleDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(rulesDir, file), "utf-8"); + const id = basename(file, ".md"); + rules.push(this.parseSimpleRule(id, content)); + } + + return rules; + } + + private parseSimpleRule(id: string, raw: string): RuleDefinition { + const lines = raw.trim().split("\n"); + let name = id; + let contentStart = 0; + + if (lines[0]?.startsWith("# ")) { + name = lines[0].slice(2).trim(); + contentStart = 1; + if (lines[contentStart]?.trim() === "") contentStart++; + } + + const content = lines.slice(contentStart).join("\n").trim(); + return { id, name, content, scope: { type: "always" } }; + } +} diff --git a/packages/rules/src/adapters/windsurf.test.ts b/packages/rules/src/adapters/windsurf.test.ts new file mode 100644 index 0000000..7879fcf --- /dev/null +++ b/packages/rules/src/adapters/windsurf.test.ts @@ -0,0 +1,81 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ registry: { register: vi.fn() } })); +vi.mock("node:fs", () => ({ existsSync: vi.fn(() => false) })); +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { WindsurfRuleAdapter } from "./windsurf.js"; + +describe("WindsurfRuleAdapter", () => { + let adapter: WindsurfRuleAdapter; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new WindsurfRuleAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("windsurf")); + it("has correct name", () => expect(adapter.name).toBe("Windsurf")); + it("has correct configDir", () => 
expect(adapter.configDir).toBe(".windsurf/rules")); + }); + + describe("generate", () => { + it("generates files at correct path", async () => { + const files = await adapter.generate([ + { id: "test", name: "Test", content: "Content", scope: { type: "always" } }, + ]); + expect(files[0].path).toBe(".windsurf/rules/test.md"); + }); + + it("handles empty rules", async () => { + expect(await adapter.generate([])).toHaveLength(0); + }); + }); + + describe("import", () => { + it("returns empty when dir missing", async () => { + vi.mocked(existsSync).mockReturnValue(false); + expect(await adapter.import("/test")).toEqual([]); + }); + + it("imports rules", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# My Rule\n\nContent here."); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("My Rule"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "rule.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports rule without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0].name).toBe("plain"); + expect(result[0].content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/rules/src/adapters/windsurf.ts b/packages/rules/src/adapters/windsurf.ts new file mode 100644 index 0000000..52b99d2 --- 
/dev/null +++ b/packages/rules/src/adapters/windsurf.ts @@ -0,0 +1,14 @@ +import { SimpleMarkdownRuleAdapter } from "./simple-adapter.js"; +import { registry } from "./registry.js"; + +export class WindsurfRuleAdapter extends SimpleMarkdownRuleAdapter { + readonly id = "windsurf"; + readonly name = "Windsurf"; + readonly nativeSupport = true; + readonly configDir = ".windsurf/rules"; + readonly command = "windsurf"; +} + +const adapter = new WindsurfRuleAdapter(); +registry.register(adapter); +export default adapter; diff --git a/packages/rules/src/cli/bin.ts b/packages/rules/src/cli/bin.ts new file mode 100644 index 0000000..3adf2a9 --- /dev/null +++ b/packages/rules/src/cli/bin.ts @@ -0,0 +1,6 @@ +import { run } from "./index.js"; + +run(process.argv.slice(2)).catch((err) => { + console.error(err.message); + process.exit(1); +}); diff --git a/packages/rules/src/cli/index.test.ts b/packages/rules/src/cli/index.test.ts new file mode 100644 index 0000000..d638613 --- /dev/null +++ b/packages/rules/src/cli/index.test.ts @@ -0,0 +1,668 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import type { RuleDefinition, GeneratedFile } from "../types/index.js"; +import type { BaseRuleAdapter } from "../adapters/base.js"; + +const { mockRegistryDetectAll, mockRegistryList, mockRegistryGet, mockWriteFile, mockMkdir } = + vi.hoisted(() => ({ + mockRegistryDetectAll: vi.fn(), + mockRegistryList: vi.fn(), + mockRegistryGet: vi.fn(), + mockWriteFile: vi.fn(), + mockMkdir: vi.fn(), + })); + +vi.mock("../adapters/all.js", () => ({})); + +vi.mock("../adapters/registry.js", () => ({ + registry: { + detectAll: (...args: unknown[]) => mockRegistryDetectAll(...args), + list: () => mockRegistryList(), + get: (id: string) => mockRegistryGet(id), + register: vi.fn(), + }, +})); + +vi.mock("node:fs/promises", () => ({ + writeFile: (...args: unknown[]) => mockWriteFile(...args), + mkdir: (...args: unknown[]) => mockMkdir(...args), +})); + +import { run } from 
"./index.js"; + +function makeAdapter(overrides: Partial = {}): BaseRuleAdapter { + return { + id: overrides.id ?? "test-tool", + name: overrides.name ?? "Test Tool", + nativeSupport: true, + configDir: overrides.configDir ?? ".test/rules", + detect: overrides.detect ?? vi.fn<() => Promise>().mockResolvedValue(true), + generate: + overrides.generate ?? + vi + .fn<(rules: RuleDefinition[]) => Promise>() + .mockResolvedValue([ + { path: ".test/rules/test.md", content: "# Test\n\nContent\n", format: "md" }, + ]), + import: overrides.import ?? vi.fn<() => Promise>().mockResolvedValue([]), + install: + overrides.install ?? + vi.fn<(files: GeneratedFile[]) => Promise>().mockResolvedValue(undefined), + uninstall: overrides.uninstall ?? vi.fn<() => Promise>().mockResolvedValue(undefined), + } as BaseRuleAdapter; +} + +let logOutput: string[]; +let errorOutput: string[]; +let warnOutput: string[]; +let exitCode: number | undefined; + +const originalLog = console.log; +const originalError = console.error; +const originalWarn = console.warn; +const originalExit = process.exit; + +beforeEach(() => { + logOutput = []; + errorOutput = []; + warnOutput = []; + exitCode = undefined; + + console.log = vi.fn((...args: unknown[]) => { + logOutput.push(args.map(String).join(" ")); + }); + console.error = vi.fn((...args: unknown[]) => { + errorOutput.push(args.map(String).join(" ")); + }); + console.warn = vi.fn((...args: unknown[]) => { + warnOutput.push(args.map(String).join(" ")); + }); + process.exit = vi.fn((code?: number) => { + exitCode = code ?? 
0; + throw new Error(`process.exit(${code})`); + }) as never; + + vi.clearAllMocks(); + mockWriteFile.mockResolvedValue(undefined); + mockMkdir.mockResolvedValue(undefined); +}); + +afterEach(() => { + console.log = originalLog; + console.error = originalError; + console.warn = originalWarn; + process.exit = originalExit; +}); + +function allLog(): string { + return logOutput.join("\n"); +} + +function allError(): string { + return errorOutput.join("\n"); +} + +function allWarn(): string { + return warnOutput.join("\n"); +} + +describe("run() - help output", () => { + it('prints help text for "help" command', async () => { + await run(["help"]); + expect(allLog()).toContain("ai-rules"); + expect(allLog()).toContain("USAGE:"); + expect(allLog()).toContain("COMMANDS:"); + expect(allLog()).toContain("OPTIONS:"); + expect(allLog()).toContain("EXAMPLES:"); + }); + + it("prints help text for --help flag", async () => { + await run(["--help"]); + expect(allLog()).toContain("ai-rules"); + expect(allLog()).toContain("COMMANDS"); + }); + + it("prints help text for -h flag", async () => { + await run(["-h"]); + expect(allLog()).toContain("ai-rules"); + }); + + it("prints help text when no arguments provided", async () => { + await run([]); + expect(allLog()).toContain("ai-rules"); + }); + + it("includes all documented commands in help text", async () => { + await run(["help"]); + const output = allLog(); + for (const cmd of [ + "init", + "detect", + "generate", + "install", + "import", + "sync", + "export", + "help", + ]) { + expect(output).toContain(cmd); + } + }); +}); + +describe("run() - unknown command", () => { + it("prints error and help, then exits with code 1", async () => { + await expect(run(["foobar"])).rejects.toThrow("process.exit(1)"); + expect(allError()).toContain("Unknown command: foobar"); + expect(allLog()).toContain("USAGE:"); + expect(exitCode).toBe(1); + }); + + it("prints the actual command name in the error message", async () => { + await 
expect(run(["deploy-everything"])).rejects.toThrow("process.exit(1)"); + expect(allError()).toContain("Unknown command: deploy-everything"); + }); +}); + +describe("run() - init command", () => { + it("creates config file", async () => { + await run(["init"]); + expect(mockWriteFile).toHaveBeenCalledOnce(); + expect(mockWriteFile).toHaveBeenCalledWith( + "ai-rules.config.ts", + expect.stringContaining("defineRulesConfig"), + "utf-8", + ); + expect(allLog()).toContain("Created ai-rules.config.ts"); + }); + + it("respects --dry-run flag and does not write files", async () => { + await run(["init", "--dry-run"]); + expect(mockWriteFile).not.toHaveBeenCalled(); + expect(allLog()).toContain("[dry-run] Would create ai-rules.config.ts"); + }); + + it("writes template with defineRulesConfig and example rule", async () => { + await run(["init"]); + const firstCall = mockWriteFile.mock.calls[0]!; + const writtenContent = firstCall[1] as string; + expect(writtenContent).toContain("defineRulesConfig"); + expect(writtenContent).toContain("rules:"); + expect(writtenContent).toContain("TypeScript Standards"); + }); +}); + +describe("run() - detect command", () => { + it("shows detection header", async () => { + mockRegistryDetectAll.mockResolvedValue([]); + mockRegistryList.mockReturnValue([]); + await run(["detect"]); + expect(allLog()).toContain("Detecting AI coding tools..."); + }); + + it("lists detected and undetected tools", async () => { + const detected = makeAdapter({ id: "claude-code", name: "Claude Code" }); + const missing = makeAdapter({ id: "cursor", name: "Cursor" }); + + mockRegistryDetectAll.mockResolvedValue([detected]); + mockRegistryList.mockReturnValue(["claude-code", "cursor"]); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return detected; + if (id === "cursor") return missing; + return undefined; + }); + + await run(["detect"]); + const output = allLog(); + expect(output).toContain("\u2713"); + 
expect(output).toContain("Claude Code"); + expect(output).toContain("\u2717"); + expect(output).toContain("Cursor"); + }); + + it("shows detection summary with counts", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryDetectAll.mockResolvedValue([adapter]); + mockRegistryList.mockReturnValue(["claude-code", "cursor", "codex"]); + mockRegistryGet.mockReturnValue(adapter); + + await run(["detect"]); + expect(allLog()).toContain("Detected 1/3 tools"); + }); + + it("skips adapters that registry.get returns undefined for", async () => { + mockRegistryDetectAll.mockResolvedValue([]); + mockRegistryList.mockReturnValue(["ghost-adapter"]); + mockRegistryGet.mockReturnValue(undefined); + + await run(["detect"]); + const output = allLog(); + expect(output).toContain("Detected 0/1 tools"); + }); + + it("shows configDir for each adapter", async () => { + const adapter = makeAdapter({ + id: "claude-code", + name: "Claude Code", + configDir: ".claude/rules", + }); + mockRegistryDetectAll.mockResolvedValue([adapter]); + mockRegistryList.mockReturnValue(["claude-code"]); + mockRegistryGet.mockReturnValue(adapter); + + await run(["detect"]); + expect(allLog()).toContain(".claude/rules"); + }); +}); + +describe("run() - generate command", () => { + it("prints message when no tools detected (no --tools, detectAll empty)", async () => { + mockRegistryDetectAll.mockResolvedValue([]); + await run(["generate"]); + expect(allLog()).toContain("No tools specified"); + }); + + it("generates for adapters resolved via --tools flag", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["generate", "--tools=claude-code"]); + expect(allLog()).toContain("Generating rules for 1 tool(s)"); + expect(adapter.generate).toHaveBeenCalled(); + }); + + it("generates for all 
detected adapters when no --tools flag", async () => { + const adapter1 = makeAdapter({ id: "claude-code", name: "Claude Code" }); + const adapter2 = makeAdapter({ id: "cursor", name: "Cursor" }); + mockRegistryDetectAll.mockResolvedValue([adapter1, adapter2]); + + await run(["generate"]); + expect(allLog()).toContain("Generating rules for 2 tool(s)"); + expect(adapter1.generate).toHaveBeenCalled(); + expect(adapter2.generate).toHaveBeenCalled(); + }); + + it("warns for unknown adapter IDs in --tools flag", async () => { + mockRegistryGet.mockReturnValue(undefined); + + await run(["generate", "--tools=nonexistent"]); + expect(allWarn()).toContain('Warning: Unknown adapter "nonexistent"'); + expect(allLog()).toContain("No tools specified"); + }); + + it("handles mixed known and unknown tools in --tools flag", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["generate", "--tools=claude-code,nonexistent"]); + expect(allWarn()).toContain('Warning: Unknown adapter "nonexistent"'); + expect(allLog()).toContain("Generating rules for 1 tool(s)"); + }); +}); + +describe("run() - install command", () => { + it("prints message when no tools detected", async () => { + mockRegistryDetectAll.mockResolvedValue([]); + await run(["install"]); + expect(allLog()).toContain("No tools specified"); + }); + + it("installs rules into tools resolved via --tools flag", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["install", "--tools=claude-code"]); + expect(allLog()).toContain("Installing rules into 1 tool(s)"); + expect(allLog()).toContain("\u2713 Claude Code"); + }); + + it("installs for all detected adapters when no --tools flag", 
async () => { + const adapter1 = makeAdapter({ id: "claude-code", name: "Claude Code" }); + const adapter2 = makeAdapter({ id: "cursor", name: "Cursor" }); + mockRegistryDetectAll.mockResolvedValue([adapter1, adapter2]); + + await run(["install"]); + expect(allLog()).toContain("Installing rules into 2 tool(s)"); + expect(allLog()).toContain("\u2713 Claude Code"); + expect(allLog()).toContain("\u2713 Cursor"); + }); +}); + +describe("run() - install guard", () => { + it("skips undetected tool in --tools and warns", async () => { + const adapter = makeAdapter({ + id: "kiro", + name: "Kiro", + detect: vi.fn<() => Promise>().mockResolvedValue(false), + }); + + mockRegistryGet.mockImplementation((id: string) => { + if (id === "kiro") return adapter; + return undefined; + }); + + await run(["install", "--tools=kiro"]); + expect(allLog()).toContain("No tools specified"); + }); + + it("--force bypasses detection check for --tools", async () => { + const installFn = vi + .fn<(files: GeneratedFile[]) => Promise>() + .mockResolvedValue(undefined); + const adapter = makeAdapter({ + id: "kiro", + name: "Kiro", + detect: vi.fn<() => Promise>().mockResolvedValue(false), + install: installFn, + }); + + mockRegistryGet.mockImplementation((id: string) => { + if (id === "kiro") return adapter; + return undefined; + }); + + await run(["install", "--tools=kiro", "--force"]); + expect(installFn).toHaveBeenCalled(); + expect(allLog()).toContain("Installing rules into 1 tool(s)"); + }); +}); + +describe("run() - import command", () => { + it("prompts for --from when not specified", async () => { + await run(["import"]); + expect(allLog()).toContain("Specify source tool with --from"); + }); + + it("shows error for unknown --from tool", async () => { + mockRegistryGet.mockReturnValue(undefined); + await run(["import", "--from=nonexistent"]); + expect(allError()).toContain("Unknown tool: nonexistent"); + }); + + it("imports rules from specified tool", async () => { + const importFn = 
vi.fn<() => Promise>().mockResolvedValue([ + { id: "ts", name: "TypeScript", content: "Use strict TS", scope: { type: "always" } }, + { id: "test", name: "Testing", content: "Write tests", scope: { type: "always" } }, + ]); + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code", import: importFn }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["import", "--from=claude-code"]); + expect(importFn).toHaveBeenCalled(); + expect(allLog()).toContain("Imported 2 rule(s) from Claude Code"); + expect(allLog()).toContain("TypeScript (ts)"); + expect(allLog()).toContain("Testing (test)"); + }); + + it("shows zero count when no rules found", async () => { + const importFn = vi.fn<() => Promise>().mockResolvedValue([]); + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code", import: importFn }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["import", "--from=claude-code"]); + expect(allLog()).toContain("Imported 0 rule(s) from Claude Code"); + }); +}); + +describe("run() - export command", () => { + it("prompts for --from when not specified", async () => { + await run(["export"]); + expect(allLog()).toContain("Specify source tool with --from"); + }); + + it("shows error for unknown --from tool", async () => { + mockRegistryGet.mockReturnValue(undefined); + await run(["export", "--from=nonexistent"]); + expect(allError()).toContain("Unknown tool: nonexistent"); + }); + + it("exports rules as JSON to stdout", async () => { + const rules: RuleDefinition[] = [ + { id: "ts", name: "TypeScript", content: "Use strict TS", scope: { type: "always" } }, + ]; + const importFn = vi.fn<() => Promise>().mockResolvedValue(rules); + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code", import: importFn }); + mockRegistryGet.mockImplementation((id: string) => { 
+ if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["export", "--from=claude-code"]); + const output = allLog(); + expect(output).toContain('"id": "ts"'); + expect(output).toContain('"name": "TypeScript"'); + expect(output).toContain('"content": "Use strict TS"'); + }); +}); + +describe("run() - sync command", () => { + it("prompts for --from when not specified", async () => { + await run(["sync"]); + expect(allLog()).toContain("Specify source tool with --from"); + }); + + it("shows error for unknown --from tool", async () => { + mockRegistryGet.mockReturnValue(undefined); + await run(["sync", "--from=nonexistent"]); + expect(allError()).toContain("Unknown tool: nonexistent"); + }); + + it("syncs rules from source to targets", async () => { + const rules: RuleDefinition[] = [ + { id: "ts", name: "TypeScript", content: "Use strict TS", scope: { type: "always" } }, + ]; + const importFn = vi.fn<() => Promise>().mockResolvedValue(rules); + const installFn = vi + .fn<(files: GeneratedFile[]) => Promise>() + .mockResolvedValue(undefined); + const generateFn = vi + .fn<(rules: RuleDefinition[]) => Promise>() + .mockResolvedValue([{ path: ".cursor/rules/ts.md", content: "# TS", format: "md" }]); + + const source = makeAdapter({ id: "claude-code", name: "Claude Code", import: importFn }); + const target = makeAdapter({ + id: "cursor", + name: "Cursor", + generate: generateFn, + install: installFn, + }); + + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return source; + if (id === "cursor") return target; + return undefined; + }); + mockRegistryDetectAll.mockResolvedValue([source, target]); + + await run(["sync", "--from=claude-code"]); + expect(importFn).toHaveBeenCalled(); + expect(generateFn).toHaveBeenCalledWith(rules); + expect(installFn).toHaveBeenCalled(); + expect(allLog()).toContain("Imported 1 rule(s) from Claude Code"); + expect(allLog()).toContain("\u2713 Cursor (1 files)"); + }); + + 
it("respects --dry-run during sync", async () => { + const rules: RuleDefinition[] = [ + { id: "ts", name: "TypeScript", content: "Use strict TS", scope: { type: "always" } }, + ]; + const importFn = vi.fn<() => Promise>().mockResolvedValue(rules); + const installFn = vi + .fn<(files: GeneratedFile[]) => Promise>() + .mockResolvedValue(undefined); + const generateFn = vi + .fn<(rules: RuleDefinition[]) => Promise>() + .mockResolvedValue([{ path: ".cursor/rules/ts.md", content: "# TS", format: "md" }]); + + const source = makeAdapter({ id: "claude-code", name: "Claude Code", import: importFn }); + const target = makeAdapter({ + id: "cursor", + name: "Cursor", + generate: generateFn, + install: installFn, + }); + + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return source; + if (id === "cursor") return target; + return undefined; + }); + mockRegistryDetectAll.mockResolvedValue([source, target]); + + await run(["sync", "--from=claude-code", "--dry-run"]); + expect(installFn).not.toHaveBeenCalled(); + expect(allLog()).toContain("[dry-run] Cursor: .cursor/rules/ts.md"); + }); + + it("excludes source tool from targets during sync", async () => { + const rules: RuleDefinition[] = [ + { id: "ts", name: "TypeScript", content: "Use strict TS", scope: { type: "always" } }, + ]; + const importFn = vi.fn<() => Promise>().mockResolvedValue(rules); + const sourceInstallFn = vi + .fn<(files: GeneratedFile[]) => Promise>() + .mockResolvedValue(undefined); + + const source = makeAdapter({ + id: "claude-code", + name: "Claude Code", + import: importFn, + install: sourceInstallFn, + }); + + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return source; + return undefined; + }); + // When only source is detected, it should be excluded as a target + mockRegistryDetectAll.mockResolvedValue([source]); + + await run(["sync", "--from=claude-code"]); + expect(sourceInstallFn).not.toHaveBeenCalled(); + }); +}); + 
+describe("run() - flag parsing", () => { + it("parses --tools flag with = syntax", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["generate", "--tools=claude-code"]); + expect(mockRegistryGet).toHaveBeenCalledWith("claude-code"); + }); + + it("parses --from flag with = syntax", async () => { + const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["import", "--from=claude-code"]); + expect(mockRegistryGet).toHaveBeenCalledWith("claude-code"); + }); + + it("parses --dry-run flag", async () => { + await run(["init", "--dry-run"]); + expect(allLog()).toContain("[dry-run]"); + expect(mockWriteFile).not.toHaveBeenCalled(); + }); + + it("resolves multiple comma-separated tools", async () => { + const adapter1 = makeAdapter({ id: "claude-code", name: "Claude Code" }); + const adapter2 = makeAdapter({ id: "cursor", name: "Cursor" }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter1; + if (id === "cursor") return adapter2; + return undefined; + }); + + await run(["generate", "--tools=claude-code,cursor"]); + expect(allLog()).toContain("Generating rules for 2 tool(s)"); + }); + + it("parses --force flag", async () => { + const adapter = makeAdapter({ + id: "kiro", + name: "Kiro", + detect: vi.fn<() => Promise>().mockResolvedValue(false), + }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "kiro") return adapter; + return undefined; + }); + + await run(["generate", "--tools=kiro", "--force"]); + expect(allLog()).toContain("Generating rules for 1 tool(s)"); + }); + + it("trims whitespace in --tools values", async () => { + const adapter = makeAdapter({ id: "claude-code", 
name: "Claude Code" }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return adapter; + return undefined; + }); + + await run(["generate", "--tools= claude-code "]); + expect(allLog()).toContain("Generating rules for 1 tool(s)"); + }); +}); + +describe("run() - error propagation", () => { + it("propagates adapter import errors", async () => { + const adapter = makeAdapter({ + id: "broken", + name: "Broken", + import: vi + .fn<() => Promise>() + .mockRejectedValue(new Error("import failed")), + }); + mockRegistryGet.mockImplementation((id: string) => { + if (id === "broken") return adapter; + return undefined; + }); + + await expect(run(["import", "--from=broken"])).rejects.toThrow("import failed"); + }); + + it("propagates adapter generate errors during sync", async () => { + const rules: RuleDefinition[] = [ + { id: "ts", name: "TypeScript", content: "Use strict TS", scope: { type: "always" } }, + ]; + const importFn = vi.fn<() => Promise>().mockResolvedValue(rules); + const source = makeAdapter({ id: "claude-code", name: "Claude Code", import: importFn }); + const target = makeAdapter({ + id: "cursor", + name: "Cursor", + generate: vi + .fn<(rules: RuleDefinition[]) => Promise>() + .mockRejectedValue(new Error("generate failed")), + }); + + mockRegistryGet.mockImplementation((id: string) => { + if (id === "claude-code") return source; + if (id === "cursor") return target; + return undefined; + }); + mockRegistryDetectAll.mockResolvedValue([source, target]); + + await expect(run(["sync", "--from=claude-code"])).rejects.toThrow("generate failed"); + }); +}); diff --git a/packages/rules/src/cli/index.ts b/packages/rules/src/cli/index.ts new file mode 100644 index 0000000..d242c10 --- /dev/null +++ b/packages/rules/src/cli/index.ts @@ -0,0 +1,315 @@ +import { registry } from "../adapters/registry.js"; +import type { BaseRuleAdapter } from "../adapters/base.js"; +import type { RulesConfig } from "../types/index.js"; + +import 
"../adapters/all.js"; + +const HELP = ` +ai-rules - Universal project rules configuration for AI coding tools + +USAGE: + ai-rules [options] + +COMMANDS: + init Create an ai-rules.config.ts in the current directory + detect Detect which AI tools have rules configured + generate Generate rule files for detected/specified tools + install Generate and install rules into detected tools + import Import existing rules from a tool + sync Sync rules across all detected tools + export Export rules to stdout as JSON + help Show this help message + +OPTIONS: + --tools Comma-separated list of tools (e.g., --tools=claude-code,cursor) + --from Source tool for import/sync (e.g., --from=claude-code) + --config Path to config file (default: ai-rules.config.ts) + --dry-run Show what would be generated without writing files + --force Skip detection checks for --tools (install even if tool not found) + +EXAMPLES: + ai-rules init # Create config file + ai-rules detect # See which tools have rules + ai-rules generate --tools=claude-code # Generate rules for Claude Code + ai-rules sync --from=claude-code # Sync rules from Claude Code to all tools +`; + +type Flags = { + tools?: string; + from?: string; + config?: string; + dryRun?: boolean; + force?: boolean; +}; + +export async function run(args: string[]): Promise { + const command = args[0]; + const flags = parseFlags(args.slice(1)); + + switch (command) { + case "init": + await cmdInit(flags); + break; + case "detect": + await cmdDetect(); + break; + case "generate": + await cmdGenerate(flags); + break; + case "install": + await cmdInstall(flags); + break; + case "import": + await cmdImport(flags); + break; + case "sync": + await cmdSync(flags); + break; + case "export": + await cmdExport(flags); + break; + case "help": + case "--help": + case "-h": + case undefined: + console.log(HELP); + break; + default: + console.error(`Unknown command: ${command}`); + console.log(HELP); + process.exit(1); + } +} + +async function cmdInit(flags: 
Flags): Promise { + const { writeFile } = await import("node:fs/promises"); + + const template = `import { defineRulesConfig } from "@premierstudio/ai-rules"; + +export default defineRulesConfig({ + rules: [ + // { + // id: "typescript", + // name: "TypeScript Standards", + // description: "TypeScript coding standards", + // content: "Always use strict TypeScript. No any types.", + // scope: { type: "glob", patterns: ["*.ts", "*.tsx"] }, + // }, + ], +}); +`; + + if (flags.dryRun) { + console.log("[dry-run] Would create ai-rules.config.ts"); + return; + } + + await writeFile("ai-rules.config.ts", template, "utf-8"); + console.log("Created ai-rules.config.ts"); +} + +async function cmdDetect(): Promise { + console.log("Detecting AI coding tools...\n"); + + const detected = await registry.detectAll(); + const all = registry.list(); + + for (const id of all) { + const adapter = registry.get(id); + if (!adapter) continue; + + const isDetected = detected.some((d) => d.id === id); + const icon = isDetected ? "\u2713" : "\u2717"; + const color = isDetected ? "\x1b[32m" : "\x1b[90m"; + const reset = "\x1b[0m"; + + console.log(` ${color}${icon}${reset} ${adapter.name.padEnd(20)} ${adapter.configDir}`); + } + + console.log(`\nDetected ${detected.length}/${all.length} tools`); +} + +async function cmdGenerate(flags: Flags): Promise { + const adapters = await resolveAdapters(flags); + + if (adapters.length === 0) { + console.log("No tools specified. 
Use --tools to specify tools."); + return; + } + + const config = await loadConfig(flags.config); + + console.log(`Generating rules for ${adapters.length} tool(s)...\n`); + + for (const adapter of adapters) { + const files = await adapter.generate(config.rules); + + for (const file of files) { + if (flags.dryRun) { + console.log(` [dry-run] Would write: ${file.path}`); + } else { + console.log(` Generated: ${file.path}`); + } + } + + if (!flags.dryRun) { + await adapter.install(files); + } + } + + console.log("\nDone!"); +} + +async function cmdInstall(flags: Flags): Promise { + const adapters = await resolveAdapters(flags); + + if (adapters.length === 0) { + console.log("No tools specified. Use --tools to specify tools."); + return; + } + + const config = await loadConfig(flags.config); + + console.log(`Installing rules into ${adapters.length} tool(s)...\n`); + + for (const adapter of adapters) { + const files = await adapter.generate(config.rules); + + if (flags.dryRun) { + for (const file of files) { + console.log(` [dry-run] Would install: ${file.path}`); + } + } else { + await adapter.install(files); + console.log(` \u2713 ${adapter.name}`); + } + } + + console.log("\nRules installed!"); +} + +async function cmdImport(flags: Flags): Promise { + const fromId = flags.from; + if (!fromId) { + console.log("Specify source tool with --from (e.g., --from=claude-code)"); + return; + } + + const adapter = registry.get(fromId); + if (!adapter) { + console.error(`Unknown tool: ${fromId}`); + return; + } + + const rules = await adapter.import(); + console.log(`Imported ${rules.length} rule(s) from ${adapter.name}`); + + for (const rule of rules) { + console.log(` - ${rule.name} (${rule.id})`); + } +} + +async function cmdSync(flags: Flags): Promise { + const fromId = flags.from; + if (!fromId) { + console.log("Specify source tool with --from (e.g., --from=claude-code)"); + return; + } + + const source = registry.get(fromId); + if (!source) { + console.error(`Unknown tool: 
${fromId}`); + return; + } + + const rules = await source.import(); + console.log(`Imported ${rules.length} rule(s) from ${source.name}`); + + const targets = (await resolveAdapters(flags)).filter((a) => a.id !== fromId); + + for (const target of targets) { + const files = await target.generate(rules); + if (flags.dryRun) { + for (const f of files) { + console.log(` [dry-run] ${target.name}: ${f.path}`); + } + } else { + await target.install(files); + console.log(` \u2713 ${target.name} (${files.length} files)`); + } + } +} + +async function cmdExport(flags: Flags): Promise { + const fromId = flags.from; + if (!fromId) { + console.log("Specify source tool with --from (e.g., --from=claude-code)"); + return; + } + + const adapter = registry.get(fromId); + if (!adapter) { + console.error(`Unknown tool: ${fromId}`); + return; + } + + const rules = await adapter.import(); + console.log(JSON.stringify(rules, null, 2)); +} + +function parseFlags(args: string[]): Flags { + const flags: Flags = {}; + for (const arg of args) { + if (arg.startsWith("--tools=")) { + flags.tools = arg.slice(8); + } else if (arg.startsWith("--from=")) { + flags.from = arg.slice(7); + } else if (arg.startsWith("--config=")) { + flags.config = arg.slice(9); + } else if (arg === "--dry-run") { + flags.dryRun = true; + } else if (arg === "--force") { + flags.force = true; + } + } + return flags; +} + +async function loadConfig(configPath?: string): Promise { + const path = configPath ?? 
"ai-rules.config.ts"; + const { existsSync } = await import("node:fs"); + + if (!existsSync(path)) { + if (configPath) { + throw new Error(`Config file not found: ${path}`); + } + return { rules: [] }; + } + + const { resolve } = await import("node:path"); + const fullPath = resolve(process.cwd(), path); + const mod = await import(fullPath); + return mod.default as RulesConfig; +} + +async function resolveAdapters(flags: Flags): Promise { + if (flags.tools) { + const ids = flags.tools.split(",").map((t) => t.trim()); + const adapters: BaseRuleAdapter[] = []; + for (const id of ids) { + const adapter = registry.get(id); + if (!adapter) { + console.warn(` Warning: Unknown adapter "${id}"`); + continue; + } + if (!flags.force && !(await adapter.detect())) { + console.warn(` Warning: ${adapter.name} not detected, skipping (use --force to override)`); + continue; + } + adapters.push(adapter); + } + return adapters; + } + + return registry.detectAll(); +} diff --git a/packages/rules/src/config/define.test.ts b/packages/rules/src/config/define.test.ts new file mode 100644 index 0000000..7bbd367 --- /dev/null +++ b/packages/rules/src/config/define.test.ts @@ -0,0 +1,23 @@ +import { describe, it, expect } from "vitest"; +import { defineRulesConfig } from "./define.js"; + +describe("defineRulesConfig", () => { + it("returns the config as-is", () => { + const config = { + rules: [ + { + id: "test", + name: "Test Rule", + content: "Test content", + scope: { type: "always" as const }, + }, + ], + }; + expect(defineRulesConfig(config)).toBe(config); + }); + + it("works with empty rules", () => { + const config = { rules: [] }; + expect(defineRulesConfig(config)).toEqual({ rules: [] }); + }); +}); diff --git a/packages/rules/src/config/define.ts b/packages/rules/src/config/define.ts new file mode 100644 index 0000000..0ed0632 --- /dev/null +++ b/packages/rules/src/config/define.ts @@ -0,0 +1,5 @@ +import type { RulesConfig } from "../types/index.js"; + +export function 
defineRulesConfig(config: RulesConfig): RulesConfig { + return config; +} diff --git a/packages/rules/src/config/index.ts b/packages/rules/src/config/index.ts new file mode 100644 index 0000000..70c8ddc --- /dev/null +++ b/packages/rules/src/config/index.ts @@ -0,0 +1 @@ +export { defineRulesConfig } from "./define.js"; diff --git a/packages/rules/src/index.ts b/packages/rules/src/index.ts new file mode 100644 index 0000000..39a604c --- /dev/null +++ b/packages/rules/src/index.ts @@ -0,0 +1,5 @@ +export { defineRulesConfig } from "./config/index.js"; +export { registry } from "./adapters/index.js"; +export { BaseRuleAdapter } from "./adapters/index.js"; + +export type { RuleScope, RuleDefinition, RulesConfig, GeneratedFile } from "./types/index.js"; diff --git a/packages/rules/src/types/config.ts b/packages/rules/src/types/config.ts new file mode 100644 index 0000000..b9a276a --- /dev/null +++ b/packages/rules/src/types/config.ts @@ -0,0 +1,11 @@ +import type { RuleDefinition } from "./definition.js"; + +export type RulesConfig = { + rules: RuleDefinition[]; +}; + +export type GeneratedFile = { + path: string; + content: string; + format: "md" | "json" | "yaml"; +}; diff --git a/packages/rules/src/types/definition.ts b/packages/rules/src/types/definition.ts new file mode 100644 index 0000000..96545a8 --- /dev/null +++ b/packages/rules/src/types/definition.ts @@ -0,0 +1,16 @@ +export type RuleScope = + | { type: "always" } + | { type: "glob"; patterns: string[] } + | { type: "manual" } + | { type: "agent"; agentId: string }; + +export type RuleDefinition = { + id: string; + name: string; + description?: string; + content: string; + scope: RuleScope; + priority?: number; + tags?: string[]; + enabled?: boolean; +}; diff --git a/packages/rules/src/types/index.ts b/packages/rules/src/types/index.ts new file mode 100644 index 0000000..b7e1db5 --- /dev/null +++ b/packages/rules/src/types/index.ts @@ -0,0 +1,2 @@ +export type { RuleScope, RuleDefinition } from 
"./definition.js"; +export type { RulesConfig, GeneratedFile } from "./config.js"; diff --git a/packages/rules/tsconfig.json b/packages/rules/tsconfig.json new file mode 100644 index 0000000..329ab40 --- /dev/null +++ b/packages/rules/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules", "dist", "**/*.test.ts"] +} diff --git a/packages/rules/tsup.config.ts b/packages/rules/tsup.config.ts new file mode 100644 index 0000000..d87215c --- /dev/null +++ b/packages/rules/tsup.config.ts @@ -0,0 +1,29 @@ +import { defineConfig } from "tsup"; + +export default defineConfig([ + { + entry: ["src/index.ts", "src/adapters/index.ts", "src/adapters/all.ts", "src/cli/index.ts"], + format: ["esm"], + dts: true, + clean: true, + sourcemap: true, + target: "node22", + outDir: "dist", + splitting: true, + treeshake: true, + }, + { + entry: ["src/cli/bin.ts"], + format: ["esm"], + dts: false, + clean: false, + sourcemap: true, + target: "node22", + outDir: "dist/cli", + splitting: false, + treeshake: true, + banner: { + js: "#!/usr/bin/env node", + }, + }, +]); diff --git a/packages/skills/CLAUDE.md b/packages/skills/CLAUDE.md new file mode 100644 index 0000000..52bc0f8 --- /dev/null +++ b/packages/skills/CLAUDE.md @@ -0,0 +1,17 @@ +# packages/skills + +Skills/prompts configuration. The simplest content package — no metadata beyond name. + +## Key Types (`types/definition.ts`) + +`SkillDefinition`: `id`, `name`, `description?`, `content` (raw prompt text), `tags?`, `enabled?` + +Skills are just name + content. No model, tools, scope, or priority fields (contrast with agents and rules). + +## Markdown Format + +Simple: markdown H1 title as name, body as content. No YAML frontmatter. Claude Code adapter generates `.claude/commands/*.md`. + +## CLI Commands + +`detect`, `generate`, `install`, `import`, `sync`, `export`, `help` (no `init`). 
diff --git a/packages/skills/package.json b/packages/skills/package.json new file mode 100644 index 0000000..5a53718 --- /dev/null +++ b/packages/skills/package.json @@ -0,0 +1,52 @@ +{ + "name": "@premierstudio/ai-skills", + "version": "1.1.8", + "description": "Universal skills/prompts configuration for AI coding tools", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/PremierStudio/ai-tools" + }, + "bin": { + "ai-skills": "dist/cli/bin.js" + }, + "files": [ + "dist", + "README.md", + "LICENSE" + ], + "type": "module", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.js" + }, + "./adapters": { + "types": "./dist/adapters/index.d.ts", + "import": "./dist/adapters/index.js" + }, + "./adapters/all": { + "types": "./dist/adapters/all.d.ts", + "import": "./dist/adapters/all.js" + }, + "./cli": { + "types": "./dist/cli/index.d.ts", + "import": "./dist/cli/index.js" + } + }, + "publishConfig": { + "access": "public" + }, + "scripts": { + "build": "tsup", + "dev": "tsup --watch", + "typecheck": "tsc --noEmit", + "test": "vitest run", + "clean": "rm -rf dist" + }, + "engines": { + "node": ">=22.0.0" + } +} diff --git a/packages/skills/src/adapters/all.ts b/packages/skills/src/adapters/all.ts new file mode 100644 index 0000000..982629d --- /dev/null +++ b/packages/skills/src/adapters/all.ts @@ -0,0 +1,13 @@ +import "./claude-code.js"; +import "./cursor.js"; +import "./cline.js"; +import "./codex.js"; +import "./gemini-cli.js"; +import "./amp.js"; +import "./kiro.js"; +import "./opencode.js"; +import "./droid.js"; +import "./copilot.js"; +import "./windsurf.js"; +import "./roo-code.js"; +import "./continue.js"; diff --git a/packages/skills/src/adapters/amp.test.ts b/packages/skills/src/adapters/amp.test.ts new file mode 100644 index 0000000..629d66a --- /dev/null +++ b/packages/skills/src/adapters/amp.test.ts @@ -0,0 +1,120 @@ +import { describe, 
it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { AmpSkillAdapter } from "./amp.js"; +import type { SkillDefinition } from "../types/index.js"; + +describe("AmpSkillAdapter", () => { + let adapter: AmpSkillAdapter; + + const testSkill: SkillDefinition = { + id: "review", + name: "Code Review", + description: "Review code for best practices", + content: + "Please review the selected code for:\n- Security issues\n- Performance\n- Best practices", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new AmpSkillAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("amp")); + it("has correct name", () => expect(adapter.name).toBe("Amp")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config dir", () => expect(adapter.configDir).toBe(".amp/prompts")); + }); + + describe("generate", () => { + it("generates one file per skill", async () => { + const files = await adapter.generate([testSkill]); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".amp/prompts/review.md"); + expect(files[0]?.format).toBe("md"); + }); + + it("formats skill with name heading and content", async () => { + const files = await adapter.generate([testSkill]); + expect(files[0]?.content).toContain("# Code Review"); + expect(files[0]?.content).toContain("Review code for best practices"); + expect(files[0]?.content).toContain("Security issues"); + }); + + it("handles empty skills array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + + it("handles 
multiple skills", async () => { + const skills = [testSkill, { ...testSkill, id: "debug", name: "Debug" }]; + const files = await adapter.generate(skills); + expect(files).toHaveLength(2); + }); + + it("handles skill without description", async () => { + const skill: SkillDefinition = { id: "test", name: "Test", content: "Content here" }; + const files = await adapter.generate([skill]); + expect(files[0]?.content).toBe("# Test\n\nContent here\n"); + }); + }); + + describe("import", () => { + it("returns empty array when dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports skills from markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["review.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Code Review\n\nReview the code"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]?.id).toBe("review"); + expect(result[0]?.name).toBe("Code Review"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "skill.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports skill without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0]?.name).toBe("plain"); + expect(result[0]?.content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + 
expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/skills/src/adapters/amp.ts b/packages/skills/src/adapters/amp.ts new file mode 100644 index 0000000..9d07d90 --- /dev/null +++ b/packages/skills/src/adapters/amp.ts @@ -0,0 +1,67 @@ +import { BaseSkillAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { resolve, basename } from "node:path"; + +class AmpSkillAdapter extends BaseSkillAdapter { + readonly id = "amp"; + readonly name = "Amp"; + readonly nativeSupport = true; + readonly configDir = ".amp/prompts"; + readonly command = "amp"; + + async generate(skills: SkillDefinition[]): Promise<GeneratedFile[]> { + return skills.map((skill) => ({ + path: `${this.configDir}/${skill.id}.md`, + content: this.formatSkill(skill), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise<SkillDefinition[]> { + const dir = cwd ?? 
process.cwd(); + const skillsDir = resolve(dir, this.configDir); + if (!existsSync(skillsDir)) return []; + + const files = await readdir(skillsDir); + const skills: SkillDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(skillsDir, file), "utf-8"); + const id = basename(file, ".md"); + skills.push(this.parseSkill(id, content)); + } + + return skills; + } + + private formatSkill(skill: SkillDefinition): string { + let md = `# ${skill.name}\n\n`; + if (skill.description) md += `${skill.description}\n\n`; + md += skill.content + "\n"; + return md; + } + + private parseSkill(id: string, raw: string): SkillDefinition { + const lines = raw.trim().split("\n"); + let name = id; + let contentStart = 0; + + if (lines[0]?.startsWith("# ")) { + name = lines[0].slice(2).trim(); + contentStart = 1; + if (lines[contentStart]?.trim() === "") contentStart++; + } + + const content = lines.slice(contentStart).join("\n").trim(); + return { id, name, content }; + } +} + +const adapter = new AmpSkillAdapter(); +registry.register(adapter); +export { AmpSkillAdapter }; +export default adapter; diff --git a/packages/skills/src/adapters/base.test.ts b/packages/skills/src/adapters/base.test.ts new file mode 100644 index 0000000..d4e3af4 --- /dev/null +++ b/packages/skills/src/adapters/base.test.ts @@ -0,0 +1,172 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { BaseSkillAdapter } from "./base.js"; +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; + +// Concrete subclass for testing the abstract BaseSkillAdapter +class TestSkillAdapter extends BaseSkillAdapter { + readonly id = "test-skills"; + readonly name = "Test Skills"; + readonly nativeSupport = true; + readonly configDir = ".test/prompts"; + + async generate(skills: SkillDefinition[]): Promise { + return skills.map((s) => ({ + path: `.test/prompts/${s.id}.md`, + content: `# 
${s.name}\n\n${s.content}\n`, + format: "md" as const, + })); + } + + async import(_cwd?: string): Promise { + return []; + } +} + +// Mock node:fs and node:fs/promises +vi.mock("node:fs", () => ({ + existsSync: vi.fn(), +})); + +vi.mock("node:fs/promises", () => ({ + writeFile: vi.fn(), + mkdir: vi.fn(), +})); + +// Import mocked modules so we can control them +import { existsSync } from "node:fs"; +import { writeFile, mkdir } from "node:fs/promises"; + +const mockedExistsSync = vi.mocked(existsSync); +const mockedWriteFile = vi.mocked(writeFile); +const mockedMkdir = vi.mocked(mkdir); + +describe("BaseSkillAdapter", () => { + let adapter: TestSkillAdapter; + + beforeEach(() => { + adapter = new TestSkillAdapter(); + vi.clearAllMocks(); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + describe("abstract properties", () => { + it("exposes id, name, nativeSupport, and configDir", () => { + expect(adapter.id).toBe("test-skills"); + expect(adapter.name).toBe("Test Skills"); + expect(adapter.nativeSupport).toBe(true); + expect(adapter.configDir).toBe(".test/prompts"); + }); + }); + + describe("detect()", () => { + it("returns true when configDir exists", async () => { + mockedExistsSync.mockReturnValue(true); + const result = await adapter.detect(); + expect(result).toBe(true); + }); + + it("returns false when configDir does not exist", async () => { + mockedExistsSync.mockReturnValue(false); + const result = await adapter.detect(); + expect(result).toBe(false); + }); + + it("uses provided cwd to resolve configDir", async () => { + mockedExistsSync.mockReturnValue(true); + await adapter.detect("/custom/dir"); + expect(mockedExistsSync).toHaveBeenCalledWith(expect.stringContaining("custom/dir")); + }); + + it("uses process.cwd() when no cwd is provided", async () => { + mockedExistsSync.mockReturnValue(false); + await adapter.detect(); + expect(mockedExistsSync).toHaveBeenCalledWith(expect.stringContaining(adapter.configDir)); + }); + }); + + 
describe("generate()", () => { + it("calls the subclass implementation with skills", async () => { + const skills: SkillDefinition[] = [ + { id: "review", name: "Code Review", content: "Review the code" }, + ]; + const files = await adapter.generate(skills); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".test/prompts/review.md"); + expect(files[0]?.content).toContain("Code Review"); + }); + + it("handles empty skills array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + }); + + describe("install()", () => { + it("writes all files to disk", async () => { + mockedMkdir.mockResolvedValue(undefined); + mockedWriteFile.mockResolvedValue(undefined); + + const files: GeneratedFile[] = [ + { path: ".prompts/skill1.md", content: "# Skill 1", format: "md" }, + { path: ".prompts/skill2.md", content: "# Skill 2", format: "md" }, + ]; + + await adapter.install(files); + + expect(mockedMkdir).toHaveBeenCalledTimes(2); + expect(mockedWriteFile).toHaveBeenCalledTimes(2); + expect(mockedWriteFile).toHaveBeenCalledWith( + expect.stringContaining("skill1.md"), + "# Skill 1", + "utf-8", + ); + }); + + it("creates parent directories recursively", async () => { + mockedMkdir.mockResolvedValue(undefined); + mockedWriteFile.mockResolvedValue(undefined); + + const files: GeneratedFile[] = [ + { path: "deep/nested/dir/skill.md", content: "test", format: "md" }, + ]; + + await adapter.install(files); + + expect(mockedMkdir).toHaveBeenCalledWith(expect.stringContaining("deep/nested/dir"), { + recursive: true, + }); + }); + + it("handles empty file array", async () => { + await adapter.install([]); + expect(mockedMkdir).not.toHaveBeenCalled(); + expect(mockedWriteFile).not.toHaveBeenCalled(); + }); + + it("uses provided cwd to resolve file paths", async () => { + mockedMkdir.mockResolvedValue(undefined); + mockedWriteFile.mockResolvedValue(undefined); + + const files: GeneratedFile[] = [{ path: "skill.md", content: "test", 
format: "md" }]; + + await adapter.install(files, "/custom/project"); + + expect(mockedWriteFile).toHaveBeenCalledWith( + expect.stringContaining("/custom/project"), + "test", + "utf-8", + ); + }); + }); + + describe("uninstall()", () => { + it("is a no-op by default", async () => { + // Default implementation does nothing; subclasses override + await adapter.uninstall(); + // No error thrown means it worked + }); + }); +}); diff --git a/packages/skills/src/adapters/base.ts b/packages/skills/src/adapters/base.ts new file mode 100644 index 0000000..9cccc93 --- /dev/null +++ b/packages/skills/src/adapters/base.ts @@ -0,0 +1,47 @@ +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { writeFile, mkdir } from "node:fs/promises"; +import { dirname, resolve } from "node:path"; + +export abstract class BaseSkillAdapter { + abstract readonly id: string; + abstract readonly name: string; + abstract readonly nativeSupport: boolean; + abstract readonly configDir: string; + + /** CLI binary name for detection (e.g., "claude", "cursor"). Override in subclass. */ + readonly command?: string; + + abstract generate(skills: SkillDefinition[]): Promise; + abstract import(cwd?: string): Promise; + + async detect(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const hasDir = existsSync(resolve(dir, this.configDir)); + if (hasDir) return true; + if (this.command) return this.commandExists(this.command); + return false; + } + + protected async commandExists(command: string): Promise { + const { exec } = await import("node:child_process"); + return new Promise((ok) => { + exec(`which ${command}`, (error: Error | null) => { + ok(!error); + }); + }); + } + + async install(files: GeneratedFile[], cwd?: string): Promise { + const dir = cwd ?? 
process.cwd(); + for (const file of files) { + const fullPath = resolve(dir, file.path); + await mkdir(dirname(fullPath), { recursive: true }); + await writeFile(fullPath, file.content, "utf-8"); + } + } + + async uninstall(_cwd?: string): Promise { + // Subclass can override + } +} diff --git a/packages/skills/src/adapters/claude-code.test.ts b/packages/skills/src/adapters/claude-code.test.ts new file mode 100644 index 0000000..bf7ff5a --- /dev/null +++ b/packages/skills/src/adapters/claude-code.test.ts @@ -0,0 +1,129 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { ClaudeCodeSkillAdapter } from "./claude-code.js"; +import type { SkillDefinition } from "../types/index.js"; + +describe("ClaudeCodeSkillAdapter", () => { + let adapter: ClaudeCodeSkillAdapter; + + const testSkill: SkillDefinition = { + id: "review", + name: "Code Review", + description: "Review code for best practices", + content: + "Please review the selected code for:\n- Security issues\n- Performance\n- Best practices", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new ClaudeCodeSkillAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("claude-code")); + it("has correct name", () => expect(adapter.name).toBe("Claude Code")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config dir", () => expect(adapter.configDir).toBe(".claude/commands")); + }); + + describe("generate", () => { + it("generates one file per skill", async () => { + const files = await 
adapter.generate([testSkill]); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".claude/commands/review.md"); + expect(files[0]?.format).toBe("md"); + }); + + it("formats skill with name heading and content", async () => { + const files = await adapter.generate([testSkill]); + expect(files[0]?.content).toContain("# Code Review"); + expect(files[0]?.content).toContain("Review code for best practices"); + expect(files[0]?.content).toContain("Security issues"); + }); + + it("handles empty skills array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + + it("handles multiple skills", async () => { + const skills = [testSkill, { ...testSkill, id: "debug", name: "Debug" }]; + const files = await adapter.generate(skills); + expect(files).toHaveLength(2); + }); + + it("handles skill without description", async () => { + const skill: SkillDefinition = { id: "test", name: "Test", content: "Content here" }; + const files = await adapter.generate([skill]); + expect(files[0]?.content).toBe("# Test\n\nContent here\n"); + }); + }); + + describe("import", () => { + it("returns empty array when dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports skills from markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["review.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Code Review\n\nReview the code"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]?.id).toBe("review"); + expect(result[0]?.name).toBe("Code Review"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "skill.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const 
result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports skill without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0]?.name).toBe("plain"); + expect(result[0]?.content).toBe("Just content, no heading"); + }); + + it("imports skill with heading but no blank line after", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["test.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# My Skill\nDirect content"); + const result = await adapter.import("/test"); + expect(result[0]?.name).toBe("My Skill"); + expect(result[0]?.content).toBe("Direct content"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/skills/src/adapters/claude-code.ts b/packages/skills/src/adapters/claude-code.ts new file mode 100644 index 0000000..844cc84 --- /dev/null +++ b/packages/skills/src/adapters/claude-code.ts @@ -0,0 +1,67 @@ +import { BaseSkillAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { resolve, basename } from "node:path"; + +class ClaudeCodeSkillAdapter extends BaseSkillAdapter { + readonly id = "claude-code"; + readonly name = "Claude Code"; + readonly nativeSupport = true; + readonly configDir = ".claude/commands"; + readonly command = "claude"; + + async generate(skills: SkillDefinition[]): Promise { + return skills.map((skill) => ({ + path: `${this.configDir}/${skill.id}.md`, + content: 
this.formatSkill(skill), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const skillsDir = resolve(dir, this.configDir); + if (!existsSync(skillsDir)) return []; + + const files = await readdir(skillsDir); + const skills: SkillDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(skillsDir, file), "utf-8"); + const id = basename(file, ".md"); + skills.push(this.parseSkill(id, content)); + } + + return skills; + } + + private formatSkill(skill: SkillDefinition): string { + let md = `# ${skill.name}\n\n`; + if (skill.description) md += `${skill.description}\n\n`; + md += skill.content + "\n"; + return md; + } + + private parseSkill(id: string, raw: string): SkillDefinition { + const lines = raw.trim().split("\n"); + let name = id; + let contentStart = 0; + + if (lines[0]?.startsWith("# ")) { + name = lines[0].slice(2).trim(); + contentStart = 1; + if (lines[contentStart]?.trim() === "") contentStart++; + } + + const content = lines.slice(contentStart).join("\n").trim(); + return { id, name, content }; + } +} + +const adapter = new ClaudeCodeSkillAdapter(); +registry.register(adapter); +export { ClaudeCodeSkillAdapter }; +export default adapter; diff --git a/packages/skills/src/adapters/cline.test.ts b/packages/skills/src/adapters/cline.test.ts new file mode 100644 index 0000000..9089a76 --- /dev/null +++ b/packages/skills/src/adapters/cline.test.ts @@ -0,0 +1,120 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { 
ClineSkillAdapter } from "./cline.js"; +import type { SkillDefinition } from "../types/index.js"; + +describe("ClineSkillAdapter", () => { + let adapter: ClineSkillAdapter; + + const testSkill: SkillDefinition = { + id: "review", + name: "Code Review", + description: "Review code for best practices", + content: + "Please review the selected code for:\n- Security issues\n- Performance\n- Best practices", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new ClineSkillAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("cline")); + it("has correct name", () => expect(adapter.name).toBe("Cline")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config dir", () => expect(adapter.configDir).toBe(".cline/prompts")); + }); + + describe("generate", () => { + it("generates one file per skill", async () => { + const files = await adapter.generate([testSkill]); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".cline/prompts/review.md"); + expect(files[0]?.format).toBe("md"); + }); + + it("formats skill with name heading and content", async () => { + const files = await adapter.generate([testSkill]); + expect(files[0]?.content).toContain("# Code Review"); + expect(files[0]?.content).toContain("Review code for best practices"); + expect(files[0]?.content).toContain("Security issues"); + }); + + it("handles empty skills array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + + it("handles multiple skills", async () => { + const skills = [testSkill, { ...testSkill, id: "debug", name: "Debug" }]; + const files = await adapter.generate(skills); + expect(files).toHaveLength(2); + }); + + it("handles skill without description", async () => { + const skill: SkillDefinition = { id: "test", name: "Test", content: "Content here" }; + const files = await adapter.generate([skill]); + expect(files[0]?.content).toBe("# 
Test\n\nContent here\n"); + }); + }); + + describe("import", () => { + it("returns empty array when dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports skills from markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["review.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Code Review\n\nReview the code"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]?.id).toBe("review"); + expect(result[0]?.name).toBe("Code Review"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "skill.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports skill without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0]?.name).toBe("plain"); + expect(result[0]?.content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/skills/src/adapters/cline.ts b/packages/skills/src/adapters/cline.ts new file mode 100644 index 0000000..13d1ace --- /dev/null +++ b/packages/skills/src/adapters/cline.ts @@ -0,0 +1,67 @@ +import { BaseSkillAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } 
from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { resolve, basename } from "node:path"; + +class ClineSkillAdapter extends BaseSkillAdapter { + readonly id = "cline"; + readonly name = "Cline"; + readonly nativeSupport = true; + readonly configDir = ".cline/prompts"; + readonly command = "cline"; + + async generate(skills: SkillDefinition[]): Promise { + return skills.map((skill) => ({ + path: `${this.configDir}/${skill.id}.md`, + content: this.formatSkill(skill), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const skillsDir = resolve(dir, this.configDir); + if (!existsSync(skillsDir)) return []; + + const files = await readdir(skillsDir); + const skills: SkillDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(skillsDir, file), "utf-8"); + const id = basename(file, ".md"); + skills.push(this.parseSkill(id, content)); + } + + return skills; + } + + private formatSkill(skill: SkillDefinition): string { + let md = `# ${skill.name}\n\n`; + if (skill.description) md += `${skill.description}\n\n`; + md += skill.content + "\n"; + return md; + } + + private parseSkill(id: string, raw: string): SkillDefinition { + const lines = raw.trim().split("\n"); + let name = id; + let contentStart = 0; + + if (lines[0]?.startsWith("# ")) { + name = lines[0].slice(2).trim(); + contentStart = 1; + if (lines[contentStart]?.trim() === "") contentStart++; + } + + const content = lines.slice(contentStart).join("\n").trim(); + return { id, name, content }; + } +} + +const adapter = new ClineSkillAdapter(); +registry.register(adapter); +export { ClineSkillAdapter }; +export default adapter; diff --git a/packages/skills/src/adapters/codex.test.ts b/packages/skills/src/adapters/codex.test.ts new file mode 100644 index 0000000..c43281e --- /dev/null +++ b/packages/skills/src/adapters/codex.test.ts @@ -0,0 +1,120 @@ 
+import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { CodexSkillAdapter } from "./codex.js"; +import type { SkillDefinition } from "../types/index.js"; + +describe("CodexSkillAdapter", () => { + let adapter: CodexSkillAdapter; + + const testSkill: SkillDefinition = { + id: "review", + name: "Code Review", + description: "Review code for best practices", + content: + "Please review the selected code for:\n- Security issues\n- Performance\n- Best practices", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new CodexSkillAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("codex")); + it("has correct name", () => expect(adapter.name).toBe("Codex")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config dir", () => expect(adapter.configDir).toBe(".codex/prompts")); + }); + + describe("generate", () => { + it("generates one file per skill", async () => { + const files = await adapter.generate([testSkill]); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".codex/prompts/review.md"); + expect(files[0]?.format).toBe("md"); + }); + + it("formats skill with name heading and content", async () => { + const files = await adapter.generate([testSkill]); + expect(files[0]?.content).toContain("# Code Review"); + expect(files[0]?.content).toContain("Review code for best practices"); + expect(files[0]?.content).toContain("Security issues"); + }); + + it("handles empty skills array", async () => { + const files = await adapter.generate([]); + 
expect(files).toHaveLength(0); + }); + + it("handles multiple skills", async () => { + const skills = [testSkill, { ...testSkill, id: "debug", name: "Debug" }]; + const files = await adapter.generate(skills); + expect(files).toHaveLength(2); + }); + + it("handles skill without description", async () => { + const skill: SkillDefinition = { id: "test", name: "Test", content: "Content here" }; + const files = await adapter.generate([skill]); + expect(files[0]?.content).toBe("# Test\n\nContent here\n"); + }); + }); + + describe("import", () => { + it("returns empty array when dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports skills from markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["review.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Code Review\n\nReview the code"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]?.id).toBe("review"); + expect(result[0]?.name).toBe("Code Review"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "skill.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports skill without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0]?.name).toBe("plain"); + expect(result[0]?.content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const 
result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/skills/src/adapters/codex.ts b/packages/skills/src/adapters/codex.ts new file mode 100644 index 0000000..0a97bf8 --- /dev/null +++ b/packages/skills/src/adapters/codex.ts @@ -0,0 +1,67 @@ +import { BaseSkillAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { resolve, basename } from "node:path"; + +class CodexSkillAdapter extends BaseSkillAdapter { + readonly id = "codex"; + readonly name = "Codex"; + readonly nativeSupport = true; + readonly configDir = ".codex/prompts"; + readonly command = "codex"; + + async generate(skills: SkillDefinition[]): Promise { + return skills.map((skill) => ({ + path: `${this.configDir}/${skill.id}.md`, + content: this.formatSkill(skill), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise { + const dir = cwd ?? 
process.cwd(); + const skillsDir = resolve(dir, this.configDir); + if (!existsSync(skillsDir)) return []; + + const files = await readdir(skillsDir); + const skills: SkillDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(skillsDir, file), "utf-8"); + const id = basename(file, ".md"); + skills.push(this.parseSkill(id, content)); + } + + return skills; + } + + private formatSkill(skill: SkillDefinition): string { + let md = `# ${skill.name}\n\n`; + if (skill.description) md += `${skill.description}\n\n`; + md += skill.content + "\n"; + return md; + } + + private parseSkill(id: string, raw: string): SkillDefinition { + const lines = raw.trim().split("\n"); + let name = id; + let contentStart = 0; + + if (lines[0]?.startsWith("# ")) { + name = lines[0].slice(2).trim(); + contentStart = 1; + if (lines[contentStart]?.trim() === "") contentStart++; + } + + const content = lines.slice(contentStart).join("\n").trim(); + return { id, name, content }; + } +} + +const adapter = new CodexSkillAdapter(); +registry.register(adapter); +export { CodexSkillAdapter }; +export default adapter; diff --git a/packages/skills/src/adapters/continue.test.ts b/packages/skills/src/adapters/continue.test.ts new file mode 100644 index 0000000..a0c2e16 --- /dev/null +++ b/packages/skills/src/adapters/continue.test.ts @@ -0,0 +1,120 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { ContinueSkillAdapter } from "./continue.js"; +import type { SkillDefinition } from "../types/index.js"; + 
+describe("ContinueSkillAdapter", () => { + let adapter: ContinueSkillAdapter; + + const testSkill: SkillDefinition = { + id: "review", + name: "Code Review", + description: "Review code for best practices", + content: + "Please review the selected code for:\n- Security issues\n- Performance\n- Best practices", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new ContinueSkillAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("continue")); + it("has correct name", () => expect(adapter.name).toBe("Continue")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config dir", () => expect(adapter.configDir).toBe(".continue/prompts")); + }); + + describe("generate", () => { + it("generates one file per skill", async () => { + const files = await adapter.generate([testSkill]); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".continue/prompts/review.md"); + expect(files[0]?.format).toBe("md"); + }); + + it("formats skill with name heading and content", async () => { + const files = await adapter.generate([testSkill]); + expect(files[0]?.content).toContain("# Code Review"); + expect(files[0]?.content).toContain("Review code for best practices"); + expect(files[0]?.content).toContain("Security issues"); + }); + + it("handles empty skills array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + + it("handles multiple skills", async () => { + const skills = [testSkill, { ...testSkill, id: "debug", name: "Debug" }]; + const files = await adapter.generate(skills); + expect(files).toHaveLength(2); + }); + + it("handles skill without description", async () => { + const skill: SkillDefinition = { id: "test", name: "Test", content: "Content here" }; + const files = await adapter.generate([skill]); + expect(files[0]?.content).toBe("# Test\n\nContent here\n"); + }); + }); + + describe("import", () => { + 
it("returns empty array when dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports skills from markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["review.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Code Review\n\nReview the code"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]?.id).toBe("review"); + expect(result[0]?.name).toBe("Code Review"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "skill.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports skill without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0]?.name).toBe("plain"); + expect(result[0]?.content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/skills/src/adapters/continue.ts b/packages/skills/src/adapters/continue.ts new file mode 100644 index 0000000..2604def --- /dev/null +++ b/packages/skills/src/adapters/continue.ts @@ -0,0 +1,66 @@ +import { BaseSkillAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile, readdir } from 
"node:fs/promises"; +import { resolve, basename } from "node:path"; + +class ContinueSkillAdapter extends BaseSkillAdapter { + readonly id = "continue"; + readonly name = "Continue"; + readonly nativeSupport = true; + readonly configDir = ".continue/prompts"; + + async generate(skills: SkillDefinition[]): Promise { + return skills.map((skill) => ({ + path: `${this.configDir}/${skill.id}.md`, + content: this.formatSkill(skill), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const skillsDir = resolve(dir, this.configDir); + if (!existsSync(skillsDir)) return []; + + const files = await readdir(skillsDir); + const skills: SkillDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(skillsDir, file), "utf-8"); + const id = basename(file, ".md"); + skills.push(this.parseSkill(id, content)); + } + + return skills; + } + + private formatSkill(skill: SkillDefinition): string { + let md = `# ${skill.name}\n\n`; + if (skill.description) md += `${skill.description}\n\n`; + md += skill.content + "\n"; + return md; + } + + private parseSkill(id: string, raw: string): SkillDefinition { + const lines = raw.trim().split("\n"); + let name = id; + let contentStart = 0; + + if (lines[0]?.startsWith("# ")) { + name = lines[0].slice(2).trim(); + contentStart = 1; + if (lines[contentStart]?.trim() === "") contentStart++; + } + + const content = lines.slice(contentStart).join("\n").trim(); + return { id, name, content }; + } +} + +const adapter = new ContinueSkillAdapter(); +registry.register(adapter); +export { ContinueSkillAdapter }; +export default adapter; diff --git a/packages/skills/src/adapters/copilot.test.ts b/packages/skills/src/adapters/copilot.test.ts new file mode 100644 index 0000000..8dbe85d --- /dev/null +++ b/packages/skills/src/adapters/copilot.test.ts @@ -0,0 +1,120 @@ +import { describe, it, expect, vi, beforeEach } from 
"vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { CopilotSkillAdapter } from "./copilot.js"; +import type { SkillDefinition } from "../types/index.js"; + +describe("CopilotSkillAdapter", () => { + let adapter: CopilotSkillAdapter; + + const testSkill: SkillDefinition = { + id: "review", + name: "Code Review", + description: "Review code for best practices", + content: + "Please review the selected code for:\n- Security issues\n- Performance\n- Best practices", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new CopilotSkillAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("copilot")); + it("has correct name", () => expect(adapter.name).toBe("Copilot")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config dir", () => expect(adapter.configDir).toBe(".github/skills")); + }); + + describe("generate", () => { + it("generates one file per skill", async () => { + const files = await adapter.generate([testSkill]); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".github/skills/review.md"); + expect(files[0]?.format).toBe("md"); + }); + + it("formats skill with name heading and content", async () => { + const files = await adapter.generate([testSkill]); + expect(files[0]?.content).toContain("# Code Review"); + expect(files[0]?.content).toContain("Review code for best practices"); + expect(files[0]?.content).toContain("Security issues"); + }); + + it("handles empty skills array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + + it("handles 
multiple skills", async () => { + const skills = [testSkill, { ...testSkill, id: "debug", name: "Debug" }]; + const files = await adapter.generate(skills); + expect(files).toHaveLength(2); + }); + + it("handles skill without description", async () => { + const skill: SkillDefinition = { id: "test", name: "Test", content: "Content here" }; + const files = await adapter.generate([skill]); + expect(files[0]?.content).toBe("# Test\n\nContent here\n"); + }); + }); + + describe("import", () => { + it("returns empty array when dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports skills from markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["review.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Code Review\n\nReview the code"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]?.id).toBe("review"); + expect(result[0]?.name).toBe("Code Review"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "skill.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports skill without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0]?.name).toBe("plain"); + expect(result[0]?.content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + 
expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/skills/src/adapters/copilot.ts b/packages/skills/src/adapters/copilot.ts new file mode 100644 index 0000000..5a586bf --- /dev/null +++ b/packages/skills/src/adapters/copilot.ts @@ -0,0 +1,66 @@ +import { BaseSkillAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { resolve, basename } from "node:path"; + +class CopilotSkillAdapter extends BaseSkillAdapter { + readonly id = "copilot"; + readonly name = "Copilot"; + readonly nativeSupport = true; + readonly configDir = ".github/skills"; + + async generate(skills: SkillDefinition[]): Promise { + return skills.map((skill) => ({ + path: `${this.configDir}/${skill.id}.md`, + content: this.formatSkill(skill), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise { + const dir = cwd ?? 
process.cwd(); + const skillsDir = resolve(dir, this.configDir); + if (!existsSync(skillsDir)) return []; + + const files = await readdir(skillsDir); + const skills: SkillDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(skillsDir, file), "utf-8"); + const id = basename(file, ".md"); + skills.push(this.parseSkill(id, content)); + } + + return skills; + } + + private formatSkill(skill: SkillDefinition): string { + let md = `# ${skill.name}\n\n`; + if (skill.description) md += `${skill.description}\n\n`; + md += skill.content + "\n"; + return md; + } + + private parseSkill(id: string, raw: string): SkillDefinition { + const lines = raw.trim().split("\n"); + let name = id; + let contentStart = 0; + + if (lines[0]?.startsWith("# ")) { + name = lines[0].slice(2).trim(); + contentStart = 1; + if (lines[contentStart]?.trim() === "") contentStart++; + } + + const content = lines.slice(contentStart).join("\n").trim(); + return { id, name, content }; + } +} + +const adapter = new CopilotSkillAdapter(); +registry.register(adapter); +export { CopilotSkillAdapter }; +export default adapter; diff --git a/packages/skills/src/adapters/cursor.test.ts b/packages/skills/src/adapters/cursor.test.ts new file mode 100644 index 0000000..0fba374 --- /dev/null +++ b/packages/skills/src/adapters/cursor.test.ts @@ -0,0 +1,129 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { CursorSkillAdapter } from "./cursor.js"; +import type { SkillDefinition } from "../types/index.js"; + +describe("CursorSkillAdapter", 
() => { + let adapter: CursorSkillAdapter; + + const testSkill: SkillDefinition = { + id: "review", + name: "Code Review", + description: "Review code for best practices", + content: + "Please review the selected code for:\n- Security issues\n- Performance\n- Best practices", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new CursorSkillAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("cursor")); + it("has correct name", () => expect(adapter.name).toBe("Cursor")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config dir", () => expect(adapter.configDir).toBe(".cursor/prompts")); + }); + + describe("generate", () => { + it("generates one file per skill", async () => { + const files = await adapter.generate([testSkill]); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".cursor/prompts/review.md"); + expect(files[0]?.format).toBe("md"); + }); + + it("formats skill with name heading and content", async () => { + const files = await adapter.generate([testSkill]); + expect(files[0]?.content).toContain("# Code Review"); + expect(files[0]?.content).toContain("Review code for best practices"); + expect(files[0]?.content).toContain("Security issues"); + }); + + it("handles empty skills array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + + it("handles multiple skills", async () => { + const skills = [testSkill, { ...testSkill, id: "debug", name: "Debug" }]; + const files = await adapter.generate(skills); + expect(files).toHaveLength(2); + }); + + it("handles skill without description", async () => { + const skill: SkillDefinition = { id: "test", name: "Test", content: "Content here" }; + const files = await adapter.generate([skill]); + expect(files[0]?.content).toBe("# Test\n\nContent here\n"); + }); + }); + + describe("import", () => { + it("returns empty array when dir does not exist", 
async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports skills from markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["review.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Code Review\n\nReview the code"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]?.id).toBe("review"); + expect(result[0]?.name).toBe("Code Review"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "skill.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports skill without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0]?.name).toBe("plain"); + expect(result[0]?.content).toBe("Just content, no heading"); + }); + + it("imports skill with heading but no blank line after", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["test.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# My Skill\nDirect content"); + const result = await adapter.import("/test"); + expect(result[0]?.name).toBe("My Skill"); + expect(result[0]?.content).toBe("Direct content"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/skills/src/adapters/cursor.ts b/packages/skills/src/adapters/cursor.ts new file mode 100644 
index 0000000..9b80e88 --- /dev/null +++ b/packages/skills/src/adapters/cursor.ts @@ -0,0 +1,67 @@ +import { BaseSkillAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { resolve, basename } from "node:path"; + +class CursorSkillAdapter extends BaseSkillAdapter { + readonly id = "cursor"; + readonly name = "Cursor"; + readonly nativeSupport = true; + readonly configDir = ".cursor/prompts"; + readonly command = "cursor"; + + async generate(skills: SkillDefinition[]): Promise { + return skills.map((skill) => ({ + path: `${this.configDir}/${skill.id}.md`, + content: this.formatSkill(skill), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const skillsDir = resolve(dir, this.configDir); + if (!existsSync(skillsDir)) return []; + + const files = await readdir(skillsDir); + const skills: SkillDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(skillsDir, file), "utf-8"); + const id = basename(file, ".md"); + skills.push(this.parseSkill(id, content)); + } + + return skills; + } + + private formatSkill(skill: SkillDefinition): string { + let md = `# ${skill.name}\n\n`; + if (skill.description) md += `${skill.description}\n\n`; + md += skill.content + "\n"; + return md; + } + + private parseSkill(id: string, raw: string): SkillDefinition { + const lines = raw.trim().split("\n"); + let name = id; + let contentStart = 0; + + if (lines[0]?.startsWith("# ")) { + name = lines[0].slice(2).trim(); + contentStart = 1; + if (lines[contentStart]?.trim() === "") contentStart++; + } + + const content = lines.slice(contentStart).join("\n").trim(); + return { id, name, content }; + } +} + +const adapter = new CursorSkillAdapter(); 
+registry.register(adapter); +export { CursorSkillAdapter }; +export default adapter; diff --git a/packages/skills/src/adapters/droid.test.ts b/packages/skills/src/adapters/droid.test.ts new file mode 100644 index 0000000..b9b45ff --- /dev/null +++ b/packages/skills/src/adapters/droid.test.ts @@ -0,0 +1,120 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { DroidSkillAdapter } from "./droid.js"; +import type { SkillDefinition } from "../types/index.js"; + +describe("DroidSkillAdapter", () => { + let adapter: DroidSkillAdapter; + + const testSkill: SkillDefinition = { + id: "review", + name: "Code Review", + description: "Review code for best practices", + content: + "Please review the selected code for:\n- Security issues\n- Performance\n- Best practices", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new DroidSkillAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("droid")); + it("has correct name", () => expect(adapter.name).toBe("Droid")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config dir", () => expect(adapter.configDir).toBe(".factory/prompts")); + }); + + describe("generate", () => { + it("generates one file per skill", async () => { + const files = await adapter.generate([testSkill]); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".factory/prompts/review.md"); + expect(files[0]?.format).toBe("md"); + }); + + it("formats skill with name heading and content", async () => { + const files = await 
adapter.generate([testSkill]); + expect(files[0]?.content).toContain("# Code Review"); + expect(files[0]?.content).toContain("Review code for best practices"); + expect(files[0]?.content).toContain("Security issues"); + }); + + it("handles empty skills array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + + it("handles multiple skills", async () => { + const skills = [testSkill, { ...testSkill, id: "debug", name: "Debug" }]; + const files = await adapter.generate(skills); + expect(files).toHaveLength(2); + }); + + it("handles skill without description", async () => { + const skill: SkillDefinition = { id: "test", name: "Test", content: "Content here" }; + const files = await adapter.generate([skill]); + expect(files[0]?.content).toBe("# Test\n\nContent here\n"); + }); + }); + + describe("import", () => { + it("returns empty array when dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports skills from markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["review.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Code Review\n\nReview the code"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]?.id).toBe("review"); + expect(result[0]?.name).toBe("Code Review"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "skill.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports skill without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + 
vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0]?.name).toBe("plain"); + expect(result[0]?.content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/skills/src/adapters/droid.ts b/packages/skills/src/adapters/droid.ts new file mode 100644 index 0000000..a624d36 --- /dev/null +++ b/packages/skills/src/adapters/droid.ts @@ -0,0 +1,67 @@ +import { BaseSkillAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { resolve, basename } from "node:path"; + +class DroidSkillAdapter extends BaseSkillAdapter { + readonly id = "droid"; + readonly name = "Droid"; + readonly nativeSupport = true; + readonly configDir = ".factory/prompts"; + readonly command = "droid"; + + async generate(skills: SkillDefinition[]): Promise { + return skills.map((skill) => ({ + path: `${this.configDir}/${skill.id}.md`, + content: this.formatSkill(skill), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise { + const dir = cwd ?? 
process.cwd(); + const skillsDir = resolve(dir, this.configDir); + if (!existsSync(skillsDir)) return []; + + const files = await readdir(skillsDir); + const skills: SkillDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(skillsDir, file), "utf-8"); + const id = basename(file, ".md"); + skills.push(this.parseSkill(id, content)); + } + + return skills; + } + + private formatSkill(skill: SkillDefinition): string { + let md = `# ${skill.name}\n\n`; + if (skill.description) md += `${skill.description}\n\n`; + md += skill.content + "\n"; + return md; + } + + private parseSkill(id: string, raw: string): SkillDefinition { + const lines = raw.trim().split("\n"); + let name = id; + let contentStart = 0; + + if (lines[0]?.startsWith("# ")) { + name = lines[0].slice(2).trim(); + contentStart = 1; + if (lines[contentStart]?.trim() === "") contentStart++; + } + + const content = lines.slice(contentStart).join("\n").trim(); + return { id, name, content }; + } +} + +const adapter = new DroidSkillAdapter(); +registry.register(adapter); +export { DroidSkillAdapter }; +export default adapter; diff --git a/packages/skills/src/adapters/gemini-cli.test.ts b/packages/skills/src/adapters/gemini-cli.test.ts new file mode 100644 index 0000000..1e2e9c7 --- /dev/null +++ b/packages/skills/src/adapters/gemini-cli.test.ts @@ -0,0 +1,120 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { GeminiCliSkillAdapter } from "./gemini-cli.js"; +import type { SkillDefinition } from "../types/index.js"; + 
+describe("GeminiCliSkillAdapter", () => { + let adapter: GeminiCliSkillAdapter; + + const testSkill: SkillDefinition = { + id: "review", + name: "Code Review", + description: "Review code for best practices", + content: + "Please review the selected code for:\n- Security issues\n- Performance\n- Best practices", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new GeminiCliSkillAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("gemini-cli")); + it("has correct name", () => expect(adapter.name).toBe("Gemini CLI")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config dir", () => expect(adapter.configDir).toBe(".gemini/prompts")); + }); + + describe("generate", () => { + it("generates one file per skill", async () => { + const files = await adapter.generate([testSkill]); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".gemini/prompts/review.md"); + expect(files[0]?.format).toBe("md"); + }); + + it("formats skill with name heading and content", async () => { + const files = await adapter.generate([testSkill]); + expect(files[0]?.content).toContain("# Code Review"); + expect(files[0]?.content).toContain("Review code for best practices"); + expect(files[0]?.content).toContain("Security issues"); + }); + + it("handles empty skills array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + + it("handles multiple skills", async () => { + const skills = [testSkill, { ...testSkill, id: "debug", name: "Debug" }]; + const files = await adapter.generate(skills); + expect(files).toHaveLength(2); + }); + + it("handles skill without description", async () => { + const skill: SkillDefinition = { id: "test", name: "Test", content: "Content here" }; + const files = await adapter.generate([skill]); + expect(files[0]?.content).toBe("# Test\n\nContent here\n"); + }); + }); + + describe("import", () => { + 
it("returns empty array when dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports skills from markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["review.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Code Review\n\nReview the code"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]?.id).toBe("review"); + expect(result[0]?.name).toBe("Code Review"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "skill.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports skill without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0]?.name).toBe("plain"); + expect(result[0]?.content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/skills/src/adapters/gemini-cli.ts b/packages/skills/src/adapters/gemini-cli.ts new file mode 100644 index 0000000..ff8bff6 --- /dev/null +++ b/packages/skills/src/adapters/gemini-cli.ts @@ -0,0 +1,67 @@ +import { BaseSkillAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile, readdir } from 
"node:fs/promises"; +import { resolve, basename } from "node:path"; + +class GeminiCliSkillAdapter extends BaseSkillAdapter { + readonly id = "gemini-cli"; + readonly name = "Gemini CLI"; + readonly nativeSupport = true; + readonly configDir = ".gemini/prompts"; + readonly command = "gemini"; + + async generate(skills: SkillDefinition[]): Promise { + return skills.map((skill) => ({ + path: `${this.configDir}/${skill.id}.md`, + content: this.formatSkill(skill), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const skillsDir = resolve(dir, this.configDir); + if (!existsSync(skillsDir)) return []; + + const files = await readdir(skillsDir); + const skills: SkillDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(skillsDir, file), "utf-8"); + const id = basename(file, ".md"); + skills.push(this.parseSkill(id, content)); + } + + return skills; + } + + private formatSkill(skill: SkillDefinition): string { + let md = `# ${skill.name}\n\n`; + if (skill.description) md += `${skill.description}\n\n`; + md += skill.content + "\n"; + return md; + } + + private parseSkill(id: string, raw: string): SkillDefinition { + const lines = raw.trim().split("\n"); + let name = id; + let contentStart = 0; + + if (lines[0]?.startsWith("# ")) { + name = lines[0].slice(2).trim(); + contentStart = 1; + if (lines[contentStart]?.trim() === "") contentStart++; + } + + const content = lines.slice(contentStart).join("\n").trim(); + return { id, name, content }; + } +} + +const adapter = new GeminiCliSkillAdapter(); +registry.register(adapter); +export { GeminiCliSkillAdapter }; +export default adapter; diff --git a/packages/skills/src/adapters/index.ts b/packages/skills/src/adapters/index.ts new file mode 100644 index 0000000..b43fa2b --- /dev/null +++ b/packages/skills/src/adapters/index.ts @@ -0,0 +1,2 @@ +export { BaseSkillAdapter } from "./base.js"; 
+export { registry } from "./registry.js"; diff --git a/packages/skills/src/adapters/kiro.test.ts b/packages/skills/src/adapters/kiro.test.ts new file mode 100644 index 0000000..9b444ae --- /dev/null +++ b/packages/skills/src/adapters/kiro.test.ts @@ -0,0 +1,120 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { KiroSkillAdapter } from "./kiro.js"; +import type { SkillDefinition } from "../types/index.js"; + +describe("KiroSkillAdapter", () => { + let adapter: KiroSkillAdapter; + + const testSkill: SkillDefinition = { + id: "review", + name: "Code Review", + description: "Review code for best practices", + content: + "Please review the selected code for:\n- Security issues\n- Performance\n- Best practices", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new KiroSkillAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("kiro")); + it("has correct name", () => expect(adapter.name).toBe("Kiro")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config dir", () => expect(adapter.configDir).toBe(".kiro/prompts")); + }); + + describe("generate", () => { + it("generates one file per skill", async () => { + const files = await adapter.generate([testSkill]); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".kiro/prompts/review.md"); + expect(files[0]?.format).toBe("md"); + }); + + it("formats skill with name heading and content", async () => { + const files = await adapter.generate([testSkill]); + expect(files[0]?.content).toContain("# Code 
Review"); + expect(files[0]?.content).toContain("Review code for best practices"); + expect(files[0]?.content).toContain("Security issues"); + }); + + it("handles empty skills array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + + it("handles multiple skills", async () => { + const skills = [testSkill, { ...testSkill, id: "debug", name: "Debug" }]; + const files = await adapter.generate(skills); + expect(files).toHaveLength(2); + }); + + it("handles skill without description", async () => { + const skill: SkillDefinition = { id: "test", name: "Test", content: "Content here" }; + const files = await adapter.generate([skill]); + expect(files[0]?.content).toBe("# Test\n\nContent here\n"); + }); + }); + + describe("import", () => { + it("returns empty array when dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports skills from markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["review.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Code Review\n\nReview the code"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]?.id).toBe("review"); + expect(result[0]?.name).toBe("Code Review"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "skill.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports skill without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await 
adapter.import("/test"); + expect(result[0]?.name).toBe("plain"); + expect(result[0]?.content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/skills/src/adapters/kiro.ts b/packages/skills/src/adapters/kiro.ts new file mode 100644 index 0000000..8ea2395 --- /dev/null +++ b/packages/skills/src/adapters/kiro.ts @@ -0,0 +1,67 @@ +import { BaseSkillAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { resolve, basename } from "node:path"; + +class KiroSkillAdapter extends BaseSkillAdapter { + readonly id = "kiro"; + readonly name = "Kiro"; + readonly nativeSupport = true; + readonly configDir = ".kiro/prompts"; + readonly command = "kiro"; + + async generate(skills: SkillDefinition[]): Promise { + return skills.map((skill) => ({ + path: `${this.configDir}/${skill.id}.md`, + content: this.formatSkill(skill), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise { + const dir = cwd ?? 
process.cwd(); + const skillsDir = resolve(dir, this.configDir); + if (!existsSync(skillsDir)) return []; + + const files = await readdir(skillsDir); + const skills: SkillDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(skillsDir, file), "utf-8"); + const id = basename(file, ".md"); + skills.push(this.parseSkill(id, content)); + } + + return skills; + } + + private formatSkill(skill: SkillDefinition): string { + let md = `# ${skill.name}\n\n`; + if (skill.description) md += `${skill.description}\n\n`; + md += skill.content + "\n"; + return md; + } + + private parseSkill(id: string, raw: string): SkillDefinition { + const lines = raw.trim().split("\n"); + let name = id; + let contentStart = 0; + + if (lines[0]?.startsWith("# ")) { + name = lines[0].slice(2).trim(); + contentStart = 1; + if (lines[contentStart]?.trim() === "") contentStart++; + } + + const content = lines.slice(contentStart).join("\n").trim(); + return { id, name, content }; + } +} + +const adapter = new KiroSkillAdapter(); +registry.register(adapter); +export { KiroSkillAdapter }; +export default adapter; diff --git a/packages/skills/src/adapters/opencode.test.ts b/packages/skills/src/adapters/opencode.test.ts new file mode 100644 index 0000000..9e70610 --- /dev/null +++ b/packages/skills/src/adapters/opencode.test.ts @@ -0,0 +1,120 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { OpenCodeSkillAdapter } from "./opencode.js"; +import type { SkillDefinition } from "../types/index.js"; + 
+describe("OpenCodeSkillAdapter", () => { + let adapter: OpenCodeSkillAdapter; + + const testSkill: SkillDefinition = { + id: "review", + name: "Code Review", + description: "Review code for best practices", + content: + "Please review the selected code for:\n- Security issues\n- Performance\n- Best practices", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new OpenCodeSkillAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("opencode")); + it("has correct name", () => expect(adapter.name).toBe("OpenCode")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config dir", () => expect(adapter.configDir).toBe(".opencode/prompts")); + }); + + describe("generate", () => { + it("generates one file per skill", async () => { + const files = await adapter.generate([testSkill]); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".opencode/prompts/review.md"); + expect(files[0]?.format).toBe("md"); + }); + + it("formats skill with name heading and content", async () => { + const files = await adapter.generate([testSkill]); + expect(files[0]?.content).toContain("# Code Review"); + expect(files[0]?.content).toContain("Review code for best practices"); + expect(files[0]?.content).toContain("Security issues"); + }); + + it("handles empty skills array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + + it("handles multiple skills", async () => { + const skills = [testSkill, { ...testSkill, id: "debug", name: "Debug" }]; + const files = await adapter.generate(skills); + expect(files).toHaveLength(2); + }); + + it("handles skill without description", async () => { + const skill: SkillDefinition = { id: "test", name: "Test", content: "Content here" }; + const files = await adapter.generate([skill]); + expect(files[0]?.content).toBe("# Test\n\nContent here\n"); + }); + }); + + describe("import", () => { + 
it("returns empty array when dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports skills from markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["review.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Code Review\n\nReview the code"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]?.id).toBe("review"); + expect(result[0]?.name).toBe("Code Review"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "skill.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports skill without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0]?.name).toBe("plain"); + expect(result[0]?.content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/skills/src/adapters/opencode.ts b/packages/skills/src/adapters/opencode.ts new file mode 100644 index 0000000..399dd75 --- /dev/null +++ b/packages/skills/src/adapters/opencode.ts @@ -0,0 +1,67 @@ +import { BaseSkillAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile, readdir } from 
"node:fs/promises"; +import { resolve, basename } from "node:path"; + +class OpenCodeSkillAdapter extends BaseSkillAdapter { + readonly id = "opencode"; + readonly name = "OpenCode"; + readonly nativeSupport = true; + readonly configDir = ".opencode/prompts"; + readonly command = "opencode"; + + async generate(skills: SkillDefinition[]): Promise { + return skills.map((skill) => ({ + path: `${this.configDir}/${skill.id}.md`, + content: this.formatSkill(skill), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const skillsDir = resolve(dir, this.configDir); + if (!existsSync(skillsDir)) return []; + + const files = await readdir(skillsDir); + const skills: SkillDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(skillsDir, file), "utf-8"); + const id = basename(file, ".md"); + skills.push(this.parseSkill(id, content)); + } + + return skills; + } + + private formatSkill(skill: SkillDefinition): string { + let md = `# ${skill.name}\n\n`; + if (skill.description) md += `${skill.description}\n\n`; + md += skill.content + "\n"; + return md; + } + + private parseSkill(id: string, raw: string): SkillDefinition { + const lines = raw.trim().split("\n"); + let name = id; + let contentStart = 0; + + if (lines[0]?.startsWith("# ")) { + name = lines[0].slice(2).trim(); + contentStart = 1; + if (lines[contentStart]?.trim() === "") contentStart++; + } + + const content = lines.slice(contentStart).join("\n").trim(); + return { id, name, content }; + } +} + +const adapter = new OpenCodeSkillAdapter(); +registry.register(adapter); +export { OpenCodeSkillAdapter }; +export default adapter; diff --git a/packages/skills/src/adapters/registry.test.ts b/packages/skills/src/adapters/registry.test.ts new file mode 100644 index 0000000..793100e --- /dev/null +++ b/packages/skills/src/adapters/registry.test.ts @@ -0,0 +1,137 @@ +import { describe, it, 
expect, beforeEach, vi } from "vitest"; +import { registry } from "./registry.js"; +import type { BaseSkillAdapter } from "./base.js"; +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; + +function makeFakeAdapter(id: string, detects: boolean = true): BaseSkillAdapter { + return { + id, + name: `${id} Adapter`, + nativeSupport: true, + configDir: `.${id}/prompts`, + detect: async () => detects, + generate: async (_skills: SkillDefinition[]) => [] as GeneratedFile[], + import: async () => [] as SkillDefinition[], + install: async () => {}, + uninstall: async () => {}, + } as unknown as BaseSkillAdapter; +} + +describe("Real SkillAdapterRegistry singleton", () => { + beforeEach(() => { + registry.clear(); + }); + + describe("register / get", () => { + it("registers and retrieves an adapter by id", () => { + const adapter = makeFakeAdapter("claude-code"); + registry.register(adapter); + expect(registry.get("claude-code")).toBe(adapter); + }); + + it("returns undefined for unknown adapter id", () => { + expect(registry.get("nonexistent")).toBeUndefined(); + }); + + it("overwrites a previously registered adapter with the same id", () => { + const first = makeFakeAdapter("dupe"); + const second = makeFakeAdapter("dupe"); + registry.register(first); + registry.register(second); + expect(registry.get("dupe")).toBe(second); + }); + }); + + describe("list", () => { + it("returns empty array when nothing registered", () => { + expect(registry.list()).toEqual([]); + }); + + it("lists all registered adapter IDs", () => { + registry.register(makeFakeAdapter("a")); + registry.register(makeFakeAdapter("b")); + expect(registry.list().toSorted()).toEqual(["a", "b"]); + }); + }); + + describe("detectAll", () => { + it("returns adapters that detect successfully", async () => { + registry.register(makeFakeAdapter("found", true)); + registry.register(makeFakeAdapter("missing", false)); + + const detected = await registry.detectAll(); + 
expect(detected).toHaveLength(1); + expect(detected[0]?.id).toBe("found"); + }); + + it("skips adapters that throw during detection", async () => { + const throwingAdapter = makeFakeAdapter("broken"); + throwingAdapter.detect = async () => { + throw new Error("detection crashed"); + }; + registry.register(throwingAdapter); + registry.register(makeFakeAdapter("stable", true)); + + const detected = await registry.detectAll(); + expect(detected).toHaveLength(1); + expect(detected[0]?.id).toBe("stable"); + }); + + it("returns empty array when no adapters detect", async () => { + registry.register(makeFakeAdapter("a", false)); + registry.register(makeFakeAdapter("b", false)); + + const detected = await registry.detectAll(); + expect(detected).toEqual([]); + }); + + it("passes cwd to adapter.detect", async () => { + const detectFn = vi.fn().mockResolvedValue(true); + const adapter = makeFakeAdapter("with-cwd"); + adapter.detect = detectFn; + registry.register(adapter); + + await registry.detectAll("/custom/dir"); + expect(detectFn).toHaveBeenCalledWith("/custom/dir"); + }); + + it("handles multiple throwing adapters gracefully", async () => { + const spy = vi.fn(); + for (let i = 0; i < 3; i++) { + const adapter = makeFakeAdapter(`throw-${i}`); + adapter.detect = async () => { + spy(); + throw new Error(`fail-${i}`); + }; + registry.register(adapter); + } + + const detected = await registry.detectAll(); + expect(detected).toEqual([]); + expect(spy).toHaveBeenCalledTimes(3); + }); + }); + + describe("clear", () => { + it("removes all adapters", () => { + registry.register(makeFakeAdapter("a")); + registry.register(makeFakeAdapter("b")); + + registry.clear(); + + expect(registry.list()).toEqual([]); + expect(registry.get("a")).toBeUndefined(); + expect(registry.get("b")).toBeUndefined(); + }); + + it("allows re-registration after clear", () => { + registry.register(makeFakeAdapter("x")); + registry.clear(); + registry.register(makeFakeAdapter("y")); + + 
expect(registry.list()).toEqual(["y"]); + expect(registry.get("x")).toBeUndefined(); + expect(registry.get("y")?.id).toBe("y"); + }); + }); +}); diff --git a/packages/skills/src/adapters/registry.ts b/packages/skills/src/adapters/registry.ts new file mode 100644 index 0000000..9ff5f0d --- /dev/null +++ b/packages/skills/src/adapters/registry.ts @@ -0,0 +1,45 @@ +import type { BaseSkillAdapter } from "./base.js"; + +class SkillAdapterRegistry { + private adapters: Map = new Map(); + + register(adapter: BaseSkillAdapter): void { + this.adapters.set(adapter.id, adapter); + } + + get(id: string): BaseSkillAdapter | undefined { + return this.adapters.get(id); + } + + list(): string[] { + return [...this.adapters.keys()]; + } + + async detectAll(cwd?: string): Promise { + const detected: BaseSkillAdapter[] = []; + + for (const adapter of this.adapters.values()) { + try { + const found = await adapter.detect(cwd); + if (found) { + detected.push(adapter); + } + } catch { + // Detection failed, skip + } + } + + return detected; + } + + clear(): void { + this.adapters.clear(); + } +} + +declare global { + // eslint-disable-next-line no-var + var __premierstudio_skills_registry: SkillAdapterRegistry | undefined; +} + +export const registry = (globalThis.__premierstudio_skills_registry ??= new SkillAdapterRegistry()); diff --git a/packages/skills/src/adapters/roo-code.test.ts b/packages/skills/src/adapters/roo-code.test.ts new file mode 100644 index 0000000..e666e85 --- /dev/null +++ b/packages/skills/src/adapters/roo-code.test.ts @@ -0,0 +1,120 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from 
"node:fs/promises"; +import { RooCodeSkillAdapter } from "./roo-code.js"; +import type { SkillDefinition } from "../types/index.js"; + +describe("RooCodeSkillAdapter", () => { + let adapter: RooCodeSkillAdapter; + + const testSkill: SkillDefinition = { + id: "review", + name: "Code Review", + description: "Review code for best practices", + content: + "Please review the selected code for:\n- Security issues\n- Performance\n- Best practices", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new RooCodeSkillAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("roo-code")); + it("has correct name", () => expect(adapter.name).toBe("Roo Code")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config dir", () => expect(adapter.configDir).toBe(".roo/skills")); + }); + + describe("generate", () => { + it("generates one file per skill", async () => { + const files = await adapter.generate([testSkill]); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".roo/skills/review.md"); + expect(files[0]?.format).toBe("md"); + }); + + it("formats skill with name heading and content", async () => { + const files = await adapter.generate([testSkill]); + expect(files[0]?.content).toContain("# Code Review"); + expect(files[0]?.content).toContain("Review code for best practices"); + expect(files[0]?.content).toContain("Security issues"); + }); + + it("handles empty skills array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + + it("handles multiple skills", async () => { + const skills = [testSkill, { ...testSkill, id: "debug", name: "Debug" }]; + const files = await adapter.generate(skills); + expect(files).toHaveLength(2); + }); + + it("handles skill without description", async () => { + const skill: SkillDefinition = { id: "test", name: "Test", content: "Content here" }; + const files = await 
adapter.generate([skill]); + expect(files[0]?.content).toBe("# Test\n\nContent here\n"); + }); + }); + + describe("import", () => { + it("returns empty array when dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports skills from markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["review.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Code Review\n\nReview the code"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]?.id).toBe("review"); + expect(result[0]?.name).toBe("Code Review"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "skill.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports skill without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0]?.name).toBe("plain"); + expect(result[0]?.content).toBe("Just content, no heading"); + }); + + it("imports without cwd argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/skills/src/adapters/roo-code.ts b/packages/skills/src/adapters/roo-code.ts new file mode 100644 index 0000000..75bce2a --- /dev/null +++ b/packages/skills/src/adapters/roo-code.ts @@ -0,0 +1,66 @@ +import { BaseSkillAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { 
SkillDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { resolve, basename } from "node:path"; + +class RooCodeSkillAdapter extends BaseSkillAdapter { + readonly id = "roo-code"; + readonly name = "Roo Code"; + readonly nativeSupport = true; + readonly configDir = ".roo/skills"; + + async generate(skills: SkillDefinition[]): Promise { + return skills.map((skill) => ({ + path: `${this.configDir}/${skill.id}.md`, + content: this.formatSkill(skill), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise { + const dir = cwd ?? process.cwd(); + const skillsDir = resolve(dir, this.configDir); + if (!existsSync(skillsDir)) return []; + + const files = await readdir(skillsDir); + const skills: SkillDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(skillsDir, file), "utf-8"); + const id = basename(file, ".md"); + skills.push(this.parseSkill(id, content)); + } + + return skills; + } + + private formatSkill(skill: SkillDefinition): string { + let md = `# ${skill.name}\n\n`; + if (skill.description) md += `${skill.description}\n\n`; + md += skill.content + "\n"; + return md; + } + + private parseSkill(id: string, raw: string): SkillDefinition { + const lines = raw.trim().split("\n"); + let name = id; + let contentStart = 0; + + if (lines[0]?.startsWith("# ")) { + name = lines[0].slice(2).trim(); + contentStart = 1; + if (lines[contentStart]?.trim() === "") contentStart++; + } + + const content = lines.slice(contentStart).join("\n").trim(); + return { id, name, content }; + } +} + +const adapter = new RooCodeSkillAdapter(); +registry.register(adapter); +export { RooCodeSkillAdapter }; +export default adapter; diff --git a/packages/skills/src/adapters/windsurf.test.ts b/packages/skills/src/adapters/windsurf.test.ts new file mode 100644 index 0000000..ae4fb0d --- /dev/null 
+++ b/packages/skills/src/adapters/windsurf.test.ts @@ -0,0 +1,120 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +vi.mock("./registry.js", () => ({ + registry: { register: vi.fn() }, +})); + +vi.mock("node:fs", () => ({ + existsSync: vi.fn(() => false), +})); + +vi.mock("node:fs/promises", () => ({ + readFile: vi.fn(), + readdir: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), + rm: vi.fn(), +})); + +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { WindsurfSkillAdapter } from "./windsurf.js"; +import type { SkillDefinition } from "../types/index.js"; + +describe("WindsurfSkillAdapter", () => { + let adapter: WindsurfSkillAdapter; + + const testSkill: SkillDefinition = { + id: "review", + name: "Code Review", + description: "Review code for best practices", + content: + "Please review the selected code for:\n- Security issues\n- Performance\n- Best practices", + }; + + beforeEach(() => { + vi.clearAllMocks(); + adapter = new WindsurfSkillAdapter(); + }); + + describe("metadata", () => { + it("has correct id", () => expect(adapter.id).toBe("windsurf")); + it("has correct name", () => expect(adapter.name).toBe("Windsurf")); + it("has native support", () => expect(adapter.nativeSupport).toBe(true)); + it("has correct config dir", () => expect(adapter.configDir).toBe(".windsurf/skills")); + }); + + describe("generate", () => { + it("generates one file per skill", async () => { + const files = await adapter.generate([testSkill]); + expect(files).toHaveLength(1); + expect(files[0]?.path).toBe(".windsurf/skills/review.md"); + expect(files[0]?.format).toBe("md"); + }); + + it("formats skill with name heading and content", async () => { + const files = await adapter.generate([testSkill]); + expect(files[0]?.content).toContain("# Code Review"); + expect(files[0]?.content).toContain("Review code for best practices"); + expect(files[0]?.content).toContain("Security issues"); + }); + + it("handles 
empty skills array", async () => { + const files = await adapter.generate([]); + expect(files).toHaveLength(0); + }); + + it("handles multiple skills", async () => { + const skills = [testSkill, { ...testSkill, id: "debug", name: "Debug" }]; + const files = await adapter.generate(skills); + expect(files).toHaveLength(2); + }); + + it("handles skill without description", async () => { + const skill: SkillDefinition = { id: "test", name: "Test", content: "Content here" }; + const files = await adapter.generate([skill]); + expect(files[0]?.content).toBe("# Test\n\nContent here\n"); + }); + }); + + describe("import", () => { + it("returns empty array when dir does not exist", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import("/test"); + expect(result).toEqual([]); + }); + + it("imports skills from markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["review.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Code Review\n\nReview the code"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + expect(result[0]?.id).toBe("review"); + expect(result[0]?.name).toBe("Code Review"); + }); + + it("skips non-markdown files", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["readme.txt", "skill.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("# Skill\n\nContent"); + const result = await adapter.import("/test"); + expect(result).toHaveLength(1); + }); + + it("imports skill without heading", async () => { + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readdir).mockResolvedValue(["plain.md"] as unknown); + vi.mocked(readFile).mockResolvedValue("Just content, no heading"); + const result = await adapter.import("/test"); + expect(result[0]?.name).toBe("plain"); + expect(result[0]?.content).toBe("Just content, no heading"); + }); + + it("imports without cwd 
argument", async () => { + vi.mocked(existsSync).mockReturnValue(false); + const result = await adapter.import(); + expect(result).toEqual([]); + }); + }); +}); diff --git a/packages/skills/src/adapters/windsurf.ts b/packages/skills/src/adapters/windsurf.ts new file mode 100644 index 0000000..468d834 --- /dev/null +++ b/packages/skills/src/adapters/windsurf.ts @@ -0,0 +1,67 @@ +import { BaseSkillAdapter } from "./base.js"; +import { registry } from "./registry.js"; +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; +import { existsSync } from "node:fs"; +import { readFile, readdir } from "node:fs/promises"; +import { resolve, basename } from "node:path"; + +class WindsurfSkillAdapter extends BaseSkillAdapter { + readonly id = "windsurf"; + readonly name = "Windsurf"; + readonly nativeSupport = true; + readonly configDir = ".windsurf/skills"; + readonly command = "windsurf"; + + async generate(skills: SkillDefinition[]): Promise { + return skills.map((skill) => ({ + path: `${this.configDir}/${skill.id}.md`, + content: this.formatSkill(skill), + format: "md" as const, + })); + } + + async import(cwd?: string): Promise { + const dir = cwd ?? 
process.cwd(); + const skillsDir = resolve(dir, this.configDir); + if (!existsSync(skillsDir)) return []; + + const files = await readdir(skillsDir); + const skills: SkillDefinition[] = []; + + for (const file of files) { + if (!file.endsWith(".md")) continue; + const content = await readFile(resolve(skillsDir, file), "utf-8"); + const id = basename(file, ".md"); + skills.push(this.parseSkill(id, content)); + } + + return skills; + } + + private formatSkill(skill: SkillDefinition): string { + let md = `# ${skill.name}\n\n`; + if (skill.description) md += `${skill.description}\n\n`; + md += skill.content + "\n"; + return md; + } + + private parseSkill(id: string, raw: string): SkillDefinition { + const lines = raw.trim().split("\n"); + let name = id; + let contentStart = 0; + + if (lines[0]?.startsWith("# ")) { + name = lines[0].slice(2).trim(); + contentStart = 1; + if (lines[contentStart]?.trim() === "") contentStart++; + } + + const content = lines.slice(contentStart).join("\n").trim(); + return { id, name, content }; + } +} + +const adapter = new WindsurfSkillAdapter(); +registry.register(adapter); +export { WindsurfSkillAdapter }; +export default adapter; diff --git a/packages/skills/src/cli/bin.ts b/packages/skills/src/cli/bin.ts new file mode 100644 index 0000000..3adf2a9 --- /dev/null +++ b/packages/skills/src/cli/bin.ts @@ -0,0 +1,6 @@ +import { run } from "./index.js"; + +run(process.argv.slice(2)).catch((err) => { + console.error(err.message); + process.exit(1); +}); diff --git a/packages/skills/src/cli/index.test.ts b/packages/skills/src/cli/index.test.ts new file mode 100644 index 0000000..521b554 --- /dev/null +++ b/packages/skills/src/cli/index.test.ts @@ -0,0 +1,366 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; + +// ── Hoisted mocks ────────────────────────────────────────── + +const { mockRegistryDetectAll, mockRegistryList, mockRegistryGet, mockWriteFile, mockExistsSync } = + vi.hoisted(() => ({ + 
mockRegistryDetectAll: vi.fn(), + mockRegistryList: vi.fn(), + mockRegistryGet: vi.fn(), + mockWriteFile: vi.fn(), + mockExistsSync: vi.fn(), + })); + +// ── Mock adapter self-registration ────────────────────────── + +vi.mock("../adapters/all.js", () => ({})); + +// ── Mock registry ─────────────────────────────────────────── + +vi.mock("../adapters/registry.js", () => ({ + registry: { + detectAll: (...args: unknown[]) => mockRegistryDetectAll(...args), + list: () => mockRegistryList(), + get: (id: string) => mockRegistryGet(id), + }, +})); + +// ── Mock node:fs ──────────────────────────────────────────── + +vi.mock("node:fs", () => ({ + existsSync: (...args: unknown[]) => mockExistsSync(...args), +})); + +// ── Mock node:fs/promises ─────────────────────────────────── + +vi.mock("node:fs/promises", () => ({ + writeFile: (...args: unknown[]) => mockWriteFile(...args), + mkdir: vi.fn(), +})); + +// ── Import under test ─────────────────────────────────────── + +import { run } from "./index.js"; +import type { BaseSkillAdapter } from "../adapters/base.js"; +import type { SkillDefinition, GeneratedFile } from "../types/index.js"; + +// ── Mock adapter factory ──────────────────────────────────── + +function makeAdapter(overrides: Partial = {}): BaseSkillAdapter { + return { + id: overrides.id ?? "test-tool", + name: overrides.name ?? "Test Tool", + nativeSupport: overrides.nativeSupport ?? true, + configDir: overrides.configDir ?? ".test/prompts", + detect: overrides.detect ?? vi.fn<() => Promise>().mockResolvedValue(true), + generate: + overrides.generate ?? + vi + .fn<(skills: SkillDefinition[]) => Promise>() + .mockResolvedValue([ + { path: ".test/prompts/skill.md", content: "# Skill\n\nContent\n", format: "md" }, + ]), + install: + overrides.install ?? + vi.fn<(files: GeneratedFile[]) => Promise>().mockResolvedValue(undefined), + import: overrides.import ?? vi.fn<() => Promise>().mockResolvedValue([]), + uninstall: overrides.uninstall ?? 
vi.fn<() => Promise>().mockResolvedValue(undefined), + } as BaseSkillAdapter; +} + +// ── Console / process mocks ───────────────────────────────── + +let logOutput: string[]; +let errorOutput: string[]; +let warnOutput: string[]; +let exitCode: number | undefined; + +const originalLog = console.log; +const originalError = console.error; +const originalWarn = console.warn; +const originalExit = process.exit; + +beforeEach(() => { + logOutput = []; + errorOutput = []; + warnOutput = []; + exitCode = undefined; + + console.log = vi.fn((...args: unknown[]) => { + logOutput.push(args.map(String).join(" ")); + }); + console.error = vi.fn((...args: unknown[]) => { + errorOutput.push(args.map(String).join(" ")); + }); + console.warn = vi.fn((...args: unknown[]) => { + warnOutput.push(args.map(String).join(" ")); + }); + process.exit = vi.fn((code?: number) => { + exitCode = code ?? 0; + throw new Error(`process.exit(${code})`); + }) as never; + + vi.clearAllMocks(); + mockWriteFile.mockResolvedValue(undefined); + mockExistsSync.mockReturnValue(false); +}); + +afterEach(() => { + console.log = originalLog; + console.error = originalError; + console.warn = originalWarn; + process.exit = originalExit; +}); + +// ── Helpers ───────────────────────────────────────────────── + +function allLog(): string { + return logOutput.join("\n"); +} + +function allError(): string { + return errorOutput.join("\n"); +} + +// ── Tests ─────────────────────────────────────────────────── + +describe("run() - help output", () => { + it('prints help text for "help" command', async () => { + await run(["help"]); + expect(allLog()).toContain("ai-skills - Universal skills/prompts configuration"); + expect(allLog()).toContain("USAGE:"); + expect(allLog()).toContain("COMMANDS:"); + }); + + it("prints help text for --help flag", async () => { + await run(["--help"]); + expect(allLog()).toContain("ai-skills - Universal skills/prompts configuration"); + }); + + it("prints help text for -h flag", async 
() => { + await run(["-h"]); + expect(allLog()).toContain("ai-skills - Universal skills/prompts configuration"); + }); + + it("prints help text when no arguments are provided", async () => { + await run([]); + expect(allLog()).toContain("ai-skills - Universal skills/prompts configuration"); + }); + + it("includes all documented commands in help text", async () => { + await run(["help"]); + const output = allLog(); + for (const cmd of [ + "init", + "detect", + "generate", + "install", + "import", + "sync", + "export", + "help", + ]) { + expect(output).toContain(cmd); + } + }); +}); + +describe("run() - unknown command", () => { + it("prints error and help, then exits with code 1", async () => { + await expect(run(["foobar"])).rejects.toThrow("process.exit(1)"); + expect(allError()).toContain("Unknown command: foobar"); + expect(allLog()).toContain("USAGE:"); + expect(exitCode).toBe(1); + }); +}); + +describe("run() - init command", () => { + it("skips when config already exists", async () => { + mockExistsSync.mockReturnValue(true); + await run(["init"]); + expect(allLog()).toContain("Config already exists: ai-skills.config.ts"); + expect(mockWriteFile).not.toHaveBeenCalled(); + }); + + it("creates config file when none exists", async () => { + mockExistsSync.mockReturnValue(false); + await run(["init"]); + expect(mockWriteFile).toHaveBeenCalledOnce(); + expect(mockWriteFile).toHaveBeenCalledWith( + "ai-skills.config.ts", + expect.stringContaining("defineConfig"), + "utf-8", + ); + expect(allLog()).toContain("Created ai-skills.config.ts"); + expect(allLog()).toContain("Next steps:"); + }); + + it("respects --dry-run flag and does not write files", async () => { + mockExistsSync.mockReturnValue(false); + await run(["init", "--dry-run"]); + expect(mockWriteFile).not.toHaveBeenCalled(); + expect(allLog()).toContain("[dry-run] Would create ai-skills.config.ts"); + }); +}); + +describe("run() - detect command", () => { + it("shows detection header", async () => { + 
mockRegistryDetectAll.mockResolvedValue([]);
+    mockRegistryList.mockReturnValue([]);
+    await run(["detect"]);
+    expect(allLog()).toContain("Detecting AI coding tools...");
+  });
+
+  it("lists detected and undetected tools", async () => {
+    const detected = makeAdapter({ id: "claude-code", name: "Claude Code" });
+    const missing = makeAdapter({ id: "cursor", name: "Cursor" });
+
+    mockRegistryDetectAll.mockResolvedValue([detected]);
+    mockRegistryList.mockReturnValue(["claude-code", "cursor"]);
+    mockRegistryGet.mockImplementation((id: string) => {
+      if (id === "claude-code") return detected;
+      if (id === "cursor") return missing;
+      return undefined;
+    });
+
+    await run(["detect"]);
+    const output = allLog();
+    expect(output).toContain("\u2713");
+    expect(output).toContain("Claude Code");
+    expect(output).toContain("\u2717");
+    expect(output).toContain("Cursor");
+  });
+
+  it("shows detection summary with counts", async () => {
+    const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" });
+    mockRegistryDetectAll.mockResolvedValue([adapter]);
+    mockRegistryList.mockReturnValue(["claude-code", "cursor", "codex"]);
+    mockRegistryGet.mockReturnValue(adapter);
+
+    await run(["detect"]);
+    expect(allLog()).toContain("Detected 1/3 tools");
+  });
+});
+
+describe("run() - generate command", () => {
+  it("prints message when no tools detected", async () => {
+    mockRegistryDetectAll.mockResolvedValue([]);
+    await run(["generate"]);
+    expect(allLog()).toContain("No AI tools detected");
+  });
+
+  it("warns for unknown adapter IDs in --tools flag", async () => {
+    // Pass an unknown id so resolveAdapters() actually hits the
+    // unknown-adapter branch; without --tools this test never exercised
+    // the path its name claims to cover.
+    mockRegistryGet.mockReturnValue(undefined);
+    await run(["generate", "--tools=bogus"]);
+    expect(allLog()).toContain("No AI tools detected");
+  });
+});
+
+describe("run() - install command", () => {
+  it("prints message when no tools detected", async () => {
+    mockRegistryDetectAll.mockResolvedValue([]);
+    await run(["install"]);
+    expect(allLog()).toContain("No AI tools detected");
+  });
+});
+
+describe("run() - import command", () => {
+  it("prints message when no tools detected", async () => {
+    mockRegistryDetectAll.mockResolvedValue([]);
+    await run(["import"]);
+    expect(allLog()).toContain("No AI tools detected");
+  });
+
+  it("imports skills from specified tool", async () => {
+    // NOTE(review): the Promise type arguments below were stripped by the
+    // paste mangling ("Promise>" in the original); restored to the natural
+    // types — confirm against the real file.
+    const importFn = vi
+      .fn<() => Promise<SkillDefinition[]>>()
+      .mockResolvedValue([{ id: "review", name: "Code Review", content: "Review the code" }]);
+    const adapter = makeAdapter({ id: "claude-code", name: "Claude Code", import: importFn });
+
+    mockRegistryGet.mockImplementation((id: string) => {
+      if (id === "claude-code") return adapter;
+      return undefined;
+    });
+
+    await run(["import", "--tools=claude-code"]);
+    expect(importFn).toHaveBeenCalled();
+    expect(allLog()).toContain("Imported 1 skill(s) from Claude Code");
+    expect(allLog()).toContain("Code Review (review)");
+  });
+});
+
+describe("run() - sync command", () => {
+  it("prints message when no tools detected", async () => {
+    mockRegistryDetectAll.mockResolvedValue([]);
+    await run(["sync"]);
+    expect(allLog()).toContain("No AI tools detected");
+  });
+});
+
+describe("run() - install guard", () => {
+  it("skips undetected tool in --tools and warns", async () => {
+    const adapter = makeAdapter({
+      id: "kiro",
+      name: "Kiro",
+      detect: vi.fn<() => Promise<boolean>>().mockResolvedValue(false),
+    });
+
+    mockRegistryGet.mockImplementation((id: string) => {
+      if (id === "kiro") return adapter;
+      return undefined;
+    });
+
+    await run(["install", "--tools=kiro"]);
+    expect(allLog()).toContain("No AI tools detected");
+  });
+
+  it("--force bypasses detection check for --tools", async () => {
+    const installFn = vi
+      .fn<(files: GeneratedFile[]) => Promise<void>>()
+      .mockResolvedValue(undefined);
+    const adapter = makeAdapter({
+      id: "kiro",
+      name: "Kiro",
+      detect: vi.fn<() => Promise<boolean>>().mockResolvedValue(false),
+      install: installFn,
+    });
+
+    mockRegistryGet.mockImplementation((id: string) => {
+      if (id === "kiro") return adapter;
+      return undefined;
+    });
+
+    await
run(["install", "--tools=kiro", "--force"]);
+    expect(installFn).toHaveBeenCalled();
+    expect(allLog()).toContain("Installing skills into 1 tool(s)");
+  });
+});
+
+describe("run() - flag parsing", () => {
+  it("parses --tools flag with = syntax", async () => {
+    const adapter = makeAdapter({ id: "claude-code", name: "Claude Code" });
+    mockRegistryGet.mockImplementation((id: string) => {
+      if (id === "claude-code") return adapter;
+      return undefined;
+    });
+
+    await run(["import", "--tools=claude-code"]);
+    expect(mockRegistryGet).toHaveBeenCalledWith("claude-code");
+    expect(mockRegistryDetectAll).not.toHaveBeenCalled();
+  });
+
+  it("parses --force flag", async () => {
+    const adapter = makeAdapter({
+      id: "kiro",
+      name: "Kiro",
+      detect: vi.fn<() => Promise<boolean>>().mockResolvedValue(false),
+    });
+    mockRegistryGet.mockImplementation((id: string) => {
+      if (id === "kiro") return adapter;
+      return undefined;
+    });
+
+    await run(["generate", "--tools=kiro", "--force"]);
+    expect(allLog()).toContain("Generating skills for 1 tool(s)");
+  });
+});
diff --git a/packages/skills/src/cli/index.ts b/packages/skills/src/cli/index.ts
new file mode 100644
index 0000000..fff760c
--- /dev/null
+++ b/packages/skills/src/cli/index.ts
@@ -0,0 +1,316 @@
+import { registry } from "../adapters/registry.js";
+import type { BaseSkillAdapter } from "../adapters/base.js";
+import type { SkillsConfig } from "../types/index.js";
+
+// Import all adapters to register them
+import "../adapters/all.js";
+
+const HELP = `
+ai-skills - Universal skills/prompts configuration for AI coding tools
+
+USAGE:
+  ai-skills <command> [options]
+
+COMMANDS:
+  init      Create an ai-skills.config.ts in the current directory
+  detect    Detect which AI tools are installed
+  generate  Generate skill files for detected/specified tools
+  install   Generate and install skills into detected tools
+  import    Import skills from a detected tool
+  sync      Sync skills across all detected tools
+  export    Export skills to stdout as JSON
+  help      
Show this help message
+
+OPTIONS:
+  --tools    Comma-separated list of tools (e.g., --tools=claude-code,cursor)
+  --config   Path to config file (default: ai-skills.config.ts)
+  --verbose  Show detailed output
+  --dry-run  Show what would be generated without writing files
+  --force    Skip detection checks for --tools (install even if tool not found)
+
+EXAMPLES:
+  ai-skills init                        # Create config file
+  ai-skills detect                      # See which AI tools are installed
+  ai-skills generate                    # Generate skills for all detected tools
+  ai-skills install --tools=cursor      # Install skills for Cursor only
+  ai-skills import --tools=claude-code  # Import skills from Claude Code
+`;
+
+// Parsed command-line options shared by every subcommand.
+type Flags = {
+  tools?: string;
+  config?: string;
+  verbose?: boolean;
+  dryRun?: boolean;
+  force?: boolean;
+};
+
+// CLI entry point: dispatches args[0] to the matching subcommand.
+// (Return type was mangled to bare `Promise` in the paste; restored.)
+export async function run(args: string[]): Promise<void> {
+  const command = args[0];
+  const flags = parseFlags(args.slice(1));
+
+  switch (command) {
+    case "init":
+      await cmdInit(flags);
+      break;
+    case "detect":
+      await cmdDetect(flags);
+      break;
+    case "generate":
+      await cmdGenerate(flags);
+      break;
+    case "install":
+      await cmdInstall(flags);
+      break;
+    case "import":
+      await cmdImport(flags);
+      break;
+    case "sync":
+      await cmdSync(flags);
+      break;
+    case "export":
+      await cmdExport(flags);
+      break;
+    case "help":
+    case "--help":
+    case "-h":
+    case undefined:
+      console.log(HELP);
+      break;
+    default:
+      console.error(`Unknown command: ${command}`);
+      console.log(HELP);
+      process.exit(1);
+  }
+}
+
+// ── Commands ────────────────────────────────────────────────
+
+// Scaffolds ai-skills.config.ts in the cwd (no-op if it already exists).
+async function cmdInit(flags: Flags): Promise<void> {
+  const { existsSync } = await import("node:fs");
+  if (existsSync("ai-skills.config.ts")) {
+    console.log("Config already exists: ai-skills.config.ts");
+    return;
+  }
+
+  const { writeFile } = await import("node:fs/promises");
+
+  const template = `import { defineConfig } from "@premierstudio/ai-skills";
+
+export default defineConfig({
+  skills: [
+    // Add your skills here:
+    //
+    // {
+    //   
id: "review",
+    //   name: "Code Review",
+    //   description: "Review code for best practices",
+    //   content: "Please review the selected code for security, performance, and best practices.",
+    // },
+  ],
+});
+`;
+
+  if (flags.dryRun) {
+    console.log("[dry-run] Would create ai-skills.config.ts");
+    return;
+  }
+
+  await writeFile("ai-skills.config.ts", template, "utf-8");
+  console.log("Created ai-skills.config.ts");
+  console.log("");
+  console.log("Next steps:");
+  console.log("  1. Edit ai-skills.config.ts to add your skills");
+  console.log("  2. Run: ai-skills detect (see which AI tools are installed)");
+  console.log("  3. Run: ai-skills install (install skills into your tools)");
+}
+
+// Lists every registered adapter with a green check (detected) or grey
+// cross (not detected), then a detected/total summary.
+async function cmdDetect(_flags: Flags): Promise<void> {
+  console.log("Detecting AI coding tools...\n");
+
+  const detected = await registry.detectAll();
+  const all = registry.list();
+
+  for (const id of all) {
+    const adapter = registry.get(id);
+    if (!adapter) continue;
+
+    const isDetected = detected.some((d) => d.id === id);
+    const icon = isDetected ? "\u2713" : "\u2717";
+    const color = isDetected ? "\x1b[32m" : "\x1b[90m";
+    const reset = "\x1b[0m";
+
+    console.log(`  ${color}${icon}${reset} ${adapter.name.padEnd(20)} ${adapter.id}`);
+  }
+
+  console.log(`\nDetected ${detected.length}/${all.length} tools`);
+}
+
+// Generates skill files per adapter; installs them unless --dry-run.
+async function cmdGenerate(flags: Flags): Promise<void> {
+  const adapters = await resolveAdapters(flags);
+
+  if (adapters.length === 0) {
+    console.log("No AI tools detected. Use --tools to specify manually.");
+    return;
+  }
+
+  const config = await loadConfig(flags.config);
+
+  console.log(`Generating skills for ${adapters.length} tool(s)...\n`);
+
+  for (const adapter of adapters) {
+    const files = await adapter.generate(config.skills);
+
+    for (const file of files) {
+      if (flags.dryRun) {
+        console.log(`  [dry-run] Would write: ${file.path}`);
+      } else {
+        console.log(`  Generated: ${file.path}`);
+      }
+    }
+
+    if (!flags.dryRun) {
+      await adapter.install(files);
+    }
+  }
+
+  console.log("\nDone!");
+}
+
+// Generates and installs skills into each resolved tool.
+async function cmdInstall(flags: Flags): Promise<void> {
+  const adapters = await resolveAdapters(flags);
+
+  if (adapters.length === 0) {
+    console.log("No AI tools detected. Use --tools to specify manually.");
+    return;
+  }
+
+  const config = await loadConfig(flags.config);
+
+  console.log(`Installing skills into ${adapters.length} tool(s)...\n`);
+
+  for (const adapter of adapters) {
+    const files = await adapter.generate(config.skills);
+
+    if (flags.dryRun) {
+      for (const file of files) {
+        console.log(`  [dry-run] Would install: ${file.path}`);
+      }
+    } else {
+      await adapter.install(files);
+      console.log(`  \u2713 ${adapter.name}`);
+    }
+  }
+
+  console.log("\nSkills installed!");
+}
+
+// Reads skills back out of each resolved tool and lists them.
+async function cmdImport(flags: Flags): Promise<void> {
+  const adapters = await resolveAdapters(flags);
+
+  if (adapters.length === 0) {
+    console.log("No AI tools detected. Use --tools to specify manually.");
+    return;
+  }
+
+  for (const adapter of adapters) {
+    const skills = await adapter.import();
+    console.log(`\nImported ${skills.length} skill(s) from ${adapter.name}:`);
+    for (const skill of skills) {
+      console.log(`  - ${skill.name} (${skill.id})`);
+    }
+  }
+}
+
+// Regenerates and reinstalls skills into every resolved tool.
+async function cmdSync(flags: Flags): Promise<void> {
+  const adapters = await resolveAdapters(flags);
+
+  if (adapters.length === 0) {
+    console.log("No AI tools detected. Use --tools to specify manually.");
+    return;
+  }
+
+  const config = await loadConfig(flags.config);
+
+  console.log(`Syncing skills to ${adapters.length} tool(s)...\n`);
+
+  for (const adapter of adapters) {
+    const files = await adapter.generate(config.skills);
+
+    if (flags.dryRun) {
+      for (const file of files) {
+        console.log(`  [dry-run] Would sync: ${file.path}`);
+      }
+    } else {
+      await adapter.install(files);
+      console.log(`  \u2713 ${adapter.name}`);
+    }
+  }
+
+  console.log("\nSkills synced!");
+}
+
+// Dumps the configured skills to stdout as pretty-printed JSON.
+async function cmdExport(flags: Flags): Promise<void> {
+  const config = await loadConfig(flags.config);
+  console.log(JSON.stringify(config.skills, null, 2));
+}
+
+// ── Helpers ─────────────────────────────────────────────────
+
+// Minimal flag parser: --key=value for tools/config, bare switches otherwise.
+function parseFlags(args: string[]): Flags {
+  const flags: Flags = {};
+
+  for (const arg of args) {
+    if (arg.startsWith("--tools=")) {
+      flags.tools = arg.slice(8);
+    } else if (arg.startsWith("--config=")) {
+      flags.config = arg.slice(9);
+    } else if (arg === "--verbose") {
+      flags.verbose = true;
+    } else if (arg === "--dry-run") {
+      flags.dryRun = true;
+    } else if (arg === "--force") {
+      flags.force = true;
+    }
+  }
+
+  return flags;
+}
+
+// Resolves target adapters: explicit --tools ids (warning on unknown or
+// undetected unless --force), otherwise every detected adapter.
+async function resolveAdapters(flags: Flags): Promise<BaseSkillAdapter[]> {
+  if (flags.tools) {
+    const ids = flags.tools.split(",").map((t) => t.trim());
+    const adapters: BaseSkillAdapter[] = [];
+    for (const id of ids) {
+      const adapter = registry.get(id);
+      if (!adapter) {
+        console.warn(`  Warning: Unknown adapter "${id}"`);
+        continue;
+      }
+      if (!flags.force && !(await adapter.detect())) {
+        console.warn(`  Warning: ${adapter.name} not detected, skipping (use --force to override)`);
+        continue;
+      }
+      adapters.push(adapter);
+    }
+    return adapters;
+  }
+
+  return registry.detectAll();
+}
+
+// Loads the skills config; missing default path yields an empty config,
+// missing explicit --config path throws.
+async function loadConfig(configPath?: string): Promise<SkillsConfig> {
+  const path = configPath ??
"ai-skills.config.ts";
+  const { existsSync } = await import("node:fs");
+
+  if (!existsSync(path)) {
+    if (configPath) {
+      throw new Error(`Config file not found: ${path}`);
+    }
+    return { skills: [] };
+  }
+
+  const { resolve } = await import("node:path");
+  const fullPath = resolve(process.cwd(), path);
+  const mod = await import(fullPath);
+  return mod.default as SkillsConfig;
+}
diff --git a/packages/skills/src/config/define.test.ts b/packages/skills/src/config/define.test.ts
new file mode 100644
index 0000000..7976996
--- /dev/null
+++ b/packages/skills/src/config/define.test.ts
@@ -0,0 +1,42 @@
+import { describe, it, expect } from "vitest";
+import { defineConfig } from "./define.js";
+import type { SkillsConfig } from "../types/index.js";
+
+describe("defineConfig", () => {
+  it("returns the config object unchanged", () => {
+    const config: SkillsConfig = {
+      skills: [
+        {
+          id: "review",
+          name: "Code Review",
+          content: "Review the code",
+        },
+      ],
+    };
+    expect(defineConfig(config)).toBe(config);
+  });
+
+  it("returns empty skills array unchanged", () => {
+    const config: SkillsConfig = { skills: [] };
+    expect(defineConfig(config)).toEqual({ skills: [] });
+  });
+
+  it("preserves all skill fields", () => {
+    const config: SkillsConfig = {
+      skills: [
+        {
+          id: "test",
+          name: "Test Skill",
+          description: "A test skill",
+          content: "Do the thing",
+          tags: ["test", "dev"],
+          enabled: false,
+        },
+      ],
+    };
+    const result = defineConfig(config);
+    expect(result.skills[0]?.tags).toEqual(["test", "dev"]);
+    expect(result.skills[0]?.enabled).toBe(false);
+    expect(result.skills[0]?.description).toBe("A test skill");
+  });
+});
diff --git a/packages/skills/src/config/define.ts b/packages/skills/src/config/define.ts
new file mode 100644
index 0000000..2ea3f5a
--- /dev/null
+++ b/packages/skills/src/config/define.ts
@@ -0,0 +1,5 @@
+import type { SkillsConfig } from "../types/index.js";
+
+// Identity helper that exists purely to give config files type inference.
+export function defineConfig(config: SkillsConfig): SkillsConfig {
return config;
+}
diff --git a/packages/skills/src/config/index.ts b/packages/skills/src/config/index.ts
new file mode 100644
index 0000000..ee71dfd
--- /dev/null
+++ b/packages/skills/src/config/index.ts
@@ -0,0 +1 @@
+export { defineConfig } from "./define.js";
diff --git a/packages/skills/src/index.ts b/packages/skills/src/index.ts
new file mode 100644
index 0000000..31ff548
--- /dev/null
+++ b/packages/skills/src/index.ts
@@ -0,0 +1,4 @@
+export type { SkillDefinition, SkillsConfig, GeneratedFile } from "./types/index.js";
+export { defineConfig } from "./config/index.js";
+export { BaseSkillAdapter } from "./adapters/base.js";
+export { registry } from "./adapters/registry.js";
diff --git a/packages/skills/src/types/config.ts b/packages/skills/src/types/config.ts
new file mode 100644
index 0000000..e2521aa
--- /dev/null
+++ b/packages/skills/src/types/config.ts
@@ -0,0 +1,11 @@
+import type { SkillDefinition } from "./definition.js";
+
+export type SkillsConfig = {
+  skills: SkillDefinition[];
+};
+
+export type GeneratedFile = {
+  path: string;
+  content: string;
+  format: "md" | "json" | "yaml";
+};
diff --git a/packages/skills/src/types/definition.ts b/packages/skills/src/types/definition.ts
new file mode 100644
index 0000000..ea45835
--- /dev/null
+++ b/packages/skills/src/types/definition.ts
@@ -0,0 +1,8 @@
+export type SkillDefinition = {
+  id: string;
+  name: string;
+  description?: string;
+  content: string;
+  tags?: string[];
+  enabled?: boolean;
+};
diff --git a/packages/skills/src/types/index.ts b/packages/skills/src/types/index.ts
new file mode 100644
index 0000000..0ece641
--- /dev/null
+++ b/packages/skills/src/types/index.ts
@@ -0,0 +1,2 @@
+export type { SkillDefinition } from "./definition.js";
+export type { SkillsConfig, GeneratedFile } from "./config.js";
diff --git a/packages/skills/tsconfig.json b/packages/skills/tsconfig.json
new file mode 100644
index 0000000..329ab40
--- /dev/null
+++ b/packages/skills/tsconfig.json
@@ -0,0 +1,9 @@
+{
+  "extends": "../../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "./dist",
+    "rootDir": "./src"
+  },
+  "include": ["src/**/*.ts"],
+  "exclude": ["node_modules", "dist", "**/*.test.ts"]
+}
diff --git a/packages/skills/tsup.config.ts b/packages/skills/tsup.config.ts
new file mode 100644
index 0000000..d87215c
--- /dev/null
+++ b/packages/skills/tsup.config.ts
@@ -0,0 +1,29 @@
+import { defineConfig } from "tsup";
+
+export default defineConfig([
+  {
+    entry: ["src/index.ts", "src/adapters/index.ts", "src/adapters/all.ts", "src/cli/index.ts"],
+    format: ["esm"],
+    dts: true,
+    clean: true,
+    sourcemap: true,
+    target: "node22",
+    outDir: "dist",
+    splitting: true,
+    treeshake: true,
+  },
+  {
+    entry: ["src/cli/bin.ts"],
+    format: ["esm"],
+    dts: false,
+    clean: false,
+    sourcemap: true,
+    target: "node22",
+    outDir: "dist/cli",
+    splitting: false,
+    treeshake: true,
+    banner: {
+      js: "#!/usr/bin/env node",
+    },
+  },
+]);
diff --git a/tsconfig.base.json b/tsconfig.base.json
index 82f5662..defb5e0 100644
--- a/tsconfig.base.json
+++ b/tsconfig.base.json
@@ -4,6 +4,7 @@
     "module": "ESNext",
     "moduleResolution": "bundler",
     "lib": ["ES2023"],
+    "types": ["node"],
     "strict": true,
     "esModuleInterop": true,
     "skipLibCheck": true,
diff --git a/vitest.config.ts b/vitest.config.ts
index 4d1d688..10d2cd3 100644
--- a/vitest.config.ts
+++ b/vitest.config.ts
@@ -4,22 +4,40 @@ export default defineConfig({
   test: {
     globals: true,
     include: [
-      "packages/core/src/**/*.test.ts",
-      "packages/plannable/src/**/*.test.ts",
+      "packages/hooks/src/**/*.test.ts",
+      "packages/mcp/src/**/*.test.ts",
+      "packages/skills/src/**/*.test.ts",
+      "packages/agents/src/**/*.test.ts",
+      "packages/rules/src/**/*.test.ts",
+      "packages/cli/src/**/*.test.ts",
     ],
     coverage: {
       provider: "v8",
       include: [
-        "packages/core/src/**/*.ts",
-        "packages/plannable/src/**/*.ts",
+        "packages/hooks/src/**/*.ts",
+        "packages/mcp/src/**/*.ts",
+        "packages/skills/src/**/*.ts",
+        "packages/agents/src/**/*.ts",
+        "packages/rules/src/**/*.ts",
+        "packages/cli/src/**/*.ts",
       ],
       exclude: [
        "packages/*/src/**/*.test.ts",
        "packages/*/src/**/index.ts", // barrel re-exports
-        "packages/core/src/types/adapter.ts", // type-only
-        "packages/core/src/types/config.ts", // type-only
-        "packages/core/src/types/events.ts", // type-only
-        "packages/core/src/cli/bin.ts", // entry-point shim
+        "packages/*/src/adapters/all.ts", // side-effect registrations
+        "packages/hooks/src/types/adapter.ts", // type-only
+        "packages/hooks/src/types/config.ts", // type-only
+        "packages/hooks/src/types/events.ts", // type-only
+        "packages/hooks/src/cli/bin.ts", // entry-point shim
+        "packages/mcp/src/types/*.ts", // type-only
+        "packages/mcp/src/cli/bin.ts",
+        "packages/skills/src/types/*.ts",
+        "packages/skills/src/cli/bin.ts",
+        "packages/agents/src/types/*.ts",
+        "packages/agents/src/cli/bin.ts",
+        "packages/rules/src/types/*.ts",
+        "packages/rules/src/cli/bin.ts",
+        "packages/cli/src/cli/bin.ts",
        "**/dist/**",
      ],
    },