diff --git a/.env.example b/.env.example index 29423e15..9dc07a3a 100644 --- a/.env.example +++ b/.env.example @@ -2,3 +2,13 @@ # This is public information (visible in the OAuth authorize URL). # Set this as a GitHub Actions variable (not a secret) for CI/CD. VITE_GITHUB_CLIENT_ID=your_oauth_app_client_id_here + +# ── MCP Server ──────────────────────────────────────────────────────────────── +# Personal Access Token (PAT) or OAuth token for the MCP server. +# Required scopes: repo, read:org, notifications +# Used by: mcp/src/octokit.ts +GITHUB_TOKEN=your_github_token_here + +# Port for the WebSocket relay server (MCP ↔ browser dashboard bridge). +# Default: 9876 +# MCP_WS_PORT=9876 diff --git a/.github/workflows/publish-mcp.yml b/.github/workflows/publish-mcp.yml new file mode 100644 index 00000000..1c55fb59 --- /dev/null +++ b/.github/workflows/publish-mcp.yml @@ -0,0 +1,38 @@ +name: Publish MCP Server +on: + push: + tags: ["github-tracker-mcp@*"] +jobs: + build-and-publish: + runs-on: ubuntu-latest + permissions: + contents: read + id-token: write + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + - uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5 + - uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6 + with: + node-version: 24 + registry-url: "https://registry.npmjs.org" + - run: pnpm install --frozen-lockfile + - run: pnpm --filter github-tracker-mcp run typecheck + - run: pnpm --filter github-tracker-mcp run build + - run: pnpm --filter github-tracker-mcp test + - run: cd mcp && pnpm publish --access public --no-git-checks + + create-release: + runs-on: ubuntu-latest + needs: build-and-publish + permissions: + contents: write + steps: + - name: Create GitHub Release + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + gh release create "${{ github.ref_name }}" \ + --repo "${{ github.repository }}" \ + --title "MCP Server ${{ github.ref_name }}" \ + --generate-notes \ + --notes 
$'## Install\n```bash\nnpx github-tracker-mcp@latest\n```\nSee [npm package](https://www.npmjs.com/package/github-tracker-mcp) for full documentation.' diff --git a/.gitignore b/.gitignore index 90067157..dd68020c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,8 @@ node_modules/ dist/ +mcp/dist/ +dist/shared/ +*.tsbuildinfo .wrangler/ .dev.vars *.local diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9a526df5..0af2ca3d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -13,13 +13,30 @@ pnpm run dev The dev server starts at `http://localhost:5173`. You'll need a GitHub OAuth app client ID in `.env` (copy `.env.example` and fill in your value). +The repo uses a pnpm workspace: the root package is the SolidJS SPA; `mcp/` is a separate package (`github-tracker-mcp`) built with tsup. Running `pnpm install` at the root installs both. + +To run the MCP server in standalone mode, set `GITHUB_TOKEN` before starting: + +```bash +GITHUB_TOKEN=ghp_... pnpm mcp:serve +``` + +Fine-grained PATs need Actions (read), Contents (read), Issues (read), and Pull requests (read) permissions. + ## Running checks ```bash -pnpm test # unit tests (Vitest) +pnpm test # unit tests (Vitest — root + mcp/) pnpm test:e2e # Playwright E2E tests (chromium) -pnpm run typecheck # TypeScript validation +pnpm run typecheck # TypeScript validation (root + mcp/) pnpm run screenshot # Capture dashboard screenshot (saves to docs/) +pnpm mcp:serve # Start the MCP server (requires GITHUB_TOKEN) +``` + +To test MCP tools interactively, use the MCP Inspector: + +```bash +npx @modelcontextprotocol/inspector tsx mcp/src/index.ts ``` CI runs typecheck, unit tests, and E2E tests on every PR. Make sure they pass locally before pushing. @@ -76,6 +93,43 @@ type(scope): description Scope is optional. Use imperative mood: "add feature", not "adds feature" or "added feature". +## Releasing the MCP server + +The MCP server (`mcp/` package) is published to npm and GitHub Releases via CI. 
+ +### First publish (manual) + +The very first publish must be done locally — OIDC trusted publishing can only be configured for packages that already exist on npm. + +1. Create an npm account at [npmjs.com](https://www.npmjs.com/signup) if you don't have one +2. Log in locally: `npm login` +3. Build and publish: + ```bash + cd mcp + pnpm run build + pnpm publish --access public + ``` + +### Trusted publishing setup (one-time, after first publish) + +CI publishes via npm OIDC trusted publishing — no tokens or secrets needed. + +1. Go to **npmjs.com > github-tracker-mcp > Settings > Trusted Publishers** +2. Add a trusted publisher: + - **Owner:** `gordon-code` + - **Repository:** `github-tracker` + - **Workflow filename:** `publish-mcp.yml` + +### Cutting a release + +```bash +cd mcp +pnpm version patch # or minor / major +git push upstream main --follow-tags +``` + +`pnpm version` bumps `mcp/package.json`, commits, and creates a `github-tracker-mcp@X.Y.Z` tag. Pushing that tag to upstream triggers CI, which typechecks, builds, tests, publishes to npm, and creates a GitHub release. + ## Pull requests All PRs target `main` on `gordon-code/github-tracker`. Keep PRs focused — one feature or fix per PR makes review faster and reverts cleaner. 
diff --git a/README.md b/README.md index 7e4be0a4..fab21615 100644 --- a/README.md +++ b/README.md @@ -92,6 +92,7 @@ Conditional requests using `If-None-Match` headers — GitHub doesn't count 304 ``` src/ + shared/ # Browser-agnostic types, schemas, format utils shared with MCP server app/ components/ dashboard/ # DashboardPage, IssuesTab, PullRequestsTab, ActionsTab, @@ -105,9 +106,9 @@ src/ # LoadingSpinner, SkeletonRows, ToastContainer, NotificationDrawer, # RepoLockControls, UserAvatarBadge, ExpandCollapseButtons, # RepoGitHubLink, ChevronIcon, ExternalLinkIcon, Tooltip/InfoTooltip - lib/ # 14 modules: format, errors, notifications, oauth, pat, url, + lib/ # 15 modules: format, errors, notifications, oauth, pat, url, # flashDetection, grouping, reorderHighlight, collections, - # emoji, label-colors, sentry, github-emoji-map.json + # emoji, label-colors, sentry, mcp-relay, github-emoji-map.json pages/ # LoginPage, OAuthCallback, PrivacyPage services/ api.ts # GitHub API methods — issues, PRs, workflow runs, user validation, @@ -121,8 +122,11 @@ src/ view.ts # View state (tabs, sorting, filters, ignored items, locked repos) worker/ index.ts # OAuth token exchange endpoint, CORS, security headers -tests/ # unit/component tests across 70 test files -e2e/ # 15 E2E tests across 3 spec files +mcp/ + src/ # MCP server: tools, resources, WebSocket relay, Octokit fallback + tests/ # MCP server unit + integration tests +tests/ # SPA unit/component tests +e2e/ # Playwright E2E tests ``` ## Development @@ -145,6 +149,10 @@ OAuth tokens are stored in `localStorage` under an app-specific key — this is See [DEPLOY.md](./DEPLOY.md) for Cloudflare, OAuth App, and CI/CD setup. +## MCP Server + +An optional MCP (Model Context Protocol) server lets AI clients like Claude Code and Cursor query your dashboard data — open PRs, issues, failing CI — without leaving the editor. See the [MCP server README](mcp/README.md) for setup, available tools, and configuration. 
+ ## Contributing See [CONTRIBUTING.md](./CONTRIBUTING.md). diff --git a/docs/USER_GUIDE.md b/docs/USER_GUIDE.md index 3483e0ff..6f1c80d4 100644 --- a/docs/USER_GUIDE.md +++ b/docs/USER_GUIDE.md @@ -34,6 +34,7 @@ GitHub Tracker is a dashboard that aggregates open issues, pull requests, and Gi - [Notifications](#notifications) - [Tracked Items](#tracked-items) - [Repo Pinning](#repo-pinning) +- [MCP Server Integration](#mcp-server-integration) - [Settings Reference](#settings-reference) - [Troubleshooting](#troubleshooting) @@ -359,6 +360,47 @@ Pin state is per-tab — a repo can be pinned on the Issues tab but not the Pull --- +## MCP Server Integration + +The MCP (Model Context Protocol) server lets AI clients like Claude Code and Cursor query your dashboard data — open PRs, issues, failing CI — without leaving the editor. + +MCP access is fully opt-in. Nothing is exposed unless you explicitly run the standalone server or enable the WebSocket relay in Settings. + +### Standalone mode + +Run the MCP server with a GitHub token for direct API access: + +```bash +GITHUB_TOKEN=ghp_... npx github-tracker-mcp +``` + +This works without the dashboard open. The server fetches data directly from GitHub using the token. See the [MCP server README](https://github.com/gordon-code/github-tracker/tree/main/mcp) for Claude Code configuration and the full tool reference. + +### WebSocket relay mode + +For richer data without extra API calls, connect the MCP server to the running dashboard: + +1. Open **Settings > MCP Server Relay** +2. Toggle **Enable relay** on +3. The status indicator shows "Connected" when the MCP server is running and linked + +When connected, the MCP server receives live dashboard data over a local WebSocket connection (`ws://127.0.0.1:9876`). This provides the same enriched data you see in the dashboard — GraphQL-sourced review decisions, check statuses, and reviewer lists — without consuming additional API quota. 
+ +The relay falls back to direct GitHub API calls automatically when the dashboard is closed. Set `GITHUB_TOKEN` even when using the relay as a safety net — without it, all tool calls fail if the relay disconnects. + +### Available tools + +| Tool | What it returns | +|------|----------------| +| `get_dashboard_summary` | Counts: open PRs, open issues, failing CI, PRs needing review, approved but unmerged | +| `get_open_prs` | Open PRs with CI status, review decision, size, reviewers | +| `get_open_issues` | Open issues across tracked repos | +| `get_failing_actions` | In-progress or recently failed workflow runs | +| `get_pr_details` | Full details for a specific PR | +| `get_rate_limit` | Current GitHub API quota | + +--- + ## Settings Reference Settings are saved automatically to `localStorage` and persist across sessions. All settings can be exported as a JSON file via **Settings > Data > Export**. @@ -382,6 +424,8 @@ Settings are saved automatically to `localStorage` and persist across sessions. | Remember last tab | On | Return to the last active tab on revisit. | | Enable tracked items | Off | Show the Tracked tab for pinning issues and PRs to a personal TODO list. | | API Usage | — | Displays per-source API call counts, pool labels (Core/GraphQL), and last-called timestamps for the current rate limit window. Counts auto-reset when the rate limit window expires. Use "Reset counts" to clear manually. | +| MCP relay enabled | Off | Allow a local MCP server to receive live dashboard data over WebSocket. | +| MCP relay port | 9876 | Port for the WebSocket relay connection. Must match the MCP server's `MCP_WS_PORT`. | ### View State Settings @@ -430,6 +474,18 @@ When a tab has been hidden for more than 2 minutes, a catch-up fetch fires autom Go to **Settings > Repositories > Manage Repositories**, find the repo, and deselect it. If it was in the monitored list, it will be removed from monitoring automatically. +**MCP relay shows "Connecting..." 
but never connects.** + +- Verify the MCP server is running (`GITHUB_TOKEN=ghp_... npx github-tracker-mcp` or `pnpm mcp:serve`) +- Check that the port in Settings matches the MCP server's port (default: 9876) +- The MCP server binds to `127.0.0.1` only — it must run on the same machine as your browser + +**MCP tools return empty or stale data.** + +- If the dashboard is open with the relay enabled, the MCP server uses live dashboard data. Navigate to the Dashboard tab to trigger a data load. +- If the dashboard is closed, the MCP server falls back to direct API calls using `GITHUB_TOKEN`. REST search lacks check status and review decision data, so PR filters like `failing` and `approved` may return empty results. Use the relay for full filter accuracy. +- The relay snapshot updates on each full refresh (every 5 minutes by default). Hot poll updates are not forwarded to the relay. + **How do I sign out or reset everything?** - **Sign out**: Settings > Data > Sign out. This clears your auth token and returns you to the login page. Your config is preserved. diff --git a/mcp/README.md b/mcp/README.md new file mode 100644 index 00000000..44b8cec2 --- /dev/null +++ b/mcp/README.md @@ -0,0 +1,68 @@ +# github-tracker-mcp + +MCP server for [GitHub Tracker](https://github.com/gordon-code/github-tracker) — exposes dashboard data (open PRs, issues, failing CI) to AI clients like Claude Code and Cursor. + +## Install + +```bash +# Run without installing +npx github-tracker-mcp + +# Or install globally +npm install -g github-tracker-mcp +``` + +## Configuration + +| Variable | Required | Default | Description | +|----------|----------|---------|-------------| +| `GITHUB_TOKEN` | No | — | Classic PAT with `repo` and `read:org` scopes (recommended), or fine-grained PAT with Actions (read), Contents (read), Issues (read), and Pull requests (read) permissions. Fine-grained PATs skip scope validation at startup. 
| +| `MCP_WS_PORT` | No | `9876` | WebSocket relay port for receiving live data from the dashboard SPA. | + +`GITHUB_TOKEN` is required for standalone (direct API) mode. In relay mode the server receives data from the dashboard and works without a token. If you set `GITHUB_TOKEN` alongside the relay, the server uses it as a fallback when the relay disconnects. + +## Claude Code setup + +```bash +claude mcp add --transport stdio --env GITHUB_TOKEN=ghp_... github-tracker -- npx -y github-tracker-mcp@latest +``` + +Or add `--scope project` to store in `.mcp.json` (shared with git — don't include real tokens in committed files). + +## Available tools + +| Tool | Description | Parameters | +|------|-------------|------------| +| `get_dashboard_summary` | Aggregated counts of open PRs, issues, failing CI, PRs needing review, approved but unmerged | `scope?` (involves_me\|all, default: involves_me) | +| `get_open_prs` | Open PRs with check status and review decision | `repo?`, `status?` (all\|needs_review\|failing\|approved\|draft) | +| `get_open_issues` | Open issues across tracked repos | `repo?` | +| `get_failing_actions` | In-progress or recently failed workflow runs | `repo?` | +| `get_pr_details` | Detailed info about a specific PR | `repo`, `number` | +| `get_rate_limit` | Current GitHub API rate limit status | — | + +`repo` parameters use `owner/repo` format (e.g., `octocat/hello-world`). + +## Resources + +- `tracker://config` — current dashboard configuration (selected repos, tracked users) +- `tracker://repos` — list of tracked repositories + +## WebSocket relay + +Enable the WebSocket relay in the dashboard's Settings page to let the MCP server receive live data directly from the SPA. When connected, the server prefers relay data and falls back to direct GitHub API calls. This reduces API usage and gives the AI client the same enriched data visible in the dashboard without separate polling. + +The relay listens on `ws://127.0.0.1:9876` by default. 
Override with `MCP_WS_PORT`. + +### Direct API mode limitations + +Without the relay, the MCP server uses REST search which lacks some GraphQL-sourced fields. This affects: + +- `get_open_prs` — `status=failing` and `status=approved` filters return empty results (REST search lacks check status and review decision data). `status=needs_review` works correctly via the `review-requested:` search qualifier. +- `get_dashboard_summary` — `approvedUnmergedCount` is always 0; `scope` parameter works as expected +- `get_dashboard_summary` — when the relay IS connected, `scope` is ignored (the relay always reflects the dashboard's current data set) + +For full filter accuracy for `failing` and `approved` statuses, use the WebSocket relay. + +## Full documentation + +See the [GitHub Tracker repository](https://github.com/gordon-code/github-tracker) for deployment, contributing, and architecture details. diff --git a/mcp/package.json b/mcp/package.json new file mode 100644 index 00000000..510e8835 --- /dev/null +++ b/mcp/package.json @@ -0,0 +1,44 @@ +{ + "name": "github-tracker-mcp", + "version": "0.1.0", + "description": "MCP server for GitHub Tracker — serves dashboard data to AI clients", + "type": "module", + "bin": { + "github-tracker-mcp": "./dist/index.js" + }, + "files": ["dist", "README.md"], + "scripts": { + "build": "tsup", + "typecheck": "tsc --build", + "dev": "tsx src/index.ts", + "start": "node dist/index.js", + "test": "vitest run", + "prepublishOnly": "pnpm run build" + }, + "keywords": ["mcp", "github", "dashboard", "model-context-protocol"], + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/gordon-code/github-tracker.git", + "directory": "mcp" + }, + "engines": { + "node": ">=22" + }, + "dependencies": { + "@modelcontextprotocol/sdk": "^1.29.0", + "ws": "^8.17.0", + "zod": "4.3.6" + }, + "devDependencies": { + "@octokit/core": "7.0.6", + "@octokit/plugin-paginate-rest": "14.0.0", + "@octokit/plugin-retry": "8.1.0", + 
"@octokit/plugin-throttling": "11.0.3",
+    "@types/ws": "^8.0.0",
+    "tsup": "^8.0.0",
+    "tsx": "^4.0.0",
+    "typescript": "5.9.3",
+    "vitest": "^4.0.0"
+  }
+}
diff --git a/mcp/src/data-source.ts b/mcp/src/data-source.ts
new file mode 100644
index 00000000..3b28f14d
--- /dev/null
+++ b/mcp/src/data-source.ts
@@ -0,0 +1,655 @@
+// ── Data source abstractions ──────────────────────────────────────────────────
+// Defines the DataSource interface plus two concrete implementations:
+//   OctokitDataSource   — fetches directly from GitHub REST API
+//   WebSocketDataSource — forwards requests to the SPA via WebSocket relay
+//   CompositeDataSource — tries WebSocket first, falls back to Octokit
+
+import { VALID_REPO_NAME } from "../../src/shared/validation.js";
+import { METHODS } from "../../src/shared/protocol.js";
+import type {
+  Issue,
+  PullRequest,
+  WorkflowRun,
+  RepoRef,
+  RateLimitInfo,
+  DashboardSummary,
+} from "../../src/shared/types.js";
+import type { TrackedUser } from "../../src/shared/schemas.js";
+import { sendRelayRequest, isRelayConnected } from "./ws-relay.js";
+
+// ── Cached config (populated by config_update notification) ───────────────────
+
+export interface CachedConfig {
+  selectedRepos: RepoRef[];
+  trackedUsers: TrackedUser[];
+  upstreamRepos: RepoRef[];
+  monitoredRepos: RepoRef[];
+}
+
+let _cachedConfig: CachedConfig | null = null;
+
+export function setCachedConfig(c: CachedConfig): void {
+  _cachedConfig = c;
+}
+
+export function clearCachedConfig(): void {
+  _cachedConfig = null;
+}
+
+// ── DataSource interface ──────────────────────────────────────────────────────
+
+export interface DataSource {
+  getDashboardSummary(scope: string): Promise<DashboardSummary>;
+  getOpenPRs(repo?: string, status?: string): Promise<PullRequest[]>;
+  getOpenIssues(repo?: string): Promise<Issue[]>;
+  getFailingActions(repo?: string): Promise<WorkflowRun[]>;
+  getPRDetails(repo: string, number: number): Promise<PullRequest | null>;
+  getRateLimit(): Promise<RateLimitInfo>;
+  getConfig(): Promise<CachedConfig | null>;
+  getRepos(): Promise<RepoRef[]>;
+}
+
+// ── Octokit type
(avoid importing the full extended class) ────────────────────
+
+interface OctokitLike {
+  request: (route: string, params?: Record<string, unknown>) => Promise<{ data: unknown; headers: Record<string, string> }>;
+}
+
+// ── Helpers ───────────────────────────────────────────────────────────────────
+
+function validateRepoParam(repo: string): void {
+  if (!VALID_REPO_NAME.test(repo)) {
+    throw new Error(`Invalid repo format: ${JSON.stringify(repo)}. Expected "owner/name".`);
+  }
+}
+
+function repoParamToRepoRef(repo: string): RepoRef {
+  const [owner, name] = repo.split("/");
+  return { owner, name, fullName: repo };
+}
+
+/**
+ * Returns repos to query: explicit param → single validated repo,
+ * otherwise all repos from cached config.
+ * Throws a descriptive error if no config and no explicit param.
+ */
+function resolveRepos(repo?: string): RepoRef[] {
+  if (repo) {
+    validateRepoParam(repo);
+    return [repoParamToRepoRef(repo)];
+  }
+  if (!_cachedConfig) {
+    throw new Error(
+      "No repository configuration available. Either pass an explicit `repo` parameter or connect the SPA to send a config_update."
+    );
+  }
+  return _cachedConfig.selectedRepos;
+}
+
+function chunkArray<T>(arr: T[], size: number): T[][] {
+  const chunks: T[][] = [];
+  for (let i = 0; i < arr.length; i += size) {
+    chunks.push(arr.slice(i, i + size));
+  }
+  return chunks;
+}
+
+function repoFullNameFromUrl(repositoryUrl: string): string {
+  try {
+    const url = new URL(repositoryUrl);
+    const match = url.pathname.match(/^\/repos\/(.+)$/);
+    if (match) return match[1];
+  } catch {
+    // invalid URL — fall through
+  }
+  const prefix = "/repos/";
+  const idx = repositoryUrl.indexOf(prefix);
+  if (idx !== -1) return repositoryUrl.slice(idx + prefix.length);
+  return repositoryUrl;
+}
+
+// ── REST search result → PullRequest mapper ───────────────────────────────────
+
+interface SearchItem {
+  id: number;
+  number: number;
+  title: string;
+  state: string;
+  draft?: boolean;
+  html_url: string;
+  created_at: string;
+  updated_at: string;
+  user: { login: string; avatar_url: string } | null;
+  repository_url: string;
+  labels: { name: string; color: string }[];
+  assignees: { login: string }[] | null;
+  pull_request?: { merged_at: string | null };
+}
+
+function mapSearchItemToPR(item: SearchItem, repoFullName: string): PullRequest {
+  return {
+    id: item.id,
+    number: item.number,
+    title: item.title,
+    state: item.state,
+    draft: item.draft ?? false,
+    htmlUrl: item.html_url,
+    createdAt: item.created_at,
+    updatedAt: item.updated_at,
+    userLogin: item.user?.login ?? "",
+    userAvatarUrl: item.user?.avatar_url ?? "",
+    repoFullName,
+    labels: (item.labels ?? []).map((l) => ({ name: l.name, color: l.color })),
+    assigneeLogins: (item.assignees ??
[]).map((a) => a.login), + // Fields not available from REST search: + checkStatus: null, + reviewDecision: null, + reviewerLogins: [], + additions: 0, + deletions: 0, + changedFiles: 0, + enriched: false, + headSha: "", + headRef: "", + baseRef: "", + comments: 0, + reviewThreads: 0, + totalReviewCount: 0, + }; +} + +function mapSearchItemToIssue(item: SearchItem, repoFullName: string): Issue { + return { + id: item.id, + number: item.number, + title: item.title, + state: item.state, + htmlUrl: item.html_url, + createdAt: item.created_at, + updatedAt: item.updated_at, + userLogin: item.user?.login ?? "", + userAvatarUrl: item.user?.avatar_url ?? "", + repoFullName, + labels: (item.labels ?? []).map((l) => ({ name: l.name, color: l.color })), + assigneeLogins: (item.assignees ?? []).map((a) => a.login), + comments: 0, + }; +} + +interface WorkflowRunRaw { + id: number; + name: string; + status: string; + conclusion: string | null; + event: string; + workflow_id: number; + head_sha: string; + head_branch: string; + run_number: number; + html_url: string; + created_at: string; + updated_at: string; + run_started_at: string; + completed_at: string | null; + run_attempt: number; + display_title: string; + actor: { login: string } | null; + pull_requests: unknown[]; + jobs_url: string; +} + +function mapWorkflowRun(raw: WorkflowRunRaw, repoFullName: string): WorkflowRun { + return { + id: raw.id, + name: raw.name ?? "", + status: raw.status ?? "", + conclusion: raw.conclusion, + event: raw.event ?? "", + workflowId: raw.workflow_id, + headSha: raw.head_sha ?? "", + headBranch: raw.head_branch ?? "", + runNumber: raw.run_number, + htmlUrl: raw.html_url, + createdAt: raw.created_at, + updatedAt: raw.updated_at, + repoFullName, + isPrRun: Array.isArray(raw.pull_requests) && raw.pull_requests.length > 0, + runStartedAt: raw.run_started_at ?? raw.created_at, + completedAt: raw.completed_at ?? null, + runAttempt: raw.run_attempt ?? 1, + displayTitle: raw.display_title ?? 
raw.name ?? "", + actorLogin: raw.actor?.login ?? "", + }; +} + +// ── OctokitDataSource ───────────────────────────────────────────────────────── + +export class OctokitDataSource implements DataSource { + private readonly octokit: OctokitLike; + private _login: string | null = null; + + constructor(octokit: OctokitLike) { + this.octokit = octokit; + // Discover authenticated login lazily on first use + } + + private async getLogin(): Promise { + if (this._login) return this._login; + const { data } = await this.octokit.request("GET /user"); + const login = (data as { login: string }).login; + // Must throw — an empty login produces a broken "involves:" query string + if (!login) throw new Error("Could not determine authenticated user login from GET /user"); + this._login = login; + return this._login; + } + + async getOpenPRs(repo?: string, status?: string): Promise { + const login = await this.getLogin(); + const repos = resolveRepos(repo); + const results: PullRequest[] = []; + + // For needs_review, use review-requested: qualifier — REST search lacks reviewDecision data, + // so post-filtering on reviewDecision would always return empty. + const userQualifier = status === "needs_review" + ? `review-requested:${login}` + : `involves:${login}`; + + // Batch repos to avoid N+1 REST calls — GitHub search supports multiple repo: qualifiers. + const batches = chunkArray(repos, 20); + + const batchResults = await Promise.allSettled( + batches.map((batch) => { + const repoFilter = batch.map((r) => `repo:${r.owner}/${r.name}`).join("+"); + const q = `is:pr+is:open+${userQualifier}+${repoFilter}`; + return this.octokit.request("GET /search/issues", { q, per_page: 100 }).then(({ data }) => { + const items = (data as { items: SearchItem[] }).items ?? 
[]; + const prs: PullRequest[] = []; + for (const item of items) { + if (item.pull_request !== undefined) { + const repoFullName = repoFullNameFromUrl(item.repository_url); + prs.push(mapSearchItemToPR(item, repoFullName)); + } + } + return prs; + }); + }) + ); + + for (const settled of batchResults) { + if (settled.status === "fulfilled") { + results.push(...settled.value); + } else { + console.error("[mcp] getOpenPRs batch error:", settled.reason instanceof Error ? settled.reason.message : String(settled.reason)); + } + } + + if (status && status !== "all") { + // needs_review is handled by the search qualifier above — no post-filter needed. + if (status === "needs_review") return results; + return results.filter((pr) => { + switch (status) { + case "draft": return pr.draft; + // REST search lacks checkStatus data — failing filter returns empty on Octokit path + case "failing": return pr.checkStatus === "failure"; + case "approved": return pr.reviewDecision === "APPROVED"; + default: return true; + } + }); + } + + return results; + } + + async getOpenIssues(repo?: string): Promise { + const login = await this.getLogin(); + const repos = resolveRepos(repo); + const results: Issue[] = []; + + // Batch repos to avoid N+1 REST calls. + const batches = chunkArray(repos, 20); + + const batchResults = await Promise.allSettled( + batches.map((batch) => { + const repoFilter = batch.map((r) => `repo:${r.owner}/${r.name}`).join("+"); + const q = `is:issue+is:open+involves:${login}+${repoFilter}`; + return this.octokit.request("GET /search/issues", { q, per_page: 100 }).then(({ data }) => { + const items = (data as { items: SearchItem[] }).items ?? 
[]; + const issues: Issue[] = []; + for (const item of items) { + // Filter out PRs from issue search + if (item.pull_request === undefined) { + const repoFullName = repoFullNameFromUrl(item.repository_url); + issues.push(mapSearchItemToIssue(item, repoFullName)); + } + } + return issues; + }); + }) + ); + + for (const settled of batchResults) { + if (settled.status === "fulfilled") { + results.push(...settled.value); + } else { + console.error("[mcp] getOpenIssues batch error:", settled.reason instanceof Error ? settled.reason.message : String(settled.reason)); + } + } + + return results; + } + + async getFailingActions(repo?: string): Promise { + const repos = resolveRepos(repo); + + const pairs = repos.flatMap((r) => + (["in_progress", "failure"] as const).map((status) => ({ r, status })) + ); + + const settled = await Promise.allSettled( + pairs.map(({ r, status }) => + this.octokit.request( + "GET /repos/{owner}/{repo}/actions/runs", + { owner: r.owner, repo: r.name, status, per_page: 20 } + ).then(({ data }) => { + const runs = (data as { workflow_runs: WorkflowRunRaw[] }).workflow_runs ?? []; + return runs.map((run) => mapWorkflowRun(run, r.fullName)); + }) + ) + ); + + const results: WorkflowRun[] = []; + for (let i = 0; i < settled.length; i++) { + const result = settled[i]; + if (result.status === "fulfilled") { + results.push(...result.value); + } else { + const { r, status } = pairs[i]; + console.error(`[mcp] getFailingActions error for ${r.fullName} (${status}):`, result.reason instanceof Error ? 
result.reason.message : String(result.reason)); + } + } + + return results; + } + + async getPRDetails(repo: string, number: number): Promise { + validateRepoParam(repo); + const [owner, name] = repo.split("/"); + try { + const { data } = await this.octokit.request( + "GET /repos/{owner}/{repo}/pulls/{pull_number}", + { owner, repo: name, pull_number: number } + ); + const raw = data as { + id: number; + number: number; + title: string; + state: string; + draft: boolean; + html_url: string; + created_at: string; + updated_at: string; + user: { login: string; avatar_url: string } | null; + head: { sha: string; ref: string }; + base: { ref: string }; + assignees: { login: string }[]; + requested_reviewers: { login: string }[]; + labels: { name: string; color: string }[]; + additions: number; + deletions: number; + changed_files: number; + comments: number; + review_comments: number; + }; + return { + id: raw.id, + number: raw.number, + title: raw.title, + state: raw.state, + draft: raw.draft ?? false, + htmlUrl: raw.html_url, + createdAt: raw.created_at, + updatedAt: raw.updated_at, + userLogin: raw.user?.login ?? "", + userAvatarUrl: raw.user?.avatar_url ?? "", + headSha: raw.head.sha, + headRef: raw.head.ref, + baseRef: raw.base.ref, + assigneeLogins: (raw.assignees ?? []).map((a) => a.login), + reviewerLogins: (raw.requested_reviewers ?? []).map((r) => r.login), + repoFullName: repo, + checkStatus: null, + additions: raw.additions ?? 0, + deletions: raw.deletions ?? 0, + changedFiles: raw.changed_files ?? 0, + comments: (raw.comments ?? 0) + (raw.review_comments ?? 0), + reviewThreads: raw.review_comments ?? 0, + labels: (raw.labels ?? 
[]).map((l) => ({ name: l.name, color: l.color })), + reviewDecision: null, + totalReviewCount: 0, + enriched: true, + }; + } catch (err) { + const status = (err as { status?: number }).status; + if (status === 404) return null; + throw err; + } + } + + async getRateLimit(): Promise { + const { data } = await this.octokit.request("GET /rate_limit"); + const core = (data as { rate: { limit: number; remaining: number; reset: number } }).rate; + return { + limit: core.limit, + remaining: core.remaining, + resetAt: new Date(core.reset * 1000), + }; + } + + async getDashboardSummary(scope: string): Promise { + const login = await this.getLogin(); + const repos = _cachedConfig?.selectedRepos ?? []; + + if (repos.length === 0) { + return { openPRCount: 0, openIssueCount: 0, failingRunCount: 0, needsReviewCount: 0, approvedUnmergedCount: 0 }; + } + + const repoFilter = repos.map((r) => `repo:${r.owner}/${r.name}`).join("+"); + const involvesPart = scope === "involves_me" ? `+involves:${login}` : ""; + + let openPRCount = 0; + let openIssueCount = 0; + let needsReviewCount = 0; + // REST search lacks reviewDecision data — approved count requires GraphQL (relay path only) + const approvedUnmergedCount = 0; + let failingRunCount = 0; + + const [prResult, issueResult, reviewResult] = await Promise.allSettled([ + this.octokit.request("GET /search/issues", { q: `is:pr+is:open${involvesPart}+${repoFilter}`, per_page: 1 }), + this.octokit.request("GET /search/issues", { q: `is:issue+is:open${involvesPart}+${repoFilter}`, per_page: 1 }), + this.octokit.request("GET /search/issues", { q: `is:pr+is:open+review-requested:${login}+${repoFilter}`, per_page: 1 }), + ]); + + if (prResult.status === "fulfilled") { + openPRCount = (prResult.value.data as { total_count: number }).total_count; + } else { + console.error("[mcp] getDashboardSummary PR count error:", prResult.reason instanceof Error ? 
prResult.reason.message : String(prResult.reason)); + } + if (issueResult.status === "fulfilled") { + openIssueCount = (issueResult.value.data as { total_count: number }).total_count; + } else { + console.error("[mcp] getDashboardSummary issue count error:", issueResult.reason instanceof Error ? issueResult.reason.message : String(issueResult.reason)); + } + if (reviewResult.status === "fulfilled") { + needsReviewCount = (reviewResult.value.data as { total_count: number }).total_count; + } else { + console.error("[mcp] getDashboardSummary review count error:", reviewResult.reason instanceof Error ? reviewResult.reason.message : String(reviewResult.reason)); + } + + const failingRunResults = await Promise.allSettled( + repos.map((r) => + this.octokit.request( + "GET /repos/{owner}/{repo}/actions/runs", + { owner: r.owner, repo: r.name, status: "failure", per_page: 5 } + ) + ) + ); + for (const settled of failingRunResults) { + if (settled.status === "fulfilled") { + failingRunCount += (settled.value.data as { total_count: number }).total_count; + } + } + + return { openPRCount, openIssueCount, failingRunCount, needsReviewCount, approvedUnmergedCount }; + } + + async getConfig(): Promise { + return _cachedConfig; + } + + async getRepos(): Promise { + return _cachedConfig?.selectedRepos ?? []; + } +} + +// ── WebSocketDataSource ─────────────────────────────────────────────────────── +// Forwards all calls to the SPA via JSON-RPC over WebSocket relay. 
+ +export class WebSocketDataSource implements DataSource { + async getDashboardSummary(scope: string): Promise { + return sendRelayRequest(METHODS.GET_DASHBOARD_SUMMARY, { scope }) as Promise; + } + + async getOpenPRs(repo?: string, status?: string): Promise { + return sendRelayRequest(METHODS.GET_OPEN_PRS, { repo, status }) as Promise; + } + + async getOpenIssues(repo?: string): Promise { + return sendRelayRequest(METHODS.GET_OPEN_ISSUES, { repo }) as Promise; + } + + async getFailingActions(repo?: string): Promise { + return sendRelayRequest(METHODS.GET_FAILING_ACTIONS, { repo }) as Promise; + } + + async getPRDetails(repo: string, number: number): Promise { + return sendRelayRequest(METHODS.GET_PR_DETAILS, { repo, number }) as Promise; + } + + async getRateLimit(): Promise { + // SPA relay returns { core: {...}, graphql: {...} } — unwrap the core property. + const raw = await sendRelayRequest(METHODS.GET_RATE_LIMIT, {}) as { + core?: { limit: number; remaining: number; resetAt: string }; + limit?: number; + remaining?: number; + resetAt?: string; + }; + const core = raw.core ?? (raw as { limit: number; remaining: number; resetAt: string }); + return { + limit: core.limit, + remaining: core.remaining, + resetAt: new Date(core.resetAt), + }; + } + + async getConfig(): Promise { + return sendRelayRequest(METHODS.GET_CONFIG, {}) as Promise; + } + + async getRepos(): Promise { + return sendRelayRequest(METHODS.GET_REPOS, {}) as Promise; + } +} + +// ── CompositeDataSource ─────────────────────────────────────────────────────── +// Tries WebSocket relay first; falls back to Octokit when relay is unavailable. 
+ +type DataSourceName = "relay" | "octokit"; + +export class CompositeDataSource implements DataSource { + private readonly ws: WebSocketDataSource; + private readonly octokit: DataSource; + private _lastSource: DataSourceName | null = null; + + constructor(ws: WebSocketDataSource, octokit: DataSource) { + this.ws = ws; + this.octokit = octokit; + } + + private logTransition(source: DataSourceName): void { + if (source !== this._lastSource) { + console.error(`[mcp] Data source: ${source}`); + this._lastSource = source; + } + } + + private async tryBoth(method: () => Promise, fallback: () => Promise): Promise { + if (isRelayConnected()) { + try { + const result = await method(); + this.logTransition("relay"); + return result; + } catch (err) { + console.error("[mcp] Relay request failed, falling back to Octokit:", err instanceof Error ? err.message : String(err)); + } + } + const result = await fallback(); + this.logTransition("octokit"); + return result; + } + + async getDashboardSummary(scope: string): Promise { + return this.tryBoth( + () => this.ws.getDashboardSummary(scope), + () => this.octokit.getDashboardSummary(scope) + ); + } + + async getOpenPRs(repo?: string, status?: string): Promise { + return this.tryBoth( + () => this.ws.getOpenPRs(repo, status), + () => this.octokit.getOpenPRs(repo, status) + ); + } + + async getOpenIssues(repo?: string): Promise { + return this.tryBoth( + () => this.ws.getOpenIssues(repo), + () => this.octokit.getOpenIssues(repo) + ); + } + + async getFailingActions(repo?: string): Promise { + return this.tryBoth( + () => this.ws.getFailingActions(repo), + () => this.octokit.getFailingActions(repo) + ); + } + + async getPRDetails(repo: string, number: number): Promise { + return this.tryBoth( + () => this.ws.getPRDetails(repo, number), + () => this.octokit.getPRDetails(repo, number) + ); + } + + async getRateLimit(): Promise { + return this.tryBoth( + () => this.ws.getRateLimit(), + () => this.octokit.getRateLimit() + ); + } + + 
async getConfig(): Promise { + return this.tryBoth( + () => this.ws.getConfig(), + () => this.octokit.getConfig() + ); + } + + async getRepos(): Promise { + return this.tryBoth( + () => this.ws.getRepos(), + () => this.octokit.getRepos() + ); + } +} diff --git a/mcp/src/index.ts b/mcp/src/index.ts new file mode 100644 index 00000000..12895558 --- /dev/null +++ b/mcp/src/index.ts @@ -0,0 +1,126 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; +import { z } from "zod"; +import { getOptionalOctokitClient, validateTokenScopes } from "./octokit.js"; +import { + OctokitDataSource, + WebSocketDataSource, + CompositeDataSource, + setCachedConfig, +} from "./data-source.js"; +import type { DataSource } from "./data-source.js"; +import type { DashboardSummary, Issue, PullRequest, RateLimitInfo, RepoRef, WorkflowRun } from "../../src/shared/types.js"; +import { registerTools } from "./tools.js"; +import { registerResources } from "./resources.js"; +import { startWebSocketServer, closeWebSocketServer, onNotification } from "./ws-relay.js"; +import { NOTIFICATIONS } from "../../src/shared/protocol.js"; +import { RepoRefSchema, TrackedUserSchema } from "../../src/shared/schemas.js"; + +// ── Server setup ────────────────────────────────────────────────────────────── + +const server = new McpServer({ + name: "github-tracker", + version: "0.1.0", +}); + +// ── Config update validation schemas ────────────────────────────────────────── + +const MAX_REPOS = 200; +const MAX_TRACKED_USERS = 10; +const MAX_MONITORED_REPOS = 10; + +const ConfigUpdatePayloadSchema = z.object({ + selectedRepos: RepoRefSchema.array().max(MAX_REPOS).default([]), + trackedUsers: TrackedUserSchema.array().max(MAX_TRACKED_USERS).default([]), + upstreamRepos: RepoRefSchema.array().max(MAX_REPOS).default([]), + monitoredRepos: RepoRefSchema.array().max(MAX_MONITORED_REPOS).default([]), +}); + +// ── Main 
entry point ────────────────────────────────────────────────────────── + +async function main() { + // Start WebSocket relay before MCP transport + const wss = startWebSocketServer(); + + // Wire config_update notification with Zod validation + onNotification(NOTIFICATIONS.CONFIG_UPDATE, (params) => { + const result = ConfigUpdatePayloadSchema.safeParse(params); + if (!result.success) { + console.error( + "[mcp] config_update payload failed validation:", + result.error.issues.map((i) => `${i.path.join(".")}: ${i.message}`).join("; ") + ); + return; + } + setCachedConfig(result.data); + console.error( + `[mcp] Config updated: ${result.data.selectedRepos.length} repos, ` + + `${result.data.trackedUsers.length} tracked users` + ); + }); + + // Build data source (WebSocket + Octokit composite) + const octokitClient = getOptionalOctokitClient(); + const octokitDs = octokitClient + ? new OctokitDataSource(octokitClient) + : null; + const wsDs = new WebSocketDataSource(); + + // If no Octokit client, create a minimal fallback that always errors + const effectiveOctokitDs = octokitDs ?? createUnavailableDataSource(); + const dataSource = new CompositeDataSource(wsDs, effectiveOctokitDs); + + // Register tools and resources + registerTools(server, dataSource); + registerResources(server, dataSource); + + // Validate token scopes (logs to stderr) + await validateTokenScopes(); + + // Connect MCP stdio transport + const transport = new StdioServerTransport(); + await server.connect(transport); + console.error("[mcp] GitHub Tracker MCP server started"); + + // Graceful shutdown handlers + const shutdown = async (signal: string) => { + console.error(`[mcp] Received ${signal}, shutting down...`); + await closeWebSocketServer(); + await server.close(); + process.exit(0); + }; + + process.on("SIGINT", () => void shutdown("SIGINT")); + process.on("SIGTERM", () => void shutdown("SIGTERM")); + + // Log WebSocket address + if (wss) { + const port = process.env.MCP_WS_PORT ?? 
"9876"; + console.error(`[mcp] WebSocket relay available at ws://127.0.0.1:${port}`); + } +} + +// ── Unavailable data source stub ────────────────────────────────────────────── +// Used when no GITHUB_TOKEN is set — all methods throw a clear error. + +function createUnavailableDataSource(): DataSource { + const err = () => Promise.reject(new Error( + "No GITHUB_TOKEN set and SPA relay is not connected. " + + "Set GITHUB_TOKEN or open the dashboard to enable data access." + )); + return { + getDashboardSummary: (): Promise => err(), + getOpenPRs: (): Promise => err(), + getOpenIssues: (): Promise => err(), + getFailingActions: (): Promise => err(), + getPRDetails: (): Promise => err(), + getRateLimit: (): Promise => err(), + getConfig: () => Promise.resolve(null), + getRepos: (): Promise => Promise.resolve([]), + }; +} + +main().catch((error) => { + console.error("[mcp] Failed to start MCP server:", error); + process.exit(1); +}); diff --git a/mcp/src/octokit.ts b/mcp/src/octokit.ts new file mode 100644 index 00000000..bd4d22f9 --- /dev/null +++ b/mcp/src/octokit.ts @@ -0,0 +1,145 @@ +import { Octokit } from "@octokit/core"; +import { throttling } from "@octokit/plugin-throttling"; +import { retry } from "@octokit/plugin-retry"; +import { paginateRest } from "@octokit/plugin-paginate-rest"; + +// ── Plugin-extended Octokit class ──────────────────────────────────────────── + +const GitHubOctokit = Octokit.plugin(throttling, retry, paginateRest); + +type GitHubOctokitInstance = InstanceType; + +// ── Client factory ─────────────────────────────────────────────────────────── + +export function createOctokitClient(token: string): GitHubOctokitInstance { + const client = new GitHubOctokit({ + auth: token, + userAgent: "github-tracker-mcp", + throttle: { + onRateLimit: ( + retryAfter: number, + options: { method: string; url: string }, + _octokit: GitHubOctokitInstance, + retryCount: number + ) => { + console.error( + `[mcp] Rate limit hit for ${options.method} 
${options.url}. Retry after ${retryAfter}s.`
        );
        // Retry once on primary rate limits, then give up.
        return retryCount < 1;
      },
      onSecondaryRateLimit: (
        retryAfter: number,
        options: { method: string; url: string },
        _octokit: GitHubOctokitInstance,
        retryCount: number
      ) => {
        console.error(
          `[mcp] Secondary rate limit for ${options.method} ${options.url}. Retry after ${retryAfter}s.`
        );
        return retryCount < 1;
      },
    },
    retry: {
      retries: 2,
      // Include 429 to prevent double-handling with plugin-throttling
      doNotRetry: [400, 401, 403, 404, 410, 422, 429, 451],
    },
  });

  // Read-only guard: block any non-GET request except POST /graphql
  // (GraphQL queries are read-only but always use POST).
  client.hook.before("request", (options) => {
    const method = (options.method ?? "GET").toUpperCase();
    if (method === "GET") return;
    if (method === "POST" && options.url === "/graphql") return;
    throw new Error(
      `[mcp] Write operation blocked: ${method} ${options.url}. This server is read-only.`
    );
  });

  return client;
}

// ── Singleton management ─────────────────────────────────────────────────────

let _instance: GitHubOctokitInstance | null = null;

/**
 * Returns an Octokit instance if GITHUB_TOKEN is set, otherwise null.
 */
export function getOptionalOctokitClient(): GitHubOctokitInstance | null {
  if (_instance) return _instance;
  const token = process.env.GITHUB_TOKEN;
  if (!token) return null;
  _instance = createOctokitClient(token);
  return _instance;
}

/**
 * Returns an Octokit instance or throws if GITHUB_TOKEN is not set.
 */
export function getOctokitClient(): GitHubOctokitInstance {
  const client = getOptionalOctokitClient();
  if (!client) {
    throw new Error(
      "[mcp] GITHUB_TOKEN environment variable is required but not set."
    );
  }
  return client;
}

// ── Token scope validation ───────────────────────────────────────────────────

const REQUIRED_SCOPES = ["repo", "read:org"];

/**
 * Validates the token at startup by calling GET /user and inspecting x-oauth-scopes.
 * Logs a warning to stderr if required scopes are missing.
 * Returns true if validation passed, false if token is invalid.
 */
export async function validateTokenScopes(): Promise<boolean> {
  const client = getOptionalOctokitClient();
  if (!client) {
    console.error("[mcp] No GITHUB_TOKEN set — operating in unauthenticated mode.");
    return false;
  }

  try {
    const response = await client.request("GET /user");
    const login = String((response.data as { login?: string }).login ?? "unknown");
    // Record generic restored (lost in extraction): header may be absent for
    // fine-grained PATs, hence string | undefined.
    const rawScopeHeader = (response.headers as Record<string, string | undefined>)["x-oauth-scopes"];

    if (rawScopeHeader === undefined) {
      // Fine-grained PAT — x-oauth-scopes header is not returned
      console.error(
        `[mcp] Token validated (fine-grained PAT). User: ${login}. ` +
        `Scope validation skipped — fine-grained PATs use repository/organization permissions instead of OAuth scopes.`
      );
    } else {
      const grantedScopes = rawScopeHeader
        .split(",")
        .map((s: string) => s.trim())
        .filter(Boolean);

      const missingScopes = REQUIRED_SCOPES.filter(
        (required) => !grantedScopes.includes(required)
      );

      if (missingScopes.length > 0) {
        console.error(
          `[mcp] Warning: token is missing required scopes: ${missingScopes.join(", ")}. ` +
          `Granted: ${grantedScopes.join(", ") || "(none)"}`
        );
      } else {
        console.error(
          `[mcp] Token validated. User: ${login}, Scopes: ${grantedScopes.join(", ")}`
        );
      }
    }

    return true;
  } catch (err) {
    console.error("[mcp] Token validation failed:", err instanceof Error ?
err.message : String(err));
    return false;
  }
}
diff --git a/mcp/src/resources.ts b/mcp/src/resources.ts
new file mode 100644
index 00000000..4e363450
--- /dev/null
+++ b/mcp/src/resources.ts
@@ -0,0 +1,68 @@
// ── MCP resource registration ─────────────────────────────────────────────────
// Registers tracker:// resources with the MCP server.

import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import type { DataSource } from "./data-source.js";

// ── Resource registration ─────────────────────────────────────────────────────

// Registers two read-only resources backed by the active data source.
export function registerResources(server: McpServer, dataSource: DataSource): void {
  // 1. tracker://config — Current tracked repos/users configuration
  server.registerResource(
    "tracker-config",
    "tracker://config",
    {
      description: "Current tracked repos, users, and configuration for the GitHub Tracker",
      mimeType: "application/json",
    },
    // Read callback: serialize the current config, or a placeholder object
    // when no config has been synced from the SPA yet.
    async (_uri) => {
      const config = await dataSource.getConfig();
      const text = config !== null
        ? JSON.stringify(config, null, 2)
        : JSON.stringify({ status: "No configuration available. Connect the SPA to sync config." }, null, 2);
      return {
        contents: [
          {
            uri: "tracker://config",
            mimeType: "application/json",
            text,
          },
        ],
      };
    }
  );

  // 2. tracker://repos — List of configured repositories
  server.registerResource(
    "tracker-repos",
    "tracker://repos",
    {
      description: "List of repositories currently tracked by the GitHub Tracker",
      mimeType: "application/json",
    },
    // Read callback: project each repo down to its identifying fields only.
    async (_uri) => {
      const repos = await dataSource.getRepos();
      const text = JSON.stringify(
        {
          count: repos.length,
          repos: repos.map((r) => ({
            fullName: r.fullName,
            owner: r.owner,
            name: r.name,
          })),
        },
        null,
        2
      );
      return {
        contents: [
          {
            uri: "tracker://repos",
            mimeType: "application/json",
            text,
          },
        ],
      };
    }
  );
}
diff --git a/mcp/src/tools.ts b/mcp/src/tools.ts
new file mode 100644
index 00000000..6a951b58
--- /dev/null
+++ b/mcp/src/tools.ts
@@ -0,0 +1,270 @@
// ── MCP tool registration ─────────────────────────────────────────────────────
// Registers all 6 GitHub Tracker tools with the MCP server.

import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { z } from "zod";
import { METHODS } from "../../src/shared/protocol.js";
import type { DataSource } from "./data-source.js";
import type {
  Issue,
  PullRequest,
  WorkflowRun,
  DashboardSummary,
  RateLimitInfo,
} from "../../src/shared/types.js";
import { isRelayConnected } from "./ws-relay.js";

// ── Formatting helpers ────────────────────────────────────────────────────────

// Trailing footer appended to every tool response indicating data freshness.
function stalenessLine(): string {
  // Relay mode has no staleness annotation (data is live from SPA dashboard).
  // Octokit mode notes that data comes via the GitHub API directly.
  // Ideally relay mode would show "Data as of X ago" using lastUpdatedAt, but that
  // field lives in the SPA's RelaySnapshot and isn't forwarded to the MCP server.
  return isRelayConnected()
    ? ""
    : "\n_(data via GitHub API — connect SPA for live dashboard data)_";
}

// Render one PR as a numbered multi-line plain-text entry.
function formatPR(pr: PullRequest, index: number): string {
  const lines: string[] = [];
  const idx = `${index + 1}.`;
  const draft = pr.draft ?
" [DRAFT]" : ""; + const review = pr.reviewDecision ? ` [${pr.reviewDecision}]` : ""; + const checks = pr.checkStatus ? ` [checks: ${pr.checkStatus}]` : ""; + lines.push(`${idx} #${pr.number} ${pr.title}${draft}${review}${checks}`); + lines.push(` Repo: ${pr.repoFullName} | Author: ${pr.userLogin}`); + if (pr.reviewerLogins.length > 0) { + lines.push(` Reviewers: ${pr.reviewerLogins.join(", ")}`); + } + if (pr.additions || pr.deletions) { + lines.push(` Changes: +${pr.additions} / -${pr.deletions} (${pr.changedFiles} files)`); + } + lines.push(` URL: ${pr.htmlUrl}`); + lines.push(` Updated: ${new Date(pr.updatedAt).toLocaleString()}`); + return lines.join("\n"); +} + +function formatIssue(issue: Issue, index: number): string { + const lines: string[] = []; + const idx = `${index + 1}.`; + const labels = issue.labels.length > 0 ? ` [${issue.labels.map((l) => l.name).join(", ")}]` : ""; + lines.push(`${idx} #${issue.number} ${issue.title}${labels}`); + lines.push(` Repo: ${issue.repoFullName} | Author: ${issue.userLogin}`); + lines.push(` URL: ${issue.htmlUrl}`); + lines.push(` Updated: ${new Date(issue.updatedAt).toLocaleString()}`); + return lines.join("\n"); +} + +function formatRun(run: WorkflowRun, index: number): string { + const lines: string[] = []; + const idx = `${index + 1}.`; + const conclusion = run.conclusion ? 
` [${run.conclusion}]` : ` [${run.status}]`; + lines.push(`${idx} ${run.name}${conclusion} — Run #${run.runNumber}`); + lines.push(` Repo: ${run.repoFullName} | Branch: ${run.headBranch} | Trigger: ${run.event}`); + lines.push(` URL: ${run.htmlUrl}`); + lines.push(` Started: ${new Date(run.runStartedAt).toLocaleString()}`); + return lines.join("\n"); +} + +function formatSummary(summary: DashboardSummary, scope: string): string { + const lines: string[] = [ + `GitHub Tracker Dashboard Summary (scope: ${scope})`, + "─".repeat(50), + `Open PRs: ${summary.openPRCount}`, + `Open Issues: ${summary.openIssueCount}`, + `Failing CI Runs: ${summary.failingRunCount}`, + `Needs Review: ${summary.needsReviewCount}`, + `Approved/Unmerged: ${summary.approvedUnmergedCount}`, + ]; + return lines.join("\n"); +} + +function formatRateLimit(rl: RateLimitInfo): string { + const resetTime = rl.resetAt instanceof Date ? rl.resetAt : new Date(rl.resetAt); + const resetIn = Math.max(0, Math.round((resetTime.getTime() - Date.now()) / 1000)); + const pct = rl.limit > 0 ? Math.round((rl.remaining / rl.limit) * 100) : 0; + return [ + "GitHub API Rate Limit", + "─".repeat(30), + `Remaining: ${rl.remaining} / ${rl.limit} (${pct}%)`, + `Resets at: ${resetTime.toLocaleString()} (in ${resetIn}s)`, + ].join("\n"); +} + +// ── Tool registration ───────────────────────────────────────────────────────── + +export function registerTools(server: McpServer, dataSource: DataSource): void { + // 1. get_dashboard_summary + server.registerTool( + METHODS.GET_DASHBOARD_SUMMARY, + { + description: "Get aggregated counts of open PRs, issues, failing CI runs, and items needing attention", + inputSchema: { + scope: z.enum(["involves_me", "all"]).default("involves_me"), + }, + }, + async (args) => { + const scope = (args as { scope?: string }).scope ?? 
"involves_me"; + try { + const summary = await dataSource.getDashboardSummary(scope); + const text = formatSummary(summary, scope) + stalenessLine(); + return { content: [{ type: "text" as const, text }] }; + } catch (err) { + const text = `Error fetching dashboard summary: ${err instanceof Error ? err.message : String(err)}`; + return { content: [{ type: "text" as const, text }], isError: true }; + } + } + ); + + // 2. get_open_prs + server.registerTool( + METHODS.GET_OPEN_PRS, + { + description: "List open pull requests with status, review decision, and metadata", + inputSchema: { + repo: z.string().optional(), + status: z.enum(["all", "needs_review", "failing", "approved", "draft"]).default("all"), + }, + }, + async (args) => { + const { repo, status } = args as { repo?: string; status?: string }; + try { + const prs = await dataSource.getOpenPRs(repo, status); + if (prs.length === 0) { + const text = `No open pull requests found${repo ? ` in ${repo}` : ""}${status && status !== "all" ? ` with status: ${status}` : ""}.` + stalenessLine(); + return { content: [{ type: "text" as const, text }] }; + } + const header = `Open Pull Requests (${prs.length})${repo ? ` — ${repo}` : ""}`; + const body = prs.map((pr, i) => formatPR(pr, i)).join("\n\n"); + const text = `${header}\n${"─".repeat(header.length)}\n\n${body}${stalenessLine()}`; + return { content: [{ type: "text" as const, text }] }; + } catch (err) { + const text = `Error fetching open PRs: ${err instanceof Error ? err.message : String(err)}`; + return { content: [{ type: "text" as const, text }], isError: true }; + } + } + ); + + // 3. 
get_open_issues + server.registerTool( + METHODS.GET_OPEN_ISSUES, + { + description: "List open issues across tracked repos", + inputSchema: { + repo: z.string().optional(), + }, + }, + async (args) => { + const { repo } = args as { repo?: string }; + try { + const issues = await dataSource.getOpenIssues(repo); + if (issues.length === 0) { + const text = `No open issues found${repo ? ` in ${repo}` : ""}.` + stalenessLine(); + return { content: [{ type: "text" as const, text }] }; + } + const header = `Open Issues (${issues.length})${repo ? ` — ${repo}` : ""}`; + const body = issues.map((issue, i) => formatIssue(issue, i)).join("\n\n"); + const text = `${header}\n${"─".repeat(header.length)}\n\n${body}${stalenessLine()}`; + return { content: [{ type: "text" as const, text }] }; + } catch (err) { + const text = `Error fetching open issues: ${err instanceof Error ? err.message : String(err)}`; + return { content: [{ type: "text" as const, text }], isError: true }; + } + } + ); + + // 4. get_failing_actions + server.registerTool( + METHODS.GET_FAILING_ACTIONS, + { + description: "List in-progress or recently failed GitHub Actions workflow runs", + inputSchema: { + repo: z.string().optional(), + }, + }, + async (args) => { + const { repo } = args as { repo?: string }; + try { + const runs = await dataSource.getFailingActions(repo); + if (runs.length === 0) { + const text = `No failing or in-progress workflow runs found${repo ? ` in ${repo}` : ""}.` + stalenessLine(); + return { content: [{ type: "text" as const, text }] }; + } + const header = `Failing/In-Progress Actions (${runs.length})${repo ? ` — ${repo}` : ""}`; + const body = runs.map((run, i) => formatRun(run, i)).join("\n\n"); + const text = `${header}\n${"─".repeat(header.length)}\n\n${body}${stalenessLine()}`; + return { content: [{ type: "text" as const, text }] }; + } catch (err) { + const text = `Error fetching workflow runs: ${err instanceof Error ? 
err.message : String(err)}`; + return { content: [{ type: "text" as const, text }], isError: true }; + } + } + ); + + // 5. get_pr_details + server.registerTool( + METHODS.GET_PR_DETAILS, + { + description: "Get detailed information about a specific pull request", + inputSchema: { + repo: z.string(), + number: z.number().int().positive(), + }, + }, + async (args) => { + const { repo, number } = args as { repo: string; number: number }; + try { + const pr = await dataSource.getPRDetails(repo, number); + if (!pr) { + const text = `Pull request #${number} not found in ${repo}.`; + return { content: [{ type: "text" as const, text }] }; + } + const header = `PR #${pr.number}: ${pr.title}`; + const lines = [ + header, + "─".repeat(Math.min(header.length, 80)), + `Repo: ${pr.repoFullName}`, + `Author: ${pr.userLogin}`, + `State: ${pr.state}${pr.draft ? " (draft)" : ""}`, + `Branch: ${pr.headRef} → ${pr.baseRef}`, + ]; + if (pr.reviewDecision) lines.push(`Review Decision: ${pr.reviewDecision}`); + if (pr.checkStatus) lines.push(`Checks: ${pr.checkStatus}`); + if (pr.reviewerLogins.length > 0) lines.push(`Reviewers: ${pr.reviewerLogins.join(", ")}`); + if (pr.assigneeLogins.length > 0) lines.push(`Assignees: ${pr.assigneeLogins.join(", ")}`); + if (pr.labels.length > 0) lines.push(`Labels: ${pr.labels.map((l) => l.name).join(", ")}`); + if (pr.additions || pr.deletions) { + lines.push(`Changes: +${pr.additions} / -${pr.deletions} (${pr.changedFiles} files)`); + } + lines.push(`Comments: ${pr.comments} | Review threads: ${pr.reviewThreads}`); + lines.push(`URL: ${pr.htmlUrl}`); + lines.push(`Updated: ${new Date(pr.updatedAt).toLocaleString()}`); + const text = lines.join("\n") + stalenessLine(); + return { content: [{ type: "text" as const, text }] }; + } catch (err) { + const text = `Error fetching PR details: ${err instanceof Error ? err.message : String(err)}`; + return { content: [{ type: "text" as const, text }], isError: true }; + } + } + ); + + // 6. 
get_rate_limit + server.registerTool( + METHODS.GET_RATE_LIMIT, + { + description: "Show current GitHub API rate limit status", + inputSchema: {}, + }, + async () => { + try { + const rl = await dataSource.getRateLimit(); + const text = formatRateLimit(rl) + stalenessLine(); + return { content: [{ type: "text" as const, text }] }; + } catch (err) { + const text = `Error fetching rate limit: ${err instanceof Error ? err.message : String(err)}`; + return { content: [{ type: "text" as const, text }], isError: true }; + } + } + ); +} diff --git a/mcp/src/ws-relay.ts b/mcp/src/ws-relay.ts new file mode 100644 index 00000000..049a68ff --- /dev/null +++ b/mcp/src/ws-relay.ts @@ -0,0 +1,312 @@ +// ── WebSocket relay server ──────────────────────────────────────────────────── +// Listens on 127.0.0.1:PORT for a single WebSocket connection from the SPA. +// Uses JSON-RPC 2.0 for request/response and notification dispatch. +// +// Security controls: +// - Origin validation +// - maxPayload: 10 MiB +// - try/catch around JSON.parse + +import { WebSocketServer, WebSocket } from "ws"; +import type { IncomingMessage } from "http"; + +// ── Configuration ───────────────────────────────────────────────────────────── + +const DEFAULT_PORT = 9876; +const HEARTBEAT_INTERVAL_MS = 5000; +const HEARTBEAT_TIMEOUT_MS = 3000; +const REQUEST_TIMEOUT_MS = 10000; +const MAX_PAYLOAD_BYTES = 10 * 1024 * 1024; // 10 MiB + +// ── State ───────────────────────────────────────────────────────────────────── + +let _wss: WebSocketServer | null = null; +let _client: WebSocket | null = null; +let _isAlive = false; +let _heartbeatTimer: ReturnType | null = null; +// Store handle so stopHeartbeat() can cancel a pending pong-timeout +let _pongTimeoutTimer: ReturnType | null = null; +let _idCounter = 0; + +interface PendingRequest { + resolve: (value: unknown) => void; + reject: (reason: unknown) => void; + timer: ReturnType; +} + +const _pending = new Map(); +const _notificationHandlers = new Map 
void)[]>(); + +// ── Origin validation ───────────────────────────────────────────────────────── + +const ALLOWED_ORIGINS_DEFAULT = new Set([ + "http://localhost", + "https://localhost", + "http://127.0.0.1", + "https://127.0.0.1", + "https://gh.gordoncode.dev", +]); + +function buildAllowedOrigins(): Set { + const extra = process.env.MCP_RELAY_ALLOWED_ORIGINS; + if (!extra) return ALLOWED_ORIGINS_DEFAULT; + const combined = new Set(ALLOWED_ORIGINS_DEFAULT); + for (const o of extra.split(",")) { + const trimmed = o.trim(); + if (trimmed) combined.add(trimmed); + } + return combined; +} + +// Computed once at module scope — origins don't change at runtime +const ALLOWED_ORIGINS = buildAllowedOrigins(); + +function isOriginAllowed(origin: string | undefined): boolean { + // Non-browser clients (e.g. CLI tools) do not send Origin — allow them. + if (origin === undefined) return true; + + if (ALLOWED_ORIGINS.has(origin)) return true; + + // Allow any localhost/127.0.0.1 origin with any port + try { + const url = new URL(origin); + if (url.hostname === "localhost" || url.hostname === "127.0.0.1") return true; + } catch { + // Not a valid URL — reject + } + return false; +} + +// ── Heartbeat ───────────────────────────────────────────────────────────────── + +function startHeartbeat(): void { + if (_heartbeatTimer) clearInterval(_heartbeatTimer); + _heartbeatTimer = setInterval(() => { + const client = _client; + if (!client || client.readyState !== WebSocket.OPEN) return; + + if (!_isAlive) { + console.error("[mcp/ws] Client stalled (no pong received). Terminating."); + client.terminate(); + return; + } + _isAlive = false; + client.ping(); + + if (_pongTimeoutTimer !== null) clearTimeout(_pongTimeoutTimer); + _pongTimeoutTimer = setTimeout(() => { + _pongTimeoutTimer = null; + if (!_isAlive && _client === client && client.readyState === WebSocket.OPEN) { + console.error("[mcp/ws] Pong timeout. 
Terminating stalled client."); + client.terminate(); + } + }, HEARTBEAT_TIMEOUT_MS); + }, HEARTBEAT_INTERVAL_MS); +} + +function stopHeartbeat(): void { + if (_heartbeatTimer) { + clearInterval(_heartbeatTimer); + _heartbeatTimer = null; + } + if (_pongTimeoutTimer !== null) { + clearTimeout(_pongTimeoutTimer); + _pongTimeoutTimer = null; + } +} + +// ── Pending request cleanup ─────────────────────────────────────────────────── + +function rejectAllPending(reason: string): void { + for (const [id, pending] of _pending) { + clearTimeout(pending.timer); + pending.reject(new Error(reason)); + _pending.delete(id); + } +} + +// ── Message handling ────────────────────────────────────────────────────────── + +function handleMessage(rawData: Buffer | string): void { + let msg: Record; + try { + msg = JSON.parse(rawData.toString()) as Record; + } catch { + console.error("[mcp/ws] Received invalid JSON — ignoring."); + return; + } + + if (msg.jsonrpc !== "2.0") { + console.error("[mcp/ws] Non-JSON-RPC message — ignoring."); + return; + } + + // Response to a pending request (has id, no method) + if ("id" in msg && !("method" in msg)) { + const id = msg.id as number; + const pending = _pending.get(id); + if (!pending) return; + + clearTimeout(pending.timer); + _pending.delete(id); + + if ("error" in msg) { + pending.reject(new Error(String((msg.error as { message?: string })?.message ?? msg.error))); + } else { + pending.resolve(msg.result); + } + return; + } + + // Notification (has method, no id) + if ("method" in msg && !("id" in msg)) { + const method = msg.method as string; + const params = msg.params ?? {}; + const handlers = _notificationHandlers.get(method); + if (handlers) { + for (const h of handlers) { + try { + h(params); + } catch (err) { + console.error(`[mcp/ws] Notification handler error for ${method}:`, err instanceof Error ? 
err.message : String(err)); + } + } + } + return; + } + + console.error("[mcp/ws] Unrecognized message shape — ignoring."); +} + +// ── Public API ──────────────────────────────────────────────────────────────── + +export function isRelayConnected(): boolean { + return _client !== null && _client.readyState === WebSocket.OPEN; +} + +export function sendRelayRequest(method: string, params: unknown): Promise { + if (!isRelayConnected()) { + return Promise.reject(new Error("[mcp/ws] Relay not connected.")); + } + + const id = ++_idCounter; + const message = JSON.stringify({ jsonrpc: "2.0", id, method, params }); + + return new Promise((resolve, reject) => { + const timer = setTimeout(() => { + _pending.delete(id); + reject(new Error(`[mcp/ws] Request timed out: ${method} (id=${id})`)); + }, REQUEST_TIMEOUT_MS); + + _pending.set(id, { resolve, reject, timer }); + + try { + _client!.send(message); + } catch (err) { + clearTimeout(timer); + _pending.delete(id); + reject(err); + } + }); +} + +export function onNotification(method: string, handler: (params: unknown) => void): void { + const handlers = _notificationHandlers.get(method) ?? []; + handlers.push(handler); + _notificationHandlers.set(method, handlers); +} + +export function startWebSocketServer(): WebSocketServer | null { + const port = parseInt(process.env.MCP_WS_PORT ?? String(DEFAULT_PORT), 10); + + function verifyClient( + info: { origin: string; req: IncomingMessage; secure: boolean }, + callback: (res: boolean, code?: number, message?: string) => void + ): void { + const origin = info.req.headers.origin as string | undefined; + if (!isOriginAllowed(origin)) { + console.error(`[mcp/ws] Rejected connection from disallowed origin: ${origin ?? 
"(none)"}`); + callback(false, 403, "Origin not allowed"); + return; + } + if (_client && _client.readyState === WebSocket.OPEN) { + console.error("[mcp/ws] Rejected second connection attempt (code 4001)."); + callback(false, 4001, "Only one client allowed"); + return; + } + callback(true); + } + + try { + const wss = new WebSocketServer({ + host: "127.0.0.1", + port, + maxPayload: MAX_PAYLOAD_BYTES, + verifyClient, + }); + + wss.on("error", (err: NodeJS.ErrnoException) => { + if (err.code === "EADDRINUSE") { + console.error(`[mcp/ws] Port ${port} already in use — continuing without WebSocket relay.`); + } else { + console.error("[mcp/ws] WebSocket server error:", err.message); + } + }); + + wss.on("connection", (ws: WebSocket) => { + console.error(`[mcp/ws] SPA connected on ws://127.0.0.1:${port}`); + _client = ws; + _isAlive = true; + + ws.on("pong", () => { + _isAlive = true; + }); + + ws.on("message", (data: Buffer | string) => { + handleMessage(data); + }); + + ws.on("close", () => { + console.error("[mcp/ws] SPA disconnected."); + rejectAllPending("WebSocket relay disconnected"); + _client = null; + stopHeartbeat(); + }); + + ws.on("error", (err: Error) => { + console.error("[mcp/ws] Client WebSocket error:", err.message); + }); + + startHeartbeat(); + }); + + wss.on("listening", () => { + console.error(`[mcp/ws] WebSocket relay listening on ws://127.0.0.1:${port}`); + }); + + _wss = wss; + return wss; + } catch (err) { + console.error("[mcp/ws] Failed to create WebSocket server:", err instanceof Error ? 
err.message : String(err)); + return null; + } +} + +export function closeWebSocketServer(): Promise { + return new Promise((resolve) => { + stopHeartbeat(); + rejectAllPending("WebSocket relay shutting down"); + + if (_client) { + _client.terminate(); + _client = null; + } + + if (_wss) { + _wss.close(() => resolve()); + _wss = null; + } else { + resolve(); + } + }); +} diff --git a/mcp/tests/data-source.test.ts b/mcp/tests/data-source.test.ts new file mode 100644 index 00000000..e72a33d6 --- /dev/null +++ b/mcp/tests/data-source.test.ts @@ -0,0 +1,758 @@ +// ── Data source unit tests ──────────────────────────────────────────────────── +// Tests OctokitDataSource (with mocked Octokit) and CompositeDataSource +// (fallback logic between WebSocket and Octokit). + +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { + OctokitDataSource, + WebSocketDataSource, + CompositeDataSource, + setCachedConfig, + clearCachedConfig, +} from "../src/data-source.js"; +import type { DataSource } from "../src/data-source.js"; + +// ── Mock ws-relay module ─────────────────────────────────────────────────────── +// isRelayConnected is used by CompositeDataSource.tryBoth() +let _mockIsConnected = false; +let _mockSendRequest: ReturnType; + +vi.mock("../src/ws-relay.js", () => ({ + get isRelayConnected() { + return () => _mockIsConnected; + }, + get sendRelayRequest() { + return (...args: unknown[]) => _mockSendRequest(...args); + }, + onNotification: vi.fn(), + startWebSocketServer: vi.fn(), + closeWebSocketServer: vi.fn().mockResolvedValue(undefined), +})); + +// ── Mock Octokit ─────────────────────────────────────────────────────────────── + +function makeMockOctokit(responses: Map = new Map()) { + return { + request: vi.fn(async (route: string, _params?: Record) => { + if (responses.has(route)) { + return { data: responses.get(route), headers: {} }; + } + throw new Error(`Unexpected request: ${route}`); + }), + }; +} + +function 
makeSearchResponse(items: unknown[], total_count = items.length) { + return { items, total_count }; +} + +function makeUserResponse(login = "testuser") { + return { login }; +} + +function makeRateLimitResponse(limit = 5000, remaining = 4500, reset = Math.floor(Date.now() / 1000) + 3600) { + return { rate: { limit, remaining, reset } }; +} + +function makeWorkflowRunsResponse(runs: unknown[], total_count = runs.length) { + return { workflow_runs: runs, total_count }; +} + +function makeRawRun(overrides: Record = {}) { + return { + id: 1, + name: "CI", + status: "completed", + conclusion: "failure", + event: "push", + workflow_id: 1, + head_sha: "abc123", + head_branch: "main", + run_number: 1, + html_url: "https://github.com/owner/repo/actions/runs/1", + created_at: "2024-01-10T08:00:00Z", + updated_at: "2024-01-12T14:30:00Z", + run_started_at: "2024-01-10T08:00:00Z", + run_attempt: 1, + display_title: "CI Build", + actor: { login: "octocat" }, + pull_requests: [], + jobs_url: "https://api.github.com/repos/owner/repo/actions/runs/1/jobs", + ...overrides, + }; +} + +function makeRawPR(overrides: Record = {}) { + return { + id: 1, + number: 1, + title: "Test PR", + state: "open", + draft: false, + html_url: "https://github.com/owner/repo/pull/1", + created_at: "2024-01-10T08:00:00Z", + updated_at: "2024-01-12T14:30:00Z", + user: { login: "octocat", avatar_url: "https://github.com/images/octocat.gif" }, + head: { sha: "abc123", ref: "feature-branch" }, + base: { ref: "main" }, + assignees: [], + requested_reviewers: [], + labels: [], + additions: 50, + deletions: 10, + changed_files: 3, + comments: 2, + review_comments: 1, + ...overrides, + }; +} + +// ── OctokitDataSource tests ──────────────────────────────────────────────────── + +describe("OctokitDataSource", () => { + beforeEach(() => { + // Reset cached config before each test + setCachedConfig({ + selectedRepos: [{ owner: "owner", name: "repo", fullName: "owner/repo" }], + trackedUsers: [], + upstreamRepos: [], 
+ monitoredRepos: [], + }); + }); + + afterEach(() => { + vi.restoreAllMocks(); + // Clear cached config + setCachedConfig({ selectedRepos: [], trackedUsers: [], upstreamRepos: [], monitoredRepos: [] }); + }); + + describe("getOpenPRs", () => { + it("returns PRs from search results", async () => { + const searchItem = { + id: 1, + number: 42, + title: "My Feature", + state: "open", + draft: false, + html_url: "https://github.com/owner/repo/pull/42", + created_at: "2024-01-10T08:00:00Z", + updated_at: "2024-01-12T14:30:00Z", + user: { login: "alice", avatar_url: "https://github.com/alice.png" }, + repository_url: "https://api.github.com/repos/owner/repo", + labels: [], + assignees: [], + pull_request: { merged_at: null }, + }; + + const responses = new Map([ + ["GET /user", makeUserResponse()], + ["GET /search/issues", makeSearchResponse([searchItem])], + ]); + const octokit = makeMockOctokit(responses); + const ds = new OctokitDataSource(octokit); + const prs = await ds.getOpenPRs(); + + expect(prs).toHaveLength(1); + expect(prs[0].number).toBe(42); + expect(prs[0].title).toBe("My Feature"); + expect(prs[0].repoFullName).toBe("owner/repo"); + expect(prs[0].userLogin).toBe("alice"); + }); + + it("filters out non-PR items from search results", async () => { + // Item without pull_request field is an issue + const issueItem = { + id: 2, + number: 10, + title: "Issue", + state: "open", + html_url: "https://github.com/owner/repo/issues/10", + created_at: "2024-01-10T08:00:00Z", + updated_at: "2024-01-12T14:30:00Z", + user: { login: "bob", avatar_url: "" }, + repository_url: "https://api.github.com/repos/owner/repo", + labels: [], + assignees: [], + // no pull_request field + }; + + const responses = new Map([ + ["GET /user", makeUserResponse()], + ["GET /search/issues", makeSearchResponse([issueItem])], + ]); + const octokit = makeMockOctokit(responses); + const ds = new OctokitDataSource(octokit); + const prs = await ds.getOpenPRs(); + + expect(prs).toHaveLength(0); + 
});
+
+    it("accepts explicit repo parameter and skips cached config", async () => {
+      // Clear cached config to verify explicit param works without it
+      setCachedConfig({ selectedRepos: [], trackedUsers: [], upstreamRepos: [], monitoredRepos: [] });
+
+      const responses = new Map([
+        ["GET /user", makeUserResponse()],
+        ["GET /search/issues", makeSearchResponse([])],
+      ]);
+      const octokit = makeMockOctokit(responses);
+      const ds = new OctokitDataSource(octokit);
+      const prs = await ds.getOpenPRs("myorg/myrepo");
+
+      expect(prs).toEqual([]);
+      // The request should have been made with the explicit repo
+      expect(octokit.request).toHaveBeenCalledWith("GET /search/issues", expect.objectContaining({
+        q: expect.stringContaining("repo:myorg/myrepo"),
+      }));
+    });
+
+    it("returns empty array when config has no repos and no explicit repo", async () => {
+      // setCachedConfig with empty selectedRepos → resolveRepos returns []
+      setCachedConfig({ selectedRepos: [], trackedUsers: [], upstreamRepos: [], monitoredRepos: [] });
+      const responses = new Map([["GET /user", makeUserResponse()]]);
+      const octokit = makeMockOctokit(responses);
+      const ds = new OctokitDataSource(octokit);
+      const prs = await ds.getOpenPRs();
+      expect(prs).toEqual([]);
+    });
+
+    it("throws descriptive error when _cachedConfig is null and no explicit repo", async () => {
+      clearCachedConfig();
+      const octokit = makeMockOctokit(new Map([["GET /user", makeUserResponse()]]));
+      const ds = new OctokitDataSource(octokit);
+      await expect(ds.getOpenPRs()).rejects.toThrow(
+        "No repository configuration available"
+      );
+    });
+
+    it("rejects invalid repo format", async () => {
+      const octokit = makeMockOctokit(new Map([["GET /user", makeUserResponse()]]));
+      const ds = new OctokitDataSource(octokit);
+
+      await expect(ds.getOpenPRs("invalid-repo-without-slash")).rejects.toThrow(
+        "Invalid repo format"
+      );
+    });
+
+    it("filters by status=draft", async () => {
+      const draftPR = {
+        id: 1,
+        number: 1,
+        title: "WIP",
+        state: "open",
+        draft: true,
+        html_url: "https://github.com/owner/repo/pull/1",
+        created_at: "2024-01-10T08:00:00Z",
+        updated_at: "2024-01-12T14:30:00Z",
+        user: { login: "alice", avatar_url: "" },
+        repository_url: "https://api.github.com/repos/owner/repo",
+        labels: [],
+        assignees: [],
+        pull_request: { merged_at: null },
+      };
+      const readyPR = { ...draftPR, id: 2, number: 2, draft: false, title: "Ready" };
+
+      const responses = new Map([
+        ["GET /user", makeUserResponse()],
+        ["GET /search/issues", makeSearchResponse([draftPR, readyPR])],
+      ]);
+      const octokit = makeMockOctokit(responses);
+      const ds = new OctokitDataSource(octokit);
+      const prs = await ds.getOpenPRs(undefined, "draft");
+
+      expect(prs).toHaveLength(1);
+      expect(prs[0].draft).toBe(true);
+    });
+
+    it("filters by status=approved", async () => {
+      // Note: REST search doesn't return reviewDecision, so approved filter returns empty
+      const responses = new Map([
+        ["GET /user", makeUserResponse()],
+        ["GET /search/issues", makeSearchResponse([{
+          id: 1, number: 1, title: "Approved", state: "open", draft: false,
+          html_url: "https://github.com/owner/repo/pull/1",
+          created_at: "2024-01-10T08:00:00Z", updated_at: "2024-01-12T14:30:00Z",
+          user: { login: "alice", avatar_url: "" },
+          repository_url: "https://api.github.com/repos/owner/repo",
+          labels: [], assignees: [], pull_request: { merged_at: null },
+        }])],
+      ]);
+      const octokit = makeMockOctokit(responses);
+      const ds = new OctokitDataSource(octokit);
+      // reviewDecision is null from REST search, so "approved" filter returns empty
+      const prs = await ds.getOpenPRs(undefined, "approved");
+      expect(prs).toHaveLength(0);
+    });
+
+    it("status=needs_review uses review-requested: qualifier and returns results", async () => {
+      const prItem = {
+        id: 1,
+        number: 7,
+        title: "Review me",
+        state: "open",
+        draft: false,
+        html_url: "https://github.com/owner/repo/pull/7",
+        created_at: "2024-01-10T08:00:00Z",
+        updated_at: "2024-01-12T14:30:00Z",
+        user: { login: "alice", avatar_url: "" },
+        repository_url: "https://api.github.com/repos/owner/repo",
+        labels: [],
+        assignees: [],
+        pull_request: { merged_at: null },
+      };
+
+      const responses = new Map([
+        ["GET /user", makeUserResponse()],
+        ["GET /search/issues", makeSearchResponse([prItem])],
+      ]);
+      const octokit = makeMockOctokit(responses);
+      const ds = new OctokitDataSource(octokit);
+      const prs = await ds.getOpenPRs(undefined, "needs_review");
+
+      // The search query must use review-requested: — not involves:
+      expect(octokit.request).toHaveBeenCalledWith("GET /search/issues", expect.objectContaining({
+        q: expect.stringContaining("review-requested:testuser"),
+      }));
+      expect(octokit.request).toHaveBeenCalledWith("GET /search/issues", expect.not.objectContaining({
+        q: expect.stringContaining("involves:testuser"),
+      }));
+
+      // Results are returned as-is — no post-filter for needs_review
+      expect(prs).toHaveLength(1);
+      expect(prs[0].number).toBe(7);
+      expect(prs[0].title).toBe("Review me");
+    });
+
+    it("status=failing returns empty (REST search lacks checkStatus data)", async () => {
+      const prItem = {
+        id: 1,
+        number: 3,
+        title: "Failing checks",
+        state: "open",
+        draft: false,
+        html_url: "https://github.com/owner/repo/pull/3",
+        created_at: "2024-01-10T08:00:00Z",
+        updated_at: "2024-01-12T14:30:00Z",
+        user: { login: "alice", avatar_url: "" },
+        repository_url: "https://api.github.com/repos/owner/repo",
+        labels: [],
+        assignees: [],
+        pull_request: { merged_at: null },
+      };
+
+      const responses = new Map([
+        ["GET /user", makeUserResponse()],
+        ["GET /search/issues", makeSearchResponse([prItem])],
+      ]);
+      const octokit = makeMockOctokit(responses);
+      const ds = new OctokitDataSource(octokit);
+      // checkStatus is null from REST search, so "failing" post-filter returns empty
+      const prs = await ds.getOpenPRs(undefined, "failing");
+      expect(prs).toHaveLength(0);
+    });
+  });
+
+  describe("getOpenIssues", () => {
+    it("returns issues from search results (excludes PRs)", async () => {
+      const issueItem = {
+        id: 3,
+        number: 15,
+        title: "Bug report",
+        state: "open",
+        html_url: "https://github.com/owner/repo/issues/15",
+        created_at: "2024-01-10T08:00:00Z",
+        updated_at: "2024-01-12T14:30:00Z",
+        user: { login: "carol", avatar_url: "" },
+        repository_url: "https://api.github.com/repos/owner/repo",
+        labels: [{ name: "bug", color: "d73a4a" }],
+        assignees: [],
+        // No pull_request field — it's an issue
+      };
+      const prItem = {
+        ...issueItem,
+        id: 4,
+        number: 16,
+        title: "A PR",
+        pull_request: { merged_at: null },
+      };
+
+      const responses = new Map([
+        ["GET /user", makeUserResponse()],
+        ["GET /search/issues", makeSearchResponse([issueItem, prItem])],
+      ]);
+      const octokit = makeMockOctokit(responses);
+      const ds = new OctokitDataSource(octokit);
+      const issues = await ds.getOpenIssues();
+
+      expect(issues).toHaveLength(1);
+      expect(issues[0].number).toBe(15);
+      expect(issues[0].title).toBe("Bug report");
+      expect(issues[0].labels).toEqual([{ name: "bug", color: "d73a4a" }]);
+    });
+
+    it("passes explicit repo to search query", async () => {
+      const responses = new Map([
+        ["GET /user", makeUserResponse()],
+        ["GET /search/issues", makeSearchResponse([])],
+      ]);
+      const octokit = makeMockOctokit(responses);
+      const ds = new OctokitDataSource(octokit);
+      await ds.getOpenIssues("testorg/testrepo");
+
+      expect(octokit.request).toHaveBeenCalledWith("GET /search/issues", expect.objectContaining({
+        q: expect.stringContaining("repo:testorg/testrepo"),
+      }));
+    });
+  });
+
+  describe("getFailingActions", () => {
+    it("returns in-progress and failed runs", async () => {
+      const failedRun = makeRawRun({ id: 1, status: "completed", conclusion: "failure" });
+      const inProgressRun = makeRawRun({ id: 2, status: "in_progress", conclusion: null });
+
+      // Route responses keyed on the `status` query param so both fetches are covered.
+      const requestMock = vi.fn()
+        .mockImplementation(async (route: string, params?: Record<string, unknown>) => {
+          if (route === "GET /repos/{owner}/{repo}/actions/runs") {
+            const status = params?.status;
+            if (status === "in_progress") {
+              return { data: makeWorkflowRunsResponse([inProgressRun]), headers: {} };
+            } else if (status === "failure") {
+              return { data: makeWorkflowRunsResponse([failedRun]), headers: {} };
+            }
+          }
+          return { data: { items: [], total_count: 0 }, headers: {} };
+        });
+
+      const ds = new OctokitDataSource({ request: requestMock });
+      const runs = await ds.getFailingActions();
+
+      expect(runs.length).toBe(2);
+      const conclusions = runs.map((r) => r.conclusion);
+      expect(conclusions).toContain("failure");
+      expect(conclusions).toContain(null);
+    });
+
+    it("returns empty array when config has no repos and no explicit repo", async () => {
+      setCachedConfig({ selectedRepos: [], trackedUsers: [], upstreamRepos: [], monitoredRepos: [] });
+      const ds = new OctokitDataSource({ request: vi.fn() });
+      const runs = await ds.getFailingActions();
+      expect(runs).toEqual([]);
+    });
+  });
+
+  describe("getPRDetails", () => {
+    it("returns PR details for valid PR", async () => {
+      const rawPR = makeRawPR({ number: 42, title: "Feature PR" });
+      const responses = new Map([
+        ["GET /repos/{owner}/{repo}/pulls/{pull_number}", rawPR],
+      ]);
+      const octokit = makeMockOctokit(responses);
+      const ds = new OctokitDataSource(octokit);
+      const pr = await ds.getPRDetails("owner/repo", 42);
+
+      expect(pr).not.toBeNull();
+      expect(pr!.number).toBe(42);
+      expect(pr!.title).toBe("Feature PR");
+      expect(pr!.headRef).toBe("feature-branch");
+      expect(pr!.baseRef).toBe("main");
+      expect(pr!.additions).toBe(50);
+      expect(pr!.deletions).toBe(10);
+      expect(pr!.changedFiles).toBe(3);
+      expect(pr!.comments).toBe(3); // 2 issue + 1 review
+      expect(pr!.enriched).toBe(true);
+    });
+
+    it("returns null for 404 response", async () => {
+      const octokit = {
+        request: vi.fn().mockRejectedValue(Object.assign(new Error("Not Found"), { status: 404 })),
+      };
+      const ds = new OctokitDataSource(octokit);
+      const pr = await
ds.getPRDetails("owner/repo", 9999); + expect(pr).toBeNull(); + }); + + it("throws for non-404 errors", async () => { + const octokit = { + request: vi.fn().mockRejectedValue(Object.assign(new Error("Server Error"), { status: 500 })), + }; + const ds = new OctokitDataSource(octokit); + await expect(ds.getPRDetails("owner/repo", 1)).rejects.toThrow("Server Error"); + }); + + it("rejects invalid repo format", async () => { + const ds = new OctokitDataSource({ request: vi.fn() }); + await expect(ds.getPRDetails("no-slash", 1)).rejects.toThrow("Invalid repo format"); + }); + }); + + describe("getRateLimit", () => { + it("returns parsed rate limit info", async () => { + const resetEpoch = Math.floor(Date.now() / 1000) + 3600; + const responses = new Map([["GET /rate_limit", makeRateLimitResponse(5000, 4200, resetEpoch)]]); + const octokit = makeMockOctokit(responses); + const ds = new OctokitDataSource(octokit); + const rl = await ds.getRateLimit(); + + expect(rl.limit).toBe(5000); + expect(rl.remaining).toBe(4200); + expect(rl.resetAt).toBeInstanceOf(Date); + expect(rl.resetAt.getTime()).toBe(resetEpoch * 1000); + }); + }); + + describe("getDashboardSummary", () => { + it("returns zero counts when no repos are configured", async () => { + setCachedConfig({ selectedRepos: [], trackedUsers: [], upstreamRepos: [], monitoredRepos: [] }); + const octokit = makeMockOctokit(new Map([["GET /user", makeUserResponse()]])); + const ds = new OctokitDataSource(octokit); + const summary = await ds.getDashboardSummary("involves_me"); + + expect(summary.openPRCount).toBe(0); + expect(summary.openIssueCount).toBe(0); + expect(summary.failingRunCount).toBe(0); + expect(summary.needsReviewCount).toBe(0); + expect(summary.approvedUnmergedCount).toBe(0); + }); + + it("constructs involves_me query with user login", async () => { + const requestMock = vi.fn().mockImplementation(async (route: string) => { + if (route === "GET /user") return { data: { login: "testuser" }, headers: {} }; + if 
(route === "GET /search/issues") return { data: { items: [], total_count: 0 }, headers: {} }; + if (route === "GET /repos/{owner}/{repo}/actions/runs") return { data: { workflow_runs: [], total_count: 0 }, headers: {} }; + throw new Error(`Unexpected: ${route}`); + }); + + const ds = new OctokitDataSource({ request: requestMock }); + await ds.getDashboardSummary("involves_me"); + + const searchCalls = requestMock.mock.calls.filter( + ([route]: [string]) => route === "GET /search/issues" + ); + const prCall = searchCalls.find(([, params]: [string, Record]) => + typeof params?.q === "string" && (params.q as string).includes("is:pr") + ); + expect(prCall).toBeDefined(); + expect(prCall![1].q).toContain("involves:testuser"); + }); + + it("constructs all-scope query without involves filter", async () => { + const requestMock = vi.fn().mockImplementation(async (route: string) => { + if (route === "GET /user") return { data: { login: "testuser" }, headers: {} }; + if (route === "GET /search/issues") return { data: { items: [], total_count: 0 }, headers: {} }; + if (route === "GET /repos/{owner}/{repo}/actions/runs") return { data: { workflow_runs: [], total_count: 0 }, headers: {} }; + throw new Error(`Unexpected: ${route}`); + }); + + const ds = new OctokitDataSource({ request: requestMock }); + await ds.getDashboardSummary("all"); + + const searchCalls = requestMock.mock.calls.filter( + ([route]: [string]) => route === "GET /search/issues" + ); + const prCall = searchCalls.find(([, params]: [string, Record]) => + typeof params?.q === "string" && (params.q as string).includes("is:pr") && + !(params.q as string).includes("review-requested") + ); + expect(prCall).toBeDefined(); + expect(prCall![1].q).not.toContain("involves:"); + }); + }); + + describe("getConfig", () => { + it("returns the cached config", async () => { + const config = { + selectedRepos: [{ owner: "owner", name: "repo", fullName: "owner/repo" }], + trackedUsers: [], + upstreamRepos: [], + monitoredRepos: 
[], + }; + setCachedConfig(config); + const ds = new OctokitDataSource({ request: vi.fn() }); + const result = await ds.getConfig(); + expect(result).toEqual(config); + }); + + it("returns null when no config is set", async () => { + clearCachedConfig(); + const ds = new OctokitDataSource({ request: vi.fn() }); + const result = await ds.getConfig(); + expect(result).toBeNull(); + }); + }); +}); + +// ── WebSocketDataSource tests ────────────────────────────────────────────────── + +describe("WebSocketDataSource", () => { + beforeEach(() => { + _mockIsConnected = true; + _mockSendRequest = vi.fn(); + }); + + afterEach(() => { + vi.restoreAllMocks(); + _mockIsConnected = false; + }); + + describe("getRateLimit", () => { + it("unwraps nested { core: { limit, remaining, resetAt } } shape", async () => { + const resetAt = new Date("2024-01-12T15:00:00Z").toISOString(); + _mockSendRequest = vi.fn().mockResolvedValue({ + core: { limit: 5000, remaining: 3200, resetAt }, + graphql: { limit: 1000, remaining: 950, resetAt }, + }); + + const ds = new WebSocketDataSource(); + const rl = await ds.getRateLimit(); + + expect(rl.limit).toBe(5000); + expect(rl.remaining).toBe(3200); + expect(rl.resetAt).toBeInstanceOf(Date); + expect(rl.resetAt.toISOString()).toBe(resetAt); + }); + + it("falls back to flat { limit, remaining, resetAt } shape when core is absent", async () => { + const resetAt = new Date("2024-01-12T16:00:00Z").toISOString(); + _mockSendRequest = vi.fn().mockResolvedValue({ + limit: 5000, + remaining: 4800, + resetAt, + }); + + const ds = new WebSocketDataSource(); + const rl = await ds.getRateLimit(); + + expect(rl.limit).toBe(5000); + expect(rl.remaining).toBe(4800); + expect(rl.resetAt).toBeInstanceOf(Date); + expect(rl.resetAt.toISOString()).toBe(resetAt); + }); + }); +}); + +// ── CompositeDataSource tests ────────────────────────────────────────────────── + +describe("CompositeDataSource", () => { + beforeEach(() => { + _mockIsConnected = false; + 
_mockSendRequest = vi.fn(); + setCachedConfig({ + selectedRepos: [{ owner: "owner", name: "repo", fullName: "owner/repo" }], + trackedUsers: [], + upstreamRepos: [], + monitoredRepos: [], + }); + }); + + afterEach(() => { + vi.restoreAllMocks(); + _mockIsConnected = false; + }); + + function makeOctokitDs(overrides: Partial = {}): DataSource { + return { + getDashboardSummary: vi.fn().mockResolvedValue({ + openPRCount: 1, openIssueCount: 1, failingRunCount: 0, needsReviewCount: 0, approvedUnmergedCount: 0, + }), + getOpenPRs: vi.fn().mockResolvedValue([]), + getOpenIssues: vi.fn().mockResolvedValue([]), + getFailingActions: vi.fn().mockResolvedValue([]), + getPRDetails: vi.fn().mockResolvedValue(null), + getRateLimit: vi.fn().mockResolvedValue({ limit: 5000, remaining: 5000, resetAt: new Date() }), + getConfig: vi.fn().mockResolvedValue(null), + getRepos: vi.fn().mockResolvedValue([]), + ...overrides, + }; + } + + it("uses Octokit when relay is disconnected", async () => { + _mockIsConnected = false; + const octokitDs = makeOctokitDs(); + const wsDs = new WebSocketDataSource(); + const composite = new CompositeDataSource(wsDs, octokitDs); + + const result = await composite.getOpenPRs(); + expect(octokitDs.getOpenPRs).toHaveBeenCalled(); + expect(result).toEqual([]); + }); + + it("uses relay when connected and relay succeeds", async () => { + _mockIsConnected = true; + _mockSendRequest = vi.fn().mockResolvedValue([{ id: 999, number: 1, title: "Relay PR" }]); + + const octokitDs = makeOctokitDs(); + const wsDs = new WebSocketDataSource(); + const composite = new CompositeDataSource(wsDs, octokitDs); + + const result = await composite.getOpenPRs(); + expect(_mockSendRequest).toHaveBeenCalled(); + expect(octokitDs.getOpenPRs).not.toHaveBeenCalled(); + expect(result).toHaveLength(1); + expect((result[0] as { id: number }).id).toBe(999); + }); + + it("falls back to Octokit when relay is connected but request fails", async () => { + _mockIsConnected = true; + 
_mockSendRequest = vi.fn().mockRejectedValue(new Error("relay timeout")); + + const fallbackPRs = [{ id: 1, number: 1, title: "Octokit PR" }]; + const octokitDs = makeOctokitDs({ + getOpenPRs: vi.fn().mockResolvedValue(fallbackPRs), + }); + const wsDs = new WebSocketDataSource(); + const composite = new CompositeDataSource(wsDs, octokitDs); + + const result = await composite.getOpenPRs(); + expect(_mockSendRequest).toHaveBeenCalled(); + expect(octokitDs.getOpenPRs).toHaveBeenCalled(); + expect(result).toEqual(fallbackPRs); + }); + + it("falls back to Octokit for getDashboardSummary when relay fails", async () => { + _mockIsConnected = true; + _mockSendRequest = vi.fn().mockRejectedValue(new Error("relay down")); + + const expectedSummary = { + openPRCount: 5, openIssueCount: 3, failingRunCount: 1, needsReviewCount: 2, approvedUnmergedCount: 0, + }; + const octokitDs = makeOctokitDs({ + getDashboardSummary: vi.fn().mockResolvedValue(expectedSummary), + }); + const wsDs = new WebSocketDataSource(); + const composite = new CompositeDataSource(wsDs, octokitDs); + + const result = await composite.getDashboardSummary("involves_me"); + expect(result).toEqual(expectedSummary); + expect(octokitDs.getDashboardSummary).toHaveBeenCalled(); + }); + + it("throws when both relay fails and Octokit throws", async () => { + _mockIsConnected = true; + _mockSendRequest = vi.fn().mockRejectedValue(new Error("relay down")); + + const octokitDs = makeOctokitDs({ + getOpenPRs: vi.fn().mockRejectedValue(new Error("No GITHUB_TOKEN")), + }); + const wsDs = new WebSocketDataSource(); + const composite = new CompositeDataSource(wsDs, octokitDs); + + await expect(composite.getOpenPRs()).rejects.toThrow("No GITHUB_TOKEN"); + }); + + it("uses Octokit directly for all methods when relay is disconnected", async () => { + _mockIsConnected = false; + + const octokitDs = makeOctokitDs(); + const wsDs = new WebSocketDataSource(); + const composite = new CompositeDataSource(wsDs, octokitDs); + + await 
composite.getOpenIssues(); + await composite.getFailingActions(); + await composite.getRateLimit(); + + expect(octokitDs.getOpenIssues).toHaveBeenCalled(); + expect(octokitDs.getFailingActions).toHaveBeenCalled(); + expect(octokitDs.getRateLimit).toHaveBeenCalled(); + expect(_mockSendRequest).not.toHaveBeenCalled(); + }); +}); diff --git a/mcp/tests/integration.test.ts b/mcp/tests/integration.test.ts new file mode 100644 index 00000000..7c0c7254 --- /dev/null +++ b/mcp/tests/integration.test.ts @@ -0,0 +1,492 @@ +// ── Integration tests ──────────────────────────────────────────────────────── +// Task 6: Tests that exercise real components together (not mocked). +// Covers: WebSocket relay data flow, fallback mode, edge cases. + +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { WebSocket, WebSocketServer } from "ws"; +import { + startWebSocketServer, + closeWebSocketServer, + isRelayConnected, + sendRelayRequest, + onNotification, +} from "../src/ws-relay.js"; +import { + WebSocketDataSource, + CompositeDataSource, + OctokitDataSource, + setCachedConfig, +} from "../src/data-source.js"; +import type { DataSource } from "../src/data-source.js"; +import { METHODS, NOTIFICATIONS } from "../../src/shared/protocol.js"; +import { makeIssue, makePullRequest, makeWorkflowRun } from "../../tests/helpers/factories.js"; + +// ── Helpers ──────────────────────────────────────────────────────────────────── + +function waitForEvent( + emitter: { once: (event: string, cb: (...args: unknown[]) => void) => void }, + event: string, + timeout = 3000 +): Promise { + return new Promise((resolve, reject) => { + const t = setTimeout(() => reject(new Error(`Timeout waiting for '${event}'`)), timeout); + emitter.once(event, (...args: unknown[]) => { clearTimeout(t); resolve(args); }); + }); +} + +function waitForListening(wss: WebSocketServer): Promise { + return new Promise((resolve, reject) => { + const addr = wss.address(); + if (addr && typeof addr 
=== "object") return resolve(addr.port); + const t = setTimeout(() => reject(new Error("Timeout waiting for listening")), 3000); + wss.on("listening", () => { + clearTimeout(t); + const a = wss.address(); + if (a && typeof a === "object") resolve(a.port); + else reject(new Error("Server has no address after listening")); + }); + wss.on("error", (e) => { clearTimeout(t); reject(e); }); + }); +} + +function waitForOpen(ws: WebSocket): Promise { + return new Promise((resolve, reject) => { + if (ws.readyState === WebSocket.OPEN) return resolve(); + const t = setTimeout(() => reject(new Error("Timeout waiting for open")), 3000); + ws.on("open", () => { clearTimeout(t); resolve(); }); + ws.on("error", (e) => { clearTimeout(t); reject(e); }); + }); +} + +function waitForClose(ws: WebSocket, timeout = 3000): Promise { + return new Promise((resolve, reject) => { + if (ws.readyState === WebSocket.CLOSED) return resolve(); + const t = setTimeout(() => reject(new Error("Timeout waiting for close")), timeout); + ws.once("close", () => { clearTimeout(t); resolve(); }); + }); +} + +function waitForMessage(ws: WebSocket, timeout = 3000): Promise { + return new Promise((resolve, reject) => { + const t = setTimeout(() => reject(new Error("Timeout waiting for message")), timeout); + ws.once("message", (data) => { clearTimeout(t); resolve(data.toString()); }); + }); +} + +async function waitForCondition( + condition: () => boolean, + timeout = 2000, + interval = 10 +): Promise { + const deadline = Date.now() + timeout; + while (!condition()) { + if (Date.now() > deadline) throw new Error("Timeout waiting for condition"); + await new Promise((r) => setTimeout(r, interval)); + } +} + +/** + * Round-trip sentinel: sends a relay request from the server to the connected + * client and waits for the client to respond. By the time this resolves, the + * server has processed every message that arrived before the sentinel. 
+ */ +async function roundTripSentinel(ws: WebSocket): Promise { + const sentinelPromise = sendRelayRequest("__sentinel__", {}); + const raw = await waitForMessage(ws); + const msg = JSON.parse(raw) as { id: number }; + ws.send(JSON.stringify({ jsonrpc: "2.0", id: msg.id, result: null })); + await sentinelPromise; +} + +// ── Test setup ────────────────────────────────────────────────────────────────── + +describe("Integration: WebSocket relay data flow", () => { + let wss: WebSocketServer | null; + let client: WebSocket | null = null; + + beforeEach(() => { + vi.stubEnv("MCP_WS_PORT", "0"); + wss = startWebSocketServer(); + }); + + afterEach(async () => { + if (client && client.readyState !== WebSocket.CLOSED) { + client.close(); + client = null; + } + await closeWebSocketServer(); + vi.unstubAllEnvs(); + }); + + it("sends JSON-RPC request to connected client and receives response", async () => { + if (!wss) throw new Error("Server not started"); + const port = await waitForListening(wss); + client = new WebSocket(`ws://127.0.0.1:${port}`); + await waitForOpen(client); + + // Client acts as SPA — responds to JSON-RPC requests + client.on("message", (data) => { + const req = JSON.parse(data.toString()); + if (req.method === METHODS.GET_RATE_LIMIT) { + client!.send(JSON.stringify({ + jsonrpc: "2.0", + id: req.id, + result: { + core: { limit: 5000, remaining: 4999, resetAt: "2026-04-07T20:00:00Z" }, + graphql: { limit: 5000, remaining: 4998, resetAt: "2026-04-07T20:00:00Z" }, + }, + })); + } + }); + + // Wait for the server's connection handler to register the client + await waitForCondition(() => isRelayConnected()); + expect(isRelayConnected()).toBe(true); + + // Send a request through the relay + const result = await sendRelayRequest(METHODS.GET_RATE_LIMIT, {}); + expect(result).toBeDefined(); + expect((result as { core: { limit: number } }).core.limit).toBe(5000); + }); + + it("receives config_update notifications from client", async () => { + if (!wss) throw new 
Error("Server not started"); + const port = await waitForListening(wss); + client = new WebSocket(`ws://127.0.0.1:${port}`); + await waitForOpen(client); + + const received = vi.fn(); + const notificationReceived = new Promise((resolve) => { + onNotification(NOTIFICATIONS.CONFIG_UPDATE, (params) => { + received(params); + resolve(); + }); + }); + + // Client sends a config_update notification (no id = notification) + client.send(JSON.stringify({ + jsonrpc: "2.0", + method: NOTIFICATIONS.CONFIG_UPDATE, + params: { + selectedRepos: [{ owner: "test", name: "repo", fullName: "test/repo" }], + trackedUsers: [], + upstreamRepos: [], + monitoredRepos: [], + }, + })); + + // Wait for the notification handler to be invoked + await notificationReceived; + expect(received).toHaveBeenCalledOnce(); + const params = received.mock.calls[0][0] as { selectedRepos: unknown[] }; + expect(params.selectedRepos).toHaveLength(1); + }); + + it("handles client disconnect gracefully — pending requests reject", async () => { + if (!wss) throw new Error("Server not started"); + const port = await waitForListening(wss); + client = new WebSocket(`ws://127.0.0.1:${port}`); + await waitForOpen(client); + await waitForCondition(() => isRelayConnected()); + + // Send a request but don't respond — then disconnect + const promise = sendRelayRequest(METHODS.GET_OPEN_PRS, {}); + client.close(); + + await expect(promise).rejects.toThrow(); + expect(isRelayConnected()).toBe(false); + }); + + // Note: request timeout (10s) is tested in ws-relay.test.ts with proper mocking. + // Mixing vi.useFakeTimers with real WebSocket connections + heartbeat timers + // causes stale timer interference — not suitable for integration tests. 
+}); + +describe("Integration: WebSocketDataSource through relay", () => { + let wss: WebSocketServer | null; + let client: WebSocket | null = null; + let wsDs: WebSocketDataSource; + + beforeEach(() => { + vi.stubEnv("MCP_WS_PORT", "0"); + wss = startWebSocketServer(); + wsDs = new WebSocketDataSource(); + }); + + afterEach(async () => { + if (client && client.readyState !== WebSocket.CLOSED) { + client.close(); + client = null; + } + await closeWebSocketServer(); + vi.unstubAllEnvs(); + }); + + it("getOpenPRs returns PRs from relay client", async () => { + if (!wss) throw new Error("Server not started"); + const port = await waitForListening(wss); + client = new WebSocket(`ws://127.0.0.1:${port}`); + await waitForOpen(client); + + const mockPRs = [makePullRequest({ title: "Fix auth bug", repoFullName: "acme/app" })]; + + client.on("message", (data) => { + const req = JSON.parse(data.toString()); + if (req.method === METHODS.GET_OPEN_PRS) { + client!.send(JSON.stringify({ + jsonrpc: "2.0", + id: req.id, + result: mockPRs, + })); + } + }); + + await waitForCondition(() => isRelayConnected()); + const prs = await wsDs.getOpenPRs(); + expect(prs).toHaveLength(1); + expect(prs[0].title).toBe("Fix auth bug"); + }); + + it("getConfig returns config pushed via notification", async () => { + if (!wss) throw new Error("Server not started"); + const port = await waitForListening(wss); + client = new WebSocket(`ws://127.0.0.1:${port}`); + await waitForOpen(client); + + client.on("message", (data) => { + const req = JSON.parse(data.toString()); + if (req.method === METHODS.GET_CONFIG) { + client!.send(JSON.stringify({ + jsonrpc: "2.0", + id: req.id, + result: { + selectedRepos: [{ owner: "acme", name: "app", fullName: "acme/app" }], + trackedUsers: [], + upstreamRepos: [], + monitoredRepos: [], + }, + })); + } + }); + + await waitForCondition(() => isRelayConnected()); + const config = await wsDs.getConfig(); + expect(config).toBeDefined(); + expect((config as { 
selectedRepos: unknown[] }).selectedRepos).toHaveLength(1); + }); +}); + +describe("Integration: CompositeDataSource fallback", () => { + let wss: WebSocketServer | null; + + beforeEach(() => { + vi.stubEnv("MCP_WS_PORT", "0"); + wss = startWebSocketServer(); + }); + + afterEach(async () => { + await closeWebSocketServer(); + vi.unstubAllEnvs(); + }); + + it("falls back to Octokit when relay is disconnected", async () => { + // No client connected — relay is disconnected + expect(isRelayConnected()).toBe(false); + + const mockFallback: DataSource = { + getDashboardSummary: vi.fn().mockResolvedValue({ + openPRCount: 5, openIssueCount: 3, failingRunCount: 1, + needsReviewCount: 2, approvedUnmergedCount: 1, + }), + getOpenPRs: vi.fn().mockResolvedValue([]), + getOpenIssues: vi.fn().mockResolvedValue([]), + getFailingActions: vi.fn().mockResolvedValue([]), + getPRDetails: vi.fn().mockResolvedValue(null), + getRateLimit: vi.fn().mockResolvedValue({ limit: 5000, remaining: 4999, resetAt: new Date() }), + getConfig: vi.fn().mockResolvedValue(null), + getRepos: vi.fn().mockResolvedValue([]), + }; + + const wsDs = new WebSocketDataSource(); + const composite = new CompositeDataSource(wsDs, mockFallback); + + const summary = await composite.getDashboardSummary("involves_me"); + expect(summary.openPRCount).toBe(5); + expect(mockFallback.getDashboardSummary).toHaveBeenCalledOnce(); + }); + + it("uses relay when connected, skipping Octokit", async () => { + if (!wss) throw new Error("Server not started"); + const port = await waitForListening(wss); + const client = new WebSocket(`ws://127.0.0.1:${port}`); + await waitForOpen(client); + + client.on("message", (data) => { + const req = JSON.parse(data.toString()); + if (req.method === METHODS.GET_OPEN_ISSUES) { + client.send(JSON.stringify({ + jsonrpc: "2.0", + id: req.id, + result: [makeIssue({ title: "Relay issue" })], + })); + } + }); + + await waitForCondition(() => isRelayConnected()); + + const mockFallback: DataSource = { 
+ getDashboardSummary: vi.fn(), + getOpenPRs: vi.fn(), + getOpenIssues: vi.fn().mockResolvedValue([makeIssue({ title: "Fallback issue" })]), + getFailingActions: vi.fn(), + getPRDetails: vi.fn(), + getRateLimit: vi.fn(), + getConfig: vi.fn(), + getRepos: vi.fn(), + }; + + const wsDs = new WebSocketDataSource(); + const composite = new CompositeDataSource(wsDs, mockFallback); + + const issues = await composite.getOpenIssues(); + expect(issues[0].title).toBe("Relay issue"); + // Fallback should NOT have been called + expect(mockFallback.getOpenIssues).not.toHaveBeenCalled(); + + client.close(); + }); +}); + +describe("Integration: Edge cases (no server)", () => { + it("no GITHUB_TOKEN + no relay → tools return clear error", async () => { + const unavailable: DataSource = { + getDashboardSummary: () => Promise.reject(new Error( + "No GITHUB_TOKEN set and SPA relay is not connected." + )), + getOpenPRs: () => Promise.reject(new Error("No GITHUB_TOKEN")), + getOpenIssues: () => Promise.reject(new Error("No GITHUB_TOKEN")), + getFailingActions: () => Promise.reject(new Error("No GITHUB_TOKEN")), + getPRDetails: () => Promise.reject(new Error("No GITHUB_TOKEN")), + getRateLimit: () => Promise.reject(new Error("No GITHUB_TOKEN")), + getConfig: () => Promise.resolve(null), + getRepos: () => Promise.resolve([]), + }; + + const wsDs = new WebSocketDataSource(); + const composite = new CompositeDataSource(wsDs, unavailable); + + await expect(composite.getDashboardSummary("involves_me")).rejects.toThrow(/GITHUB_TOKEN/); + const config = await composite.getConfig(); + expect(config).toBeNull(); + const repos = await composite.getRepos(); + expect(repos).toEqual([]); + }); +}); + +describe("Integration: Edge cases (with server)", () => { + let wss: WebSocketServer | null; + + beforeEach(() => { + vi.stubEnv("MCP_WS_PORT", "0"); + wss = startWebSocketServer(); + }); + + afterEach(async () => { + await closeWebSocketServer(); + vi.unstubAllEnvs(); + }); + + it("second client 
rejected", async () => { + if (!wss) throw new Error("Server not started"); + const port = await waitForListening(wss); + + const client1 = new WebSocket(`ws://127.0.0.1:${port}`); + await waitForOpen(client1); + await waitForCondition(() => isRelayConnected()); + expect(isRelayConnected()).toBe(true); + + const client2 = new WebSocket(`ws://127.0.0.1:${port}`); + await new Promise((resolve) => { + client2.on("close", () => resolve()); + client2.on("error", () => resolve()); + }); + + expect(client2.readyState).not.toBe(WebSocket.OPEN); + client1.close(); + }); + + it("malformed JSON message does not crash server", async () => { + if (!wss) throw new Error("Server not started"); + const port = await waitForListening(wss); + + const client = new WebSocket(`ws://127.0.0.1:${port}`); + await waitForOpen(client); + await waitForCondition(() => isRelayConnected()); + + client.send("not valid json {{{"); + client.send(""); + client.send(JSON.stringify({ foo: "bar" })); + + // Round-trip sentinel: proves server processed all malformed messages and is still alive + await roundTripSentinel(client); + expect(isRelayConnected()).toBe(true); + client.close(); + }); + + it("config cache persists across relay disconnects", async () => { + setCachedConfig({ + selectedRepos: [{ owner: "acme", name: "app", fullName: "acme/app" }], + trackedUsers: [], + upstreamRepos: [], + monitoredRepos: [], + }); + + if (!wss) throw new Error("Server not started"); + const port = await waitForListening(wss); + + const client = new WebSocket(`ws://127.0.0.1:${port}`); + await waitForOpen(client); + client.close(); + await waitForClose(client); + // Wait for the server's close handler to clear the connection state + await waitForCondition(() => !isRelayConnected()); + + const wsDs = new WebSocketDataSource(); + const mockFallback: DataSource = { + getDashboardSummary: vi.fn(), + getOpenPRs: vi.fn(), + getOpenIssues: vi.fn(), + getFailingActions: vi.fn(), + getPRDetails: vi.fn(), + getRateLimit: 
vi.fn(), + getConfig: vi.fn().mockResolvedValue({ + selectedRepos: [{ owner: "acme", name: "app", fullName: "acme/app" }], + trackedUsers: [], upstreamRepos: [], monitoredRepos: [], + }), + getRepos: vi.fn().mockResolvedValue([{ owner: "acme", name: "app", fullName: "acme/app" }]), + }; + + const composite = new CompositeDataSource(wsDs, mockFallback); + const repos = await composite.getRepos(); + expect(repos).toHaveLength(1); + expect(repos[0].fullName).toBe("acme/app"); + }); +}); + +describe("Integration: Port conflict", () => { + it("EADDRINUSE — server starts without WebSocket", async () => { + const blocker = new WebSocketServer({ host: "127.0.0.1", port: 0 }); + const port = await waitForListening(blocker); + + vi.stubEnv("MCP_WS_PORT", String(port)); + const wss = startWebSocketServer(); + + // The EADDRINUSE error fires asynchronously; wait for the error event on wss + if (wss) await waitForEvent(wss, "error"); + expect(isRelayConnected()).toBe(false); + + await closeWebSocketServer(); + await new Promise((resolve) => blocker.close(() => resolve())); + vi.unstubAllEnvs(); + }); +}); diff --git a/mcp/tests/octokit.test.ts b/mcp/tests/octokit.test.ts new file mode 100644 index 00000000..6f6a0fae --- /dev/null +++ b/mcp/tests/octokit.test.ts @@ -0,0 +1,189 @@ +// ── octokit.ts unit tests ────────────────────────────────────────────────────── +// Tests createOctokitClient (write guard), getOptionalOctokitClient (singleton), +// getOctokitClient (throws without token), and validateTokenScopes. 
+ +import { describe, it, expect, vi, afterEach } from "vitest"; + +// ── Write guard tests ───────────────────────────────────────────────────────── + +describe("createOctokitClient — write guard hook", () => { + it("allows GET requests through", async () => { + const { createOctokitClient } = await import("../src/octokit.js"); + const client = createOctokitClient("fake-token"); + + let capturedMethod: string | undefined; + client.hook.wrap("request", async (_request, options) => { + capturedMethod = (options.method ?? "GET").toUpperCase(); + return { data: {}, headers: {}, status: 200, url: String(options.url) }; + }); + + await client.request("GET /user"); + expect(capturedMethod).toBe("GET"); + }); + + it("allows POST /graphql through", async () => { + const { createOctokitClient } = await import("../src/octokit.js"); + const client = createOctokitClient("fake-token"); + + let capturedUrl: string | undefined; + client.hook.wrap("request", async (_request, options) => { + capturedUrl = String(options.url); + return { data: {}, headers: {}, status: 200, url: String(options.url) }; + }); + + await client.request("POST /graphql", { query: "{ viewer { login } }" }); + expect(capturedUrl).toBe("/graphql"); + }); + + it("blocks PUT requests", async () => { + const { createOctokitClient } = await import("../src/octokit.js"); + const client = createOctokitClient("fake-token"); + // No hook.wrap — before hook fires and throws before any network call + await expect( + client.request("PUT /repos/{owner}/{repo}/contents/{path}" as Parameters[0], { + owner: "o", repo: "r", path: "f.txt", message: "u", content: "dA==", sha: "abc", + }) + ).rejects.toThrow("Write operation blocked"); + }); + + it("blocks DELETE requests", async () => { + const { createOctokitClient } = await import("../src/octokit.js"); + const client = createOctokitClient("fake-token"); + await expect( + client.request("DELETE /repos/{owner}/{repo}" as Parameters[0], { + owner: "o", repo: "r", + }) + 
).rejects.toThrow("Write operation blocked"); + }); +}); + +// ── Singleton tests ─────────────────────────────────────────────────────────── + +describe("getOptionalOctokitClient", () => { + afterEach(() => { + vi.unstubAllEnvs(); + vi.resetModules(); + }); + + it("returns null when GITHUB_TOKEN is not set", async () => { + delete process.env.GITHUB_TOKEN; + vi.resetModules(); + const { getOptionalOctokitClient } = await import("../src/octokit.js"); + expect(getOptionalOctokitClient()).toBeNull(); + }); + + it("returns an Octokit instance when GITHUB_TOKEN is set", async () => { + vi.stubEnv("GITHUB_TOKEN", "ghp_testtoken"); + vi.resetModules(); + const { getOptionalOctokitClient } = await import("../src/octokit.js"); + const client = getOptionalOctokitClient(); + expect(client).not.toBeNull(); + expect(typeof client?.request).toBe("function"); + }); + + it("returns the same singleton on repeated calls", async () => { + vi.stubEnv("GITHUB_TOKEN", "ghp_testtoken"); + vi.resetModules(); + const { getOptionalOctokitClient } = await import("../src/octokit.js"); + expect(getOptionalOctokitClient()).toBe(getOptionalOctokitClient()); + }); +}); + +describe("getOctokitClient", () => { + afterEach(() => { + vi.unstubAllEnvs(); + vi.resetModules(); + }); + + it("throws when GITHUB_TOKEN is not set", async () => { + delete process.env.GITHUB_TOKEN; + vi.resetModules(); + const { getOctokitClient } = await import("../src/octokit.js"); + expect(() => getOctokitClient()).toThrow("GITHUB_TOKEN"); + }); +}); + +// ── validateTokenScopes ─────────────────────────────────────────────────────── + +describe("validateTokenScopes", () => { + afterEach(() => { + vi.restoreAllMocks(); + vi.unstubAllEnvs(); + vi.resetModules(); + }); + + it("returns false when GITHUB_TOKEN is not set", async () => { + delete process.env.GITHUB_TOKEN; + vi.resetModules(); + const spy = vi.spyOn(console, "error").mockImplementation(() => {}); + const { validateTokenScopes } = await 
import("../src/octokit.js"); + expect(await validateTokenScopes()).toBe(false); + expect(spy).toHaveBeenCalledWith(expect.stringContaining("No GITHUB_TOKEN set")); + }); + + it("detects fine-grained PAT (no x-oauth-scopes header)", async () => { + vi.stubEnv("GITHUB_TOKEN", "github_pat_fake"); + vi.resetModules(); + const spy = vi.spyOn(console, "error").mockImplementation(() => {}); + const { getOptionalOctokitClient, validateTokenScopes } = await import("../src/octokit.js"); + const client = getOptionalOctokitClient()!; + vi.spyOn(client, "request").mockResolvedValue({ + data: { login: "testuser" }, + headers: {}, + status: 200, + url: "", + } as never); + + expect(await validateTokenScopes()).toBe(true); + expect(spy).toHaveBeenCalledWith(expect.stringContaining("fine-grained PAT")); + }); + + it("validates classic PAT with all required scopes", async () => { + vi.stubEnv("GITHUB_TOKEN", "ghp_classic"); + vi.resetModules(); + const spy = vi.spyOn(console, "error").mockImplementation(() => {}); + const { getOptionalOctokitClient, validateTokenScopes } = await import("../src/octokit.js"); + const client = getOptionalOctokitClient()!; + vi.spyOn(client, "request").mockResolvedValue({ + data: { login: "octocat" }, + headers: { "x-oauth-scopes": "repo, read:org" }, + status: 200, + url: "", + } as never); + + expect(await validateTokenScopes()).toBe(true); + expect(spy).toHaveBeenCalledWith(expect.stringContaining("Token validated")); + }); + + it("warns when required scopes are missing", async () => { + vi.stubEnv("GITHUB_TOKEN", "ghp_limited"); + vi.resetModules(); + const spy = vi.spyOn(console, "error").mockImplementation(() => {}); + const { getOptionalOctokitClient, validateTokenScopes } = await import("../src/octokit.js"); + const client = getOptionalOctokitClient()!; + vi.spyOn(client, "request").mockResolvedValue({ + data: { login: "partial" }, + headers: { "x-oauth-scopes": "read:user" }, + status: 200, + url: "", + } as never); + + expect(await 
validateTokenScopes()).toBe(true); + expect(spy).toHaveBeenCalledWith(expect.stringContaining("missing required scopes")); + }); + + it("returns false when request throws", async () => { + vi.stubEnv("GITHUB_TOKEN", "ghp_bad"); + vi.resetModules(); + const spy = vi.spyOn(console, "error").mockImplementation(() => {}); + const { getOptionalOctokitClient, validateTokenScopes } = await import("../src/octokit.js"); + const client = getOptionalOctokitClient()!; + vi.spyOn(client, "request").mockRejectedValue(new Error("401 Unauthorized")); + + expect(await validateTokenScopes()).toBe(false); + expect(spy).toHaveBeenCalledWith( + expect.stringContaining("Token validation failed"), + expect.stringContaining("401 Unauthorized") + ); + }); +}); diff --git a/mcp/tests/resources.test.ts b/mcp/tests/resources.test.ts new file mode 100644 index 00000000..f8d36095 --- /dev/null +++ b/mcp/tests/resources.test.ts @@ -0,0 +1,229 @@ +// ── MCP resources.ts unit tests ─────────────────────────────────────────────── +// Tests each of the 2 resources using a mock DataSource. Resources are tested +// by calling the registered readCallback directly via server._registeredResources. 
+ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import { registerResources } from "../src/resources.js"; +import type { DataSource } from "../src/data-source.js"; +import type { CachedConfig } from "../src/data-source.js"; +import type { RepoRef } from "../../src/shared/types.js"; + +// ── Mock DataSource ──────────────────────────────────────────────────────────── + +function makeMockDataSource(overrides: Partial = {}): DataSource { + return { + getDashboardSummary: vi.fn().mockResolvedValue(null), + getOpenPRs: vi.fn().mockResolvedValue([]), + getOpenIssues: vi.fn().mockResolvedValue([]), + getFailingActions: vi.fn().mockResolvedValue([]), + getPRDetails: vi.fn().mockResolvedValue(null), + getRateLimit: vi.fn().mockResolvedValue(null), + getConfig: vi.fn().mockResolvedValue(null), + getRepos: vi.fn().mockResolvedValue([]), + ...overrides, + }; +} + +// ── Helper: call a registered resource readCallback directly ────────────────── + +type ResourceRegistry = Record< + string, + { readCallback: (uri: URL, extra: Record) => Promise } +>; + +async function callResource( + server: McpServer, + uri: string +): Promise<{ contents: { uri: string; mimeType: string; text: string }[] }> { + const resources = (server as unknown as { _registeredResources: ResourceRegistry }) + ._registeredResources; + const resource = resources[uri]; + if (!resource) throw new Error(`Resource not found: ${uri}`); + return resource.readCallback(new URL(uri), {}) as Promise<{ + contents: { uri: string; mimeType: string; text: string }[]; + }>; +} + +// ── Sample fixtures ─────────────────────────────────────────────────────────── + +function makeRepoRef(fullName: string): RepoRef { + const [owner, name] = fullName.split("/"); + return { owner, name, fullName }; +} + +function makeConfig(overrides: Partial = {}): CachedConfig { + return { + selectedRepos: [], + trackedUsers: [], + upstreamRepos: 
[], + monitoredRepos: [], + ...overrides, + }; +} + +// ── Tests: tracker://config ─────────────────────────────────────────────────── + +describe("tracker://config", () => { + let server: McpServer; + let ds: DataSource; + + beforeEach(() => { + server = new McpServer({ name: "test", version: "0.0.0" }); + ds = makeMockDataSource(); + registerResources(server, ds); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + it("returns JSON config when config is available", async () => { + const config = makeConfig({ + selectedRepos: [makeRepoRef("owner/repo")], + }); + vi.mocked(ds.getConfig).mockResolvedValueOnce(config); + + const result = await callResource(server, "tracker://config"); + + expect(result.contents).toHaveLength(1); + const content = result.contents[0]; + expect(content.uri).toBe("tracker://config"); + expect(content.mimeType).toBe("application/json"); + + const parsed = JSON.parse(content.text); + expect(parsed.selectedRepos).toHaveLength(1); + expect(parsed.selectedRepos[0].fullName).toBe("owner/repo"); + }); + + it("returns placeholder when config is null", async () => { + vi.mocked(ds.getConfig).mockResolvedValueOnce(null); + + const result = await callResource(server, "tracker://config"); + + expect(result.contents).toHaveLength(1); + const content = result.contents[0]; + expect(content.uri).toBe("tracker://config"); + expect(content.mimeType).toBe("application/json"); + + const parsed = JSON.parse(content.text); + expect(parsed.status).toContain("No configuration available"); + }); + + it("calls getConfig on the data source", async () => { + await callResource(server, "tracker://config"); + expect(ds.getConfig).toHaveBeenCalledOnce(); + }); + + it("returns valid JSON in both cases", async () => { + // null case + const nullResult = await callResource(server, "tracker://config"); + expect(() => JSON.parse(nullResult.contents[0].text)).not.toThrow(); + + // config case + vi.mocked(ds.getConfig).mockResolvedValueOnce(makeConfig()); + const 
configResult = await callResource(server, "tracker://config"); + expect(() => JSON.parse(configResult.contents[0].text)).not.toThrow(); + }); + + it("serializes config with all fields", async () => { + const config = makeConfig({ + selectedRepos: [makeRepoRef("org/app")], + upstreamRepos: [makeRepoRef("upstream/lib")], + trackedUsers: [], + monitoredRepos: [], + }); + vi.mocked(ds.getConfig).mockResolvedValueOnce(config); + + const result = await callResource(server, "tracker://config"); + const parsed = JSON.parse(result.contents[0].text); + + expect(parsed).toHaveProperty("selectedRepos"); + expect(parsed).toHaveProperty("upstreamRepos"); + expect(parsed).toHaveProperty("trackedUsers"); + expect(parsed).toHaveProperty("monitoredRepos"); + expect(parsed.upstreamRepos[0].fullName).toBe("upstream/lib"); + }); +}); + +// ── Tests: tracker://repos ──────────────────────────────────────────────────── + +describe("tracker://repos", () => { + let server: McpServer; + let ds: DataSource; + + beforeEach(() => { + server = new McpServer({ name: "test", version: "0.0.0" }); + ds = makeMockDataSource(); + registerResources(server, ds); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + it("returns empty list with count 0 when no repos", async () => { + vi.mocked(ds.getRepos).mockResolvedValueOnce([]); + + const result = await callResource(server, "tracker://repos"); + + expect(result.contents).toHaveLength(1); + const content = result.contents[0]; + expect(content.uri).toBe("tracker://repos"); + expect(content.mimeType).toBe("application/json"); + + const parsed = JSON.parse(content.text); + expect(parsed.count).toBe(0); + expect(parsed.repos).toHaveLength(0); + }); + + it("returns repo list with correct count", async () => { + const repos = [makeRepoRef("owner/alpha"), makeRepoRef("owner/beta")]; + vi.mocked(ds.getRepos).mockResolvedValueOnce(repos); + + const result = await callResource(server, "tracker://repos"); + const parsed = 
JSON.parse(result.contents[0].text); + + expect(parsed.count).toBe(2); + expect(parsed.repos).toHaveLength(2); + }); + + it("includes fullName, owner, and name fields per repo", async () => { + vi.mocked(ds.getRepos).mockResolvedValueOnce([makeRepoRef("acme/widget")]); + + const result = await callResource(server, "tracker://repos"); + const parsed = JSON.parse(result.contents[0].text); + const repo = parsed.repos[0]; + + expect(repo.fullName).toBe("acme/widget"); + expect(repo.owner).toBe("acme"); + expect(repo.name).toBe("widget"); + }); + + it("calls getRepos on the data source", async () => { + await callResource(server, "tracker://repos"); + expect(ds.getRepos).toHaveBeenCalledOnce(); + }); + + it("returns valid JSON", async () => { + vi.mocked(ds.getRepos).mockResolvedValueOnce([makeRepoRef("x/y")]); + const result = await callResource(server, "tracker://repos"); + expect(() => JSON.parse(result.contents[0].text)).not.toThrow(); + }); + + it("preserves order of repos from data source", async () => { + const repos = [ + makeRepoRef("a/first"), + makeRepoRef("b/second"), + makeRepoRef("c/third"), + ]; + vi.mocked(ds.getRepos).mockResolvedValueOnce(repos); + + const result = await callResource(server, "tracker://repos"); + const parsed = JSON.parse(result.contents[0].text); + + expect(parsed.repos[0].fullName).toBe("a/first"); + expect(parsed.repos[1].fullName).toBe("b/second"); + expect(parsed.repos[2].fullName).toBe("c/third"); + }); +}); diff --git a/mcp/tests/tools.test.ts b/mcp/tests/tools.test.ts new file mode 100644 index 00000000..6c210f3f --- /dev/null +++ b/mcp/tests/tools.test.ts @@ -0,0 +1,458 @@ +// ── MCP tools.ts unit tests ─────────────────────────────────────────────────── +// Tests each of the 6 tools using a mock DataSource. Tools are tested by +// calling the registered handler directly via server._registeredTools. 
+ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import { registerTools } from "../src/tools.js"; +import type { DataSource } from "../src/data-source.js"; +import type { + Issue, + PullRequest, + WorkflowRun, + DashboardSummary, + RateLimitInfo, +} from "../../src/shared/types.js"; +import { makeIssue, makePullRequest, makeWorkflowRun } from "../../tests/helpers/factories.js"; + +// ── Mock ws-relay module ─────────────────────────────────────────────────────── +// Tools call isRelayConnected() — mock to return false (not connected) +vi.mock("../src/ws-relay.js", () => ({ + isRelayConnected: () => false, + sendRelayRequest: vi.fn(), + onNotification: vi.fn(), + startWebSocketServer: vi.fn(), + closeWebSocketServer: vi.fn().mockResolvedValue(undefined), +})); + +// ── Mock DataSource ──────────────────────────────────────────────────────────── + +function makeMockDataSource(overrides: Partial = {}): DataSource { + const defaultSummary: DashboardSummary = { + openPRCount: 3, + openIssueCount: 5, + failingRunCount: 1, + needsReviewCount: 2, + approvedUnmergedCount: 1, + }; + const defaultRateLimit: RateLimitInfo = { + limit: 5000, + remaining: 4800, + resetAt: new Date("2026-04-07T12:00:00Z"), + }; + + return { + getDashboardSummary: vi.fn().mockResolvedValue(defaultSummary), + getOpenPRs: vi.fn().mockResolvedValue([]), + getOpenIssues: vi.fn().mockResolvedValue([]), + getFailingActions: vi.fn().mockResolvedValue([]), + getPRDetails: vi.fn().mockResolvedValue(null), + getRateLimit: vi.fn().mockResolvedValue(defaultRateLimit), + getConfig: vi.fn().mockResolvedValue(null), + getRepos: vi.fn().mockResolvedValue([]), + ...overrides, + }; +} + +// ── Helper: call a registered tool handler directly ─────────────────────────── + +type ToolRegistry = Record< + string, + { handler: (args: Record, extra: Record) => Promise } +>; + +async function callTool( + server: McpServer, + 
toolName: string, + args: Record = {} +): Promise<{ content: { type: string; text: string }[]; isError?: boolean }> { + const tools = (server as unknown as { _registeredTools: ToolRegistry })._registeredTools; + const tool = tools[toolName]; + if (!tool) throw new Error(`Tool not found: ${toolName}`); + return tool.handler(args, {}) as Promise<{ content: { type: string; text: string }[]; isError?: boolean }>; +} + +// ── Tests ───────────────────────────────────────────────────────────────────── + +describe("get_dashboard_summary", () => { + let server: McpServer; + let ds: DataSource; + + beforeEach(() => { + server = new McpServer({ name: "test", version: "0.0.0" }); + ds = makeMockDataSource(); + registerTools(server, ds); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + it("returns structured summary with counts", async () => { + const result = await callTool(server, "get_dashboard_summary", { scope: "involves_me" }); + expect(result.isError).toBeFalsy(); + const text = result.content[0].text; + expect(text).toContain("Open PRs:"); + expect(text).toContain("3"); + expect(text).toContain("Open Issues:"); + expect(text).toContain("5"); + expect(text).toContain("Failing CI Runs:"); + expect(text).toContain("1"); + expect(text).toContain("Needs Review:"); + expect(text).toContain("2"); + }); + + it("passes scope to data source", async () => { + await callTool(server, "get_dashboard_summary", { scope: "all" }); + expect(ds.getDashboardSummary).toHaveBeenCalledWith("all"); + }); + + it("defaults scope to involves_me", async () => { + await callTool(server, "get_dashboard_summary", {}); + expect(ds.getDashboardSummary).toHaveBeenCalledWith("involves_me"); + }); + + it("returns error content on data source failure", async () => { + vi.mocked(ds.getDashboardSummary).mockRejectedValueOnce(new Error("API error")); + const result = await callTool(server, "get_dashboard_summary"); + expect(result.isError).toBe(true); + 
expect(result.content[0].text).toContain("Error fetching dashboard summary"); + expect(result.content[0].text).toContain("API error"); + }); + + it("includes staleness note when relay is disconnected", async () => { + const result = await callTool(server, "get_dashboard_summary"); + // isRelayConnected is mocked to return false, so staleness note should be present + expect(result.content[0].text).toContain("data via GitHub API"); + }); +}); + +describe("get_open_prs", () => { + let server: McpServer; + let ds: DataSource; + + beforeEach(() => { + server = new McpServer({ name: "test", version: "0.0.0" }); + ds = makeMockDataSource(); + registerTools(server, ds); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + it("returns 'no PRs' message when empty", async () => { + const result = await callTool(server, "get_open_prs"); + expect(result.isError).toBeFalsy(); + expect(result.content[0].text).toContain("No open pull requests found"); + }); + + it("returns formatted PR list", async () => { + const pr = makePullRequest({ number: 42, title: "My Feature PR", repoFullName: "owner/repo" }); + vi.mocked(ds.getOpenPRs).mockResolvedValueOnce([pr]); + const result = await callTool(server, "get_open_prs"); + expect(result.isError).toBeFalsy(); + const text = result.content[0].text; + expect(text).toContain("#42"); + expect(text).toContain("My Feature PR"); + expect(text).toContain("owner/repo"); + }); + + it("passes repo filter to data source", async () => { + await callTool(server, "get_open_prs", { repo: "owner/repo" }); + expect(ds.getOpenPRs).toHaveBeenCalledWith("owner/repo", undefined); + }); + + it("passes status filter to data source", async () => { + await callTool(server, "get_open_prs", { status: "needs_review" }); + expect(ds.getOpenPRs).toHaveBeenCalledWith(undefined, "needs_review"); + }); + + it("passes both repo and status filters", async () => { + await callTool(server, "get_open_prs", { repo: "owner/repo", status: "failing" }); + 
expect(ds.getOpenPRs).toHaveBeenCalledWith("owner/repo", "failing"); + }); + + it("shows draft badge on draft PRs", async () => { + const pr = makePullRequest({ draft: true, title: "WIP draft" }); + vi.mocked(ds.getOpenPRs).mockResolvedValueOnce([pr]); + const result = await callTool(server, "get_open_prs"); + expect(result.content[0].text).toContain("[DRAFT]"); + }); + + it("shows review decision badge", async () => { + const pr = makePullRequest({ reviewDecision: "APPROVED", title: "Approved PR" }); + vi.mocked(ds.getOpenPRs).mockResolvedValueOnce([pr]); + const result = await callTool(server, "get_open_prs"); + expect(result.content[0].text).toContain("[APPROVED]"); + }); + + it("shows check status", async () => { + const pr = makePullRequest({ checkStatus: "failure", title: "Failing checks PR" }); + vi.mocked(ds.getOpenPRs).mockResolvedValueOnce([pr]); + const result = await callTool(server, "get_open_prs"); + expect(result.content[0].text).toContain("[checks: failure]"); + }); + + it("returns error content on data source failure", async () => { + vi.mocked(ds.getOpenPRs).mockRejectedValueOnce(new Error("network error")); + const result = await callTool(server, "get_open_prs"); + expect(result.isError).toBe(true); + expect(result.content[0].text).toContain("Error fetching open PRs"); + }); +}); + +describe("get_open_issues", () => { + let server: McpServer; + let ds: DataSource; + + beforeEach(() => { + server = new McpServer({ name: "test", version: "0.0.0" }); + ds = makeMockDataSource(); + registerTools(server, ds); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + it("returns 'no issues' message when empty", async () => { + const result = await callTool(server, "get_open_issues"); + expect(result.isError).toBeFalsy(); + expect(result.content[0].text).toContain("No open issues found"); + }); + + it("returns formatted issue list", async () => { + const issue = makeIssue({ number: 7, title: "Bug report", repoFullName: "owner/repo" }); + 
vi.mocked(ds.getOpenIssues).mockResolvedValueOnce([issue]); + const result = await callTool(server, "get_open_issues"); + expect(result.isError).toBeFalsy(); + const text = result.content[0].text; + expect(text).toContain("#7"); + expect(text).toContain("Bug report"); + expect(text).toContain("owner/repo"); + }); + + it("passes repo filter to data source", async () => { + await callTool(server, "get_open_issues", { repo: "myorg/myrepo" }); + expect(ds.getOpenIssues).toHaveBeenCalledWith("myorg/myrepo"); + }); + + it("shows issue labels in output", async () => { + const issue = makeIssue({ + title: "Labeled issue", + labels: [{ name: "bug", color: "d73a4a" }], + }); + vi.mocked(ds.getOpenIssues).mockResolvedValueOnce([issue]); + const result = await callTool(server, "get_open_issues"); + expect(result.content[0].text).toContain("[bug]"); + }); + + it("returns error content on data source failure", async () => { + vi.mocked(ds.getOpenIssues).mockRejectedValueOnce(new Error("500")); + const result = await callTool(server, "get_open_issues"); + expect(result.isError).toBe(true); + expect(result.content[0].text).toContain("Error fetching open issues"); + }); +}); + +describe("get_failing_actions", () => { + let server: McpServer; + let ds: DataSource; + + beforeEach(() => { + server = new McpServer({ name: "test", version: "0.0.0" }); + ds = makeMockDataSource(); + registerTools(server, ds); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + it("returns 'no failing runs' message when empty", async () => { + const result = await callTool(server, "get_failing_actions"); + expect(result.isError).toBeFalsy(); + expect(result.content[0].text).toContain("No failing or in-progress workflow runs found"); + }); + + it("returns formatted run list with conclusion", async () => { + const run = makeWorkflowRun({ + name: "CI Build", + conclusion: "failure", + repoFullName: "owner/repo", + runNumber: 99, + }); + vi.mocked(ds.getFailingActions).mockResolvedValueOnce([run]); 
+ const result = await callTool(server, "get_failing_actions"); + expect(result.isError).toBeFalsy(); + const text = result.content[0].text; + expect(text).toContain("CI Build"); + expect(text).toContain("[failure]"); + expect(text).toContain("Run #99"); + expect(text).toContain("owner/repo"); + }); + + it("shows in_progress status when conclusion is null", async () => { + const run = makeWorkflowRun({ + name: "Running", + status: "in_progress", + conclusion: null, + }); + vi.mocked(ds.getFailingActions).mockResolvedValueOnce([run]); + const result = await callTool(server, "get_failing_actions"); + expect(result.content[0].text).toContain("[in_progress]"); + }); + + it("passes repo filter to data source", async () => { + await callTool(server, "get_failing_actions", { repo: "owner/repo" }); + expect(ds.getFailingActions).toHaveBeenCalledWith("owner/repo"); + }); + + it("returns error content on data source failure", async () => { + vi.mocked(ds.getFailingActions).mockRejectedValueOnce(new Error("timeout")); + const result = await callTool(server, "get_failing_actions"); + expect(result.isError).toBe(true); + expect(result.content[0].text).toContain("Error fetching workflow runs"); + }); +}); + +describe("get_pr_details", () => { + let server: McpServer; + let ds: DataSource; + + beforeEach(() => { + server = new McpServer({ name: "test", version: "0.0.0" }); + ds = makeMockDataSource(); + registerTools(server, ds); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + it("returns 'not found' message for nonexistent PR", async () => { + vi.mocked(ds.getPRDetails).mockResolvedValueOnce(null); + const result = await callTool(server, "get_pr_details", { + repo: "owner/repo", + number: 999, + }); + expect(result.isError).toBeFalsy(); + expect(result.content[0].text).toContain("#999 not found in owner/repo"); + }); + + it("returns detailed PR info for existing PR", async () => { + const pr = makePullRequest({ + number: 42, + title: "Feature branch", + 
repoFullName: "owner/repo", + userLogin: "alice", + headRef: "feat/my-feature", + baseRef: "main", + additions: 100, + deletions: 20, + changedFiles: 5, + comments: 3, + reviewThreads: 1, + }); + vi.mocked(ds.getPRDetails).mockResolvedValueOnce(pr); + const result = await callTool(server, "get_pr_details", { + repo: "owner/repo", + number: 42, + }); + expect(result.isError).toBeFalsy(); + const text = result.content[0].text; + expect(text).toContain("PR #42: Feature branch"); + expect(text).toContain("alice"); + expect(text).toContain("feat/my-feature"); + expect(text).toContain("main"); + expect(text).toContain("+100 / -20"); + expect(text).toContain("5 files"); + }); + + it("calls data source with repo and number", async () => { + await callTool(server, "get_pr_details", { repo: "owner/repo", number: 5 }); + expect(ds.getPRDetails).toHaveBeenCalledWith("owner/repo", 5); + }); + + it("shows review decision and check status when present", async () => { + const pr = makePullRequest({ + reviewDecision: "CHANGES_REQUESTED", + checkStatus: "pending", + reviewerLogins: ["bob", "charlie"], + }); + vi.mocked(ds.getPRDetails).mockResolvedValueOnce(pr); + const result = await callTool(server, "get_pr_details", { + repo: "owner/repo", + number: 1, + }); + const text = result.content[0].text; + expect(text).toContain("CHANGES_REQUESTED"); + expect(text).toContain("pending"); + expect(text).toContain("bob"); + expect(text).toContain("charlie"); + }); + + it("shows draft indicator for draft PRs", async () => { + const pr = makePullRequest({ draft: true }); + vi.mocked(ds.getPRDetails).mockResolvedValueOnce(pr); + const result = await callTool(server, "get_pr_details", { + repo: "owner/repo", + number: 1, + }); + expect(result.content[0].text).toContain("draft"); + }); + + it("returns error content on data source failure", async () => { + vi.mocked(ds.getPRDetails).mockRejectedValueOnce(new Error("rate limit exceeded")); + const result = await callTool(server, "get_pr_details", 
{ + repo: "owner/repo", + number: 1, + }); + expect(result.isError).toBe(true); + expect(result.content[0].text).toContain("Error fetching PR details"); + expect(result.content[0].text).toContain("rate limit exceeded"); + }); +}); + +describe("get_rate_limit", () => { + let server: McpServer; + let ds: DataSource; + + beforeEach(() => { + server = new McpServer({ name: "test", version: "0.0.0" }); + ds = makeMockDataSource(); + registerTools(server, ds); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + it("returns formatted rate limit info", async () => { + const result = await callTool(server, "get_rate_limit"); + expect(result.isError).toBeFalsy(); + const text = result.content[0].text; + expect(text).toContain("GitHub API Rate Limit"); + expect(text).toContain("4800"); + expect(text).toContain("5000"); + expect(text).toContain("96%"); + }); + + it("calls getRateLimit on data source", async () => { + await callTool(server, "get_rate_limit"); + expect(ds.getRateLimit).toHaveBeenCalled(); + }); + + it("shows resets at time", async () => { + const result = await callTool(server, "get_rate_limit"); + expect(result.content[0].text).toContain("Resets at:"); + }); + + it("returns error content on data source failure", async () => { + vi.mocked(ds.getRateLimit).mockRejectedValueOnce(new Error("unauthorized")); + const result = await callTool(server, "get_rate_limit"); + expect(result.isError).toBe(true); + expect(result.content[0].text).toContain("Error fetching rate limit"); + }); +}); diff --git a/mcp/tests/ws-relay.test.ts b/mcp/tests/ws-relay.test.ts new file mode 100644 index 00000000..80621161 --- /dev/null +++ b/mcp/tests/ws-relay.test.ts @@ -0,0 +1,440 @@ +// ── WebSocket relay server unit tests ───────────────────────────────────────── +// Tests the ws-relay server behavior using the real `ws` library. +// Each test group starts a fresh server on port 0 (OS-assigned port). 
+ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { WebSocket } from "ws"; +import { + startWebSocketServer, + closeWebSocketServer, + isRelayConnected, + sendRelayRequest, + onNotification, +} from "../src/ws-relay.js"; +import type { WebSocketServer } from "ws"; + +// ── Helpers ──────────────────────────────────────────────────────────────────── + +function getServerPort(wss: WebSocketServer): number { + const addr = wss.address(); + if (addr && typeof addr === "object") return addr.port; + throw new Error("Server has no address — may not be listening yet"); +} + +function waitForEvent(emitter: { on: (event: string, cb: (...args: unknown[]) => void) => void }, event: string, timeout = 2000): Promise { + return new Promise((resolve, reject) => { + const t = setTimeout(() => reject(new Error(`Timeout waiting for '${event}'`)), timeout); + emitter.on(event, (...args: unknown[]) => { + clearTimeout(t); + resolve(args); + }); + }); +} + +function waitForOpen(ws: WebSocket, timeout = 2000): Promise { + if (ws.readyState === WebSocket.OPEN) return Promise.resolve(); + return new Promise((resolve, reject) => { + const t = setTimeout(() => reject(new Error("Timeout waiting for WebSocket open")), timeout); + ws.once("open", () => { clearTimeout(t); resolve(); }); + ws.once("error", (err) => { clearTimeout(t); reject(err); }); + }); +} + +function waitForMessage(ws: WebSocket, timeout = 2000): Promise { + return new Promise((resolve, reject) => { + const t = setTimeout(() => reject(new Error("Timeout waiting for message")), timeout); + ws.once("message", (data) => { + clearTimeout(t); + resolve(data.toString()); + }); + }); +} + +function waitForClose(ws: WebSocket, timeout = 2000): Promise<{ code: number; reason: string }> { + return new Promise((resolve, reject) => { + if (ws.readyState === WebSocket.CLOSED) { + resolve({ code: 1000, reason: "" }); + return; + } + const t = setTimeout(() => reject(new Error("Timeout waiting for 
WebSocket close")), timeout); + ws.once("close", (code, reason) => { + clearTimeout(t); + resolve({ code, reason: reason.toString() }); + }); + }); +} + +function sendJsonRpc(ws: WebSocket, msg: object): void { + ws.send(JSON.stringify(msg)); +} + +async function waitForCondition( + condition: () => boolean, + timeout = 2000, + interval = 10 +): Promise { + const deadline = Date.now() + timeout; + while (!condition()) { + if (Date.now() > deadline) throw new Error("Timeout waiting for condition"); + await new Promise((r) => setTimeout(r, interval)); + } +} + +/** + * Round-trip sentinel: sends a relay request from the server to the client and + * waits for the client to respond. By the time this resolves, the server has + * processed every message that was sent before the sentinel was issued. + */ +async function roundTripSentinel(ws: WebSocket): Promise { + const sentinelPromise = sendRelayRequest("__sentinel__", {}); + const raw = await waitForMessage(ws); + const msg = JSON.parse(raw) as { id: number }; + // Respond so the pending request resolves cleanly + ws.send(JSON.stringify({ jsonrpc: "2.0", id: msg.id, result: null })); + await sentinelPromise; +} + +// ── Tests ────────────────────────────────────────────────────────────────────── + +describe("WebSocket relay server — connection", () => { + let wss: WebSocketServer; + let port: number; + const clients: WebSocket[] = []; + + beforeEach(async () => { + process.env.MCP_WS_PORT = "0"; + wss = startWebSocketServer()!; + expect(wss).not.toBeNull(); + await waitForEvent(wss, "listening"); + port = getServerPort(wss); + }); + + afterEach(async () => { + for (const c of clients) { + if (c.readyState === WebSocket.OPEN || c.readyState === WebSocket.CONNECTING) { + c.close(); + } + } + clients.length = 0; + await closeWebSocketServer(); + delete process.env.MCP_WS_PORT; + vi.restoreAllMocks(); + }); + + it("accepts a single client connection", async () => { + const ws = new WebSocket(`ws://127.0.0.1:${port}`); + 
clients.push(ws); + await waitForOpen(ws); + expect(ws.readyState).toBe(WebSocket.OPEN); + expect(isRelayConnected()).toBe(true); + }); + + it("rejects a second client (only one client allowed)", async () => { + const ws1 = new WebSocket(`ws://127.0.0.1:${port}`); + clients.push(ws1); + await waitForOpen(ws1); + + // ws library: when verifyClient rejects with code 4001, it sends an HTTP response + // with that status code. Code 4001 is not a valid HTTP status, so the client sees + // either an HPE_INVALID_STATUS parse error or a close event. We just verify + // the second client never reaches OPEN state. + const ws2 = new WebSocket(`ws://127.0.0.1:${port}`); + clients.push(ws2); + + const rejected = await new Promise((resolve) => { + ws2.once("open", () => resolve(false)); // Should not open + ws2.once("error", () => resolve(true)); // Error = rejected + ws2.once("close", (code) => { + // Any close (including 4001) counts as rejected + resolve(code !== 1000 || ws2.readyState === WebSocket.CLOSED); + }); + // Safety timeout + setTimeout(() => resolve(true), 2000); + }); + + expect(rejected).toBe(true); + expect(ws1.readyState).toBe(WebSocket.OPEN); // First client still open + }); + + it("reports disconnected after client closes", async () => { + const ws = new WebSocket(`ws://127.0.0.1:${port}`); + clients.push(ws); + await waitForOpen(ws); + expect(isRelayConnected()).toBe(true); + + ws.close(); + await waitForClose(ws); + + // Wait for the server's close handler to set _client = null + await waitForCondition(() => !isRelayConnected()); + expect(isRelayConnected()).toBe(false); + }); + + it("handles malformed JSON without crashing", async () => { + const ws = new WebSocket(`ws://127.0.0.1:${port}`); + clients.push(ws); + await waitForOpen(ws); + + // Send garbage — server should ignore it gracefully + ws.send("this is not valid json{{{{"); + + // Round-trip sentinel: proves server processed the malformed message and is still alive + await roundTripSentinel(ws); + 
expect(ws.readyState).toBe(WebSocket.OPEN); + expect(isRelayConnected()).toBe(true); + }); + + it("ignores non-JSON-RPC 2.0 messages", async () => { + const ws = new WebSocket(`ws://127.0.0.1:${port}`); + clients.push(ws); + await waitForOpen(ws); + + // Valid JSON but wrong jsonrpc version + ws.send(JSON.stringify({ jsonrpc: "1.0", id: 1, method: "test" })); + + // Round-trip sentinel: proves server processed the invalid message and is still alive + await roundTripSentinel(ws); + expect(ws.readyState).toBe(WebSocket.OPEN); + }); +}); + +describe("WebSocket relay server — JSON-RPC request/response", () => { + let wss: WebSocketServer; + let port: number; + let client: WebSocket; + + beforeEach(async () => { + process.env.MCP_WS_PORT = "0"; + wss = startWebSocketServer()!; + await waitForEvent(wss, "listening"); + port = getServerPort(wss); + client = new WebSocket(`ws://127.0.0.1:${port}`); + await waitForOpen(client); + }); + + afterEach(async () => { + if (client.readyState === WebSocket.OPEN) client.close(); + await closeWebSocketServer(); + delete process.env.MCP_WS_PORT; + vi.restoreAllMocks(); + }); + + it("resolves sendRelayRequest when client sends a response", async () => { + // sendRelayRequest sends a JSON-RPC request to the client; client sends back a response + const requestPromise = sendRelayRequest("test_method", { foo: "bar" }); + + // Read what the server sent us + const rawRequest = await waitForMessage(client); + const req = JSON.parse(rawRequest) as { jsonrpc: string; id: number; method: string; params: unknown }; + expect(req.jsonrpc).toBe("2.0"); + expect(req.method).toBe("test_method"); + expect(req.params).toEqual({ foo: "bar" }); + + // Send back a JSON-RPC response + sendJsonRpc(client, { jsonrpc: "2.0", id: req.id, result: { answer: 42 } }); + + const result = await requestPromise; + expect(result).toEqual({ answer: 42 }); + }); + + it("rejects sendRelayRequest when client sends an error response", async () => { + const requestPromise = 
sendRelayRequest("broken_method", {}); + + const rawRequest = await waitForMessage(client); + const req = JSON.parse(rawRequest) as { id: number }; + + sendJsonRpc(client, { + jsonrpc: "2.0", + id: req.id, + error: { code: -32601, message: "Method not found" }, + }); + + await expect(requestPromise).rejects.toThrow("Method not found"); + }); + + it("rejects pending requests when client disconnects", async () => { + const requestPromise = sendRelayRequest("slow_method", {}); + // Attach catch immediately to prevent unhandled rejection + requestPromise.catch(() => {}); + + // Consume the request message so it doesn't block + await waitForMessage(client); + + // Close client without responding + client.close(); + await waitForClose(client); + + // Wait for the server's close handler to reject pending requests and clear _client + await waitForCondition(() => !isRelayConnected()); + + await expect(requestPromise).rejects.toThrow(/relay disconnected|disconnected/i); + }); +}); + +describe("WebSocket relay server — pending request timeout", () => { + let wss: WebSocketServer; + let port: number; + let client: WebSocket; + + beforeEach(async () => { + process.env.MCP_WS_PORT = "0"; + wss = startWebSocketServer()!; + await waitForEvent(wss, "listening"); + port = getServerPort(wss); + client = new WebSocket(`ws://127.0.0.1:${port}`); + await waitForOpen(client); + }); + + afterEach(async () => { + vi.useRealTimers(); + if (client?.readyState === WebSocket.OPEN) client.close(); + await closeWebSocketServer(); + delete process.env.MCP_WS_PORT; + vi.restoreAllMocks(); + }); + + it("rejects pending request after REQUEST_TIMEOUT_MS (10s)", async () => { + vi.useFakeTimers({ shouldAdvanceTime: true }); + + // Start a request — server sends to client, client doesn't respond + const requestPromise = sendRelayRequest("timeout_method", {}); + // Prevent unhandled rejection + requestPromise.catch(() => {}); + + // Read but don't respond to the request (consume the message) + await new 
Promise((resolve) => { + client.once("message", () => resolve()); + }); + + // Advance time past the 10s timeout + vi.advanceTimersByTime(11000); + await vi.runAllTimersAsync(); + + await expect(requestPromise).rejects.toThrow(/timed out/i); + }); +}); + +describe("WebSocket relay server — notifications", () => { + let wss: WebSocketServer; + let port: number; + let client: WebSocket; + + beforeEach(async () => { + process.env.MCP_WS_PORT = "0"; + wss = startWebSocketServer()!; + await waitForEvent(wss, "listening"); + port = getServerPort(wss); + client = new WebSocket(`ws://127.0.0.1:${port}`); + await waitForOpen(client); + }); + + afterEach(async () => { + if (client.readyState === WebSocket.OPEN) client.close(); + await closeWebSocketServer(); + delete process.env.MCP_WS_PORT; + vi.restoreAllMocks(); + }); + + it("dispatches notifications to registered handlers", async () => { + const handler = vi.fn(); + const handlerCalled = new Promise((resolve) => { + onNotification("test_notification", (params) => { + handler(params); + resolve(); + }); + }); + + // Client sends a notification (no id field) + sendJsonRpc(client, { + jsonrpc: "2.0", + method: "test_notification", + params: { key: "value" }, + }); + + // Wait for the notification handler to be invoked + await handlerCalled; + expect(handler).toHaveBeenCalledWith({ key: "value" }); + }); + + it("ignores messages with unknown shape (both id and method present)", async () => { + // A request from the client to the server is not part of the protocol + // Server should ignore it gracefully + sendJsonRpc(client, { + jsonrpc: "2.0", + id: 1, + method: "some_method", + params: {}, + }); + + // Round-trip sentinel: proves server processed the unrecognized message and is still alive + await roundTripSentinel(client); + // No error — server still alive + expect(isRelayConnected()).toBe(true); + }); +}); + +describe("WebSocket relay server — origin validation", () => { + let wss: WebSocketServer; + let port: number; + 
+ beforeEach(async () => { + process.env.MCP_WS_PORT = "0"; + wss = startWebSocketServer()!; + await waitForEvent(wss, "listening"); + port = getServerPort(wss); + }); + + afterEach(async () => { + await closeWebSocketServer(); + delete process.env.MCP_WS_PORT; + vi.restoreAllMocks(); + }); + + it("allows connections from localhost origins", async () => { + const ws = new WebSocket(`ws://127.0.0.1:${port}`, { + headers: { origin: "http://localhost:5173" }, + }); + + const opened = await new Promise((resolve) => { + ws.once("open", () => { ws.close(); resolve(true); }); + ws.once("error", () => resolve(false)); + ws.once("close", (code) => { + // 4001 means rejected for second client, other non-1000 means rejected by server + if (code !== 1000 && code !== 1001) resolve(false); + }); + }); + + expect(opened).toBe(true); + }); + + it("rejects connections from disallowed origins", async () => { + // The server calls verifyClient with callback(false, 403, "Origin not allowed"). + // The ws library sends an HTTP 403 response, which the client sees as an error. 
+ const ws = new WebSocket(`ws://127.0.0.1:${port}`, { + headers: { origin: "https://evil.example.com" }, + }); + + const rejected = await new Promise((resolve) => { + ws.once("open", () => { ws.close(); resolve(false); }); // Should not open + ws.once("error", () => resolve(true)); // Error = rejected by server + ws.once("close", () => resolve(true)); // Close = connection refused + setTimeout(() => resolve(true), 2000); // Safety timeout + }); + + expect(rejected).toBe(true); + }); +}); + +describe("sendRelayRequest — disconnected state", () => { + afterEach(async () => { + await closeWebSocketServer(); + vi.restoreAllMocks(); + }); + + it("rejects immediately when relay is not connected", async () => { + // No server started — relay is not connected + await expect(sendRelayRequest("any_method", {})).rejects.toThrow( + /relay not connected/i + ); + }); +}); diff --git a/mcp/tsconfig.json b/mcp/tsconfig.json new file mode 100644 index 00000000..6295d45f --- /dev/null +++ b/mcp/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "composite": true, + "outDir": "dist", + "rootDir": "src", + "module": "NodeNext", + "moduleResolution": "nodenext", + "target": "ES2022", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "declaration": true, + "sourceMap": true + }, + "references": [{ "path": "../src/shared" }], + "include": ["src/**/*.ts"] +} diff --git a/mcp/tsup.config.ts b/mcp/tsup.config.ts new file mode 100644 index 00000000..136caf3a --- /dev/null +++ b/mcp/tsup.config.ts @@ -0,0 +1,11 @@ +import { defineConfig } from "tsup"; + +export default defineConfig({ + entry: ["src/index.ts"], + format: "esm", + target: "node22", + bundle: true, + clean: true, + external: ["@modelcontextprotocol/sdk", "ws", "zod"], + banner: { js: "#!/usr/bin/env node" }, +}); diff --git a/mcp/vitest.config.ts b/mcp/vitest.config.ts new file mode 100644 index 00000000..faa6d98e --- /dev/null +++ b/mcp/vitest.config.ts @@ -0,0 +1,8 @@ +import { defineConfig } from 
"vitest/config"; + +export default defineConfig({ + test: { + environment: "node", + include: ["tests/**/*.test.ts"], + }, +}); diff --git a/package.json b/package.json index eccde7b7..aea40c2f 100644 --- a/package.json +++ b/package.json @@ -11,10 +11,11 @@ "test": "vitest run --config vitest.workspace.ts", "test:watch": "vitest --config vitest.workspace.ts", "deploy": "wrangler deploy", - "typecheck": "tsc --noEmit", + "typecheck": "tsc --build src/shared/tsconfig.json && tsc --noEmit && tsc --build mcp/tsconfig.json", "test:e2e": "E2E_PORT=$(node -e \"const s=require('net').createServer();s.listen(0,()=>{console.log(s.address().port);s.close()})\") playwright test", "test:waf": "bash scripts/waf-smoke-test.sh", - "screenshot": "pnpm exec playwright test --config playwright.config.screenshot.ts" + "screenshot": "pnpm exec playwright test --config playwright.config.screenshot.ts", + "mcp:serve": "pnpm --filter github-tracker-mcp dev" }, "dependencies": { "@kobalte/core": "0.13.11", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e3059f29..4f12d459 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -41,10 +41,10 @@ importers: devDependencies: '@cloudflare/vite-plugin': specifier: 1.30.1 - version: 1.30.1(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1))(workerd@1.20260317.1)(wrangler@4.77.0) + version: 1.30.1(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0))(workerd@1.20260317.1)(wrangler@4.77.0) '@cloudflare/vitest-pool-workers': specifier: 0.13.4 - version: 0.13.4(@vitest/runner@4.1.1)(@vitest/snapshot@4.1.1)(vitest@4.1.1(@types/node@25.5.0)(happy-dom@20.8.9)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1))) + version: 0.13.4(@vitest/runner@4.1.1)(@vitest/snapshot@4.1.1)(vitest@4.1.1(@types/node@25.5.0)(happy-dom@20.8.9)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0))) '@playwright/test': specifier: 1.58.2 version: 1.58.2 @@ -53,7 +53,7 @@ importers: version: 
0.8.10(@solidjs/router@0.16.1(solid-js@1.9.11))(solid-js@1.9.11) '@tailwindcss/vite': specifier: 4.2.2 - version: 4.2.2(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)) + version: 4.2.2(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0)) '@testing-library/user-event': specifier: 14.6.1 version: 14.6.1(@testing-library/dom@10.4.1) @@ -74,17 +74,57 @@ importers: version: 5.9.3 vite: specifier: 8.0.1 - version: 8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1) + version: 8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0) vite-plugin-solid: specifier: 2.11.11 - version: 2.11.11(solid-js@1.9.11)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)) + version: 2.11.11(solid-js@1.9.11)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0)) vitest: specifier: 4.1.1 - version: 4.1.1(@types/node@25.5.0)(happy-dom@20.8.9)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)) + version: 4.1.1(@types/node@25.5.0)(happy-dom@20.8.9)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0)) wrangler: specifier: 4.77.0 version: 4.77.0 + mcp: + dependencies: + '@modelcontextprotocol/sdk': + specifier: ^1.29.0 + version: 1.29.0(zod@4.3.6) + ws: + specifier: ^8.17.0 + version: 8.20.0 + zod: + specifier: 4.3.6 + version: 4.3.6 + devDependencies: + '@octokit/core': + specifier: 7.0.6 + version: 7.0.6 + '@octokit/plugin-paginate-rest': + specifier: 14.0.0 + version: 14.0.0(@octokit/core@7.0.6) + '@octokit/plugin-retry': + specifier: 8.1.0 + version: 8.1.0(@octokit/core@7.0.6) + '@octokit/plugin-throttling': + specifier: 11.0.3 + version: 11.0.3(@octokit/core@7.0.6) + '@types/ws': + specifier: ^8.0.0 + version: 8.18.1 + tsup: + specifier: ^8.0.0 + version: 8.5.1(jiti@2.6.1)(postcss@8.5.8)(tsx@4.21.0)(typescript@5.9.3) + tsx: + specifier: ^4.0.0 + version: 4.21.0 + typescript: + specifier: 5.9.3 + version: 5.9.3 + vitest: + specifier: ^4.0.0 + version: 
4.1.0(@types/node@25.5.0)(happy-dom@20.8.9)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0)) + packages: '@babel/code-frame@7.29.0': @@ -411,6 +451,12 @@ packages: '@floating-ui/utils@0.2.11': resolution: {integrity: sha512-RiB/yIh78pcIxl6lLMG0CgBXAZ2Y0eVHqMPYugu+9U0AeT6YBeiJpf7lbdJNIugFP5SIjwNRgo4DhR1Qxi26Gg==} + '@hono/node-server@1.19.13': + resolution: {integrity: sha512-TsQLe4i2gvoTtrHje625ngThGBySOgSK3Xo2XRYOdqGN1teR8+I7vchQC46uLJi8OF62YTYA3AhSpumtkhsaKQ==} + engines: {node: '>=18.14.1'} + peerDependencies: + hono: ^4 + '@img/colour@1.1.0': resolution: {integrity: sha512-Td76q7j57o/tLVdgS746cYARfSyxk8iEfRxewL9h4OMzYhbW4TAcppl0mT4eyqXddh6L/jwoM75mo7ixa/pCeQ==} engines: {node: '>=18'} @@ -599,6 +645,16 @@ packages: peerDependencies: solid-js: ^1.8.8 + '@modelcontextprotocol/sdk@1.29.0': + resolution: {integrity: sha512-zo37mZA9hJWpULgkRpowewez1y6ML5GsXJPY8FI0tBBCd77HEvza4jDqRKOXgHNn867PVGCyTdzqpz0izu5ZjQ==} + engines: {node: '>=18'} + peerDependencies: + '@cfworker/json-schema': ^4.1.1 + zod: ^3.25 || ^4.0 + peerDependenciesMeta: + '@cfworker/json-schema': + optional: true + '@napi-rs/wasm-runtime@1.1.1': resolution: {integrity: sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A==} @@ -765,6 +821,144 @@ packages: '@rolldown/pluginutils@1.0.0-rc.10': resolution: {integrity: sha512-UkVDEFk1w3mveXeKgaTuYfKWtPbvgck1dT8TUG3bnccrH0XtLTuAyfCoks4Q/M5ZGToSVJTIQYCzy2g/atAOeg==} + '@rollup/rollup-android-arm-eabi@4.60.1': + resolution: {integrity: sha512-d6FinEBLdIiK+1uACUttJKfgZREXrF0Qc2SmLII7W2AD8FfiZ9Wjd+rD/iRuf5s5dWrr1GgwXCvPqOuDquOowA==} + cpu: [arm] + os: [android] + + '@rollup/rollup-android-arm64@4.60.1': + resolution: {integrity: sha512-YjG/EwIDvvYI1YvYbHvDz/BYHtkY4ygUIXHnTdLhG+hKIQFBiosfWiACWortsKPKU/+dUwQQCKQM3qrDe8c9BA==} + cpu: [arm64] + os: [android] + + '@rollup/rollup-darwin-arm64@4.60.1': + resolution: {integrity: 
sha512-mjCpF7GmkRtSJwon+Rq1N8+pI+8l7w5g9Z3vWj4T7abguC4Czwi3Yu/pFaLvA3TTeMVjnu3ctigusqWUfjZzvw==} + cpu: [arm64] + os: [darwin] + + '@rollup/rollup-darwin-x64@4.60.1': + resolution: {integrity: sha512-haZ7hJ1JT4e9hqkoT9R/19XW2QKqjfJVv+i5AGg57S+nLk9lQnJ1F/eZloRO3o9Scy9CM3wQ9l+dkXtcBgN5Ew==} + cpu: [x64] + os: [darwin] + + '@rollup/rollup-freebsd-arm64@4.60.1': + resolution: {integrity: sha512-czw90wpQq3ZsAVBlinZjAYTKduOjTywlG7fEeWKUA7oCmpA8xdTkxZZlwNJKWqILlq0wehoZcJYfBvOyhPTQ6w==} + cpu: [arm64] + os: [freebsd] + + '@rollup/rollup-freebsd-x64@4.60.1': + resolution: {integrity: sha512-KVB2rqsxTHuBtfOeySEyzEOB7ltlB/ux38iu2rBQzkjbwRVlkhAGIEDiiYnO2kFOkJp+Z7pUXKyrRRFuFUKt+g==} + cpu: [x64] + os: [freebsd] + + '@rollup/rollup-linux-arm-gnueabihf@4.60.1': + resolution: {integrity: sha512-L+34Qqil+v5uC0zEubW7uByo78WOCIrBvci69E7sFASRl0X7b/MB6Cqd1lky/CtcSVTydWa2WZwFuWexjS5o6g==} + cpu: [arm] + os: [linux] + libc: [glibc] + + '@rollup/rollup-linux-arm-musleabihf@4.60.1': + resolution: {integrity: sha512-n83O8rt4v34hgFzlkb1ycniJh7IR5RCIqt6mz1VRJD6pmhRi0CXdmfnLu9dIUS6buzh60IvACM842Ffb3xd6Gg==} + cpu: [arm] + os: [linux] + libc: [musl] + + '@rollup/rollup-linux-arm64-gnu@4.60.1': + resolution: {integrity: sha512-Nql7sTeAzhTAja3QXeAI48+/+GjBJ+QmAH13snn0AJSNL50JsDqotyudHyMbO2RbJkskbMbFJfIJKWA6R1LCJQ==} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@rollup/rollup-linux-arm64-musl@4.60.1': + resolution: {integrity: sha512-+pUymDhd0ys9GcKZPPWlFiZ67sTWV5UU6zOJat02M1+PiuSGDziyRuI/pPue3hoUwm2uGfxdL+trT6Z9rxnlMA==} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@rollup/rollup-linux-loong64-gnu@4.60.1': + resolution: {integrity: sha512-VSvgvQeIcsEvY4bKDHEDWcpW4Yw7BtlKG1GUT4FzBUlEKQK0rWHYBqQt6Fm2taXS+1bXvJT6kICu5ZwqKCnvlQ==} + cpu: [loong64] + os: [linux] + libc: [glibc] + + '@rollup/rollup-linux-loong64-musl@4.60.1': + resolution: {integrity: sha512-4LqhUomJqwe641gsPp6xLfhqWMbQV04KtPp7/dIp0nzPxAkNY1AbwL5W0MQpcalLYk07vaW9Kp1PBhdpZYYcEw==} + cpu: [loong64] + os: [linux] + libc: [musl] + 
+ '@rollup/rollup-linux-ppc64-gnu@4.60.1': + resolution: {integrity: sha512-tLQQ9aPvkBxOc/EUT6j3pyeMD6Hb8QF2BTBnCQWP/uu1lhc9AIrIjKnLYMEroIz/JvtGYgI9dF3AxHZNaEH0rw==} + cpu: [ppc64] + os: [linux] + libc: [glibc] + + '@rollup/rollup-linux-ppc64-musl@4.60.1': + resolution: {integrity: sha512-RMxFhJwc9fSXP6PqmAz4cbv3kAyvD1etJFjTx4ONqFP9DkTkXsAMU4v3Vyc5BgzC+anz7nS/9tp4obsKfqkDHg==} + cpu: [ppc64] + os: [linux] + libc: [musl] + + '@rollup/rollup-linux-riscv64-gnu@4.60.1': + resolution: {integrity: sha512-QKgFl+Yc1eEk6MmOBfRHYF6lTxiiiV3/z/BRrbSiW2I7AFTXoBFvdMEyglohPj//2mZS4hDOqeB0H1ACh3sBbg==} + cpu: [riscv64] + os: [linux] + libc: [glibc] + + '@rollup/rollup-linux-riscv64-musl@4.60.1': + resolution: {integrity: sha512-RAjXjP/8c6ZtzatZcA1RaQr6O1TRhzC+adn8YZDnChliZHviqIjmvFwHcxi4JKPSDAt6Uhf/7vqcBzQJy0PDJg==} + cpu: [riscv64] + os: [linux] + libc: [musl] + + '@rollup/rollup-linux-s390x-gnu@4.60.1': + resolution: {integrity: sha512-wcuocpaOlaL1COBYiA89O6yfjlp3RwKDeTIA0hM7OpmhR1Bjo9j31G1uQVpDlTvwxGn2nQs65fBFL5UFd76FcQ==} + cpu: [s390x] + os: [linux] + libc: [glibc] + + '@rollup/rollup-linux-x64-gnu@4.60.1': + resolution: {integrity: sha512-77PpsFQUCOiZR9+LQEFg9GClyfkNXj1MP6wRnzYs0EeWbPcHs02AXu4xuUbM1zhwn3wqaizle3AEYg5aeoohhg==} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@rollup/rollup-linux-x64-musl@4.60.1': + resolution: {integrity: sha512-5cIATbk5vynAjqqmyBjlciMJl1+R/CwX9oLk/EyiFXDWd95KpHdrOJT//rnUl4cUcskrd0jCCw3wpZnhIHdD9w==} + cpu: [x64] + os: [linux] + libc: [musl] + + '@rollup/rollup-openbsd-x64@4.60.1': + resolution: {integrity: sha512-cl0w09WsCi17mcmWqqglez9Gk8isgeWvoUZ3WiJFYSR3zjBQc2J5/ihSjpl+VLjPqjQ/1hJRcqBfLjssREQILw==} + cpu: [x64] + os: [openbsd] + + '@rollup/rollup-openharmony-arm64@4.60.1': + resolution: {integrity: sha512-4Cv23ZrONRbNtbZa37mLSueXUCtN7MXccChtKpUnQNgF010rjrjfHx3QxkS2PI7LqGT5xXyYs1a7LbzAwT0iCA==} + cpu: [arm64] + os: [openharmony] + + '@rollup/rollup-win32-arm64-msvc@4.60.1': + resolution: {integrity: 
sha512-i1okWYkA4FJICtr7KpYzFpRTHgy5jdDbZiWfvny21iIKky5YExiDXP+zbXzm3dUcFpkEeYNHgQ5fuG236JPq0g==} + cpu: [arm64] + os: [win32] + + '@rollup/rollup-win32-ia32-msvc@4.60.1': + resolution: {integrity: sha512-u09m3CuwLzShA0EYKMNiFgcjjzwqtUMLmuCJLeZWjjOYA3IT2Di09KaxGBTP9xVztWyIWjVdsB2E9goMjZvTQg==} + cpu: [ia32] + os: [win32] + + '@rollup/rollup-win32-x64-gnu@4.60.1': + resolution: {integrity: sha512-k+600V9Zl1CM7eZxJgMyTUzmrmhB/0XZnF4pRypKAlAgxmedUA+1v9R+XOFv56W4SlHEzfeMtzujLJD22Uz5zg==} + cpu: [x64] + os: [win32] + + '@rollup/rollup-win32-x64-msvc@4.60.1': + resolution: {integrity: sha512-lWMnixq/QzxyhTV6NjQJ4SFo1J6PvOX8vUx5Wb4bBPsEb+8xZ89Bz6kOXpfXj9ak9AHTQVQzlgzBEc1SyM27xQ==} + cpu: [x64] + os: [win32] + '@sentry-internal/browser-utils@10.46.0': resolution: {integrity: sha512-WB1gBT9G13V02ekZ6NpUhoI1aGHV2eNfjEPthkU2bGBvFpQKnstwzjg7waIRGR7cu+YSW2Q6UI6aQLgBeOPD1g==} engines: {node: '>=18'} @@ -1025,9 +1219,23 @@ packages: '@types/ws@8.18.1': resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==} + '@vitest/expect@4.1.0': + resolution: {integrity: sha512-EIxG7k4wlWweuCLG9Y5InKFwpMEOyrMb6ZJ1ihYu02LVj/bzUwn2VMU+13PinsjRW75XnITeFrQBMH5+dLvCDA==} + '@vitest/expect@4.1.1': resolution: {integrity: sha512-xAV0fqBTk44Rn6SjJReEQkHP3RrqbJo6JQ4zZ7/uVOiJZRarBtblzrOfFIZeYUrukp2YD6snZG6IBqhOoHTm+A==} + '@vitest/mocker@4.1.0': + resolution: {integrity: sha512-evxREh+Hork43+Y4IOhTo+h5lGmVRyjqI739Rz4RlUPqwrkFFDF6EMvOOYjTx4E8Tl6gyCLRL8Mu7Ry12a13Tw==} + peerDependencies: + msw: ^2.4.9 + vite: ^6.0.0 || ^7.0.0 || ^8.0.0-0 + peerDependenciesMeta: + msw: + optional: true + vite: + optional: true + '@vitest/mocker@4.1.1': resolution: {integrity: sha512-h3BOylsfsCLPeceuCPAAJ+BvNwSENgJa4hXoXu4im0bs9Lyp4URc4JYK4pWLZ4pG/UQn7AT92K6IByi6rE6g3A==} peerDependencies: @@ -1039,21 +1247,56 @@ packages: vite: optional: true + '@vitest/pretty-format@4.1.0': + resolution: {integrity: 
sha512-3RZLZlh88Ib0J7NQTRATfc/3ZPOnSUn2uDBUoGNn5T36+bALixmzphN26OUD3LRXWkJu4H0s5vvUeqBiw+kS0A==} + '@vitest/pretty-format@4.1.1': resolution: {integrity: sha512-GM+TEQN5WhOygr1lp7skeVjdLPqqWMHsfzXrcHAqZJi/lIVh63H0kaRCY8MDhNWikx19zBUK8ceaLB7X5AH9NQ==} + '@vitest/runner@4.1.0': + resolution: {integrity: sha512-Duvx2OzQ7d6OjchL+trw+aSrb9idh7pnNfxrklo14p3zmNL4qPCDeIJAK+eBKYjkIwG96Bc6vYuxhqDXQOWpoQ==} + '@vitest/runner@4.1.1': resolution: {integrity: sha512-f7+FPy75vN91QGWsITueq0gedwUZy1fLtHOCMeQpjs8jTekAHeKP80zfDEnhrleviLHzVSDXIWuCIOFn3D3f8A==} + '@vitest/snapshot@4.1.0': + resolution: {integrity: sha512-0Vy9euT1kgsnj1CHttwi9i9o+4rRLEaPRSOJ5gyv579GJkNpgJK+B4HSv/rAWixx2wdAFci1X4CEPjiu2bXIMg==} + '@vitest/snapshot@4.1.1': resolution: {integrity: sha512-kMVSgcegWV2FibXEx9p9WIKgje58lcTbXgnJixfcg15iK8nzCXhmalL0ZLtTWLW9PH1+1NEDShiFFedB3tEgWg==} + '@vitest/spy@4.1.0': + resolution: {integrity: sha512-pz77k+PgNpyMDv2FV6qmk5ZVau6c3R8HC8v342T2xlFxQKTrSeYw9waIJG8KgV9fFwAtTu4ceRzMivPTH6wSxw==} + '@vitest/spy@4.1.1': resolution: {integrity: sha512-6Ti/KT5OVaiupdIZEuZN7l3CZcR0cxnxt70Z0//3CtwgObwA6jZhmVBA3yrXSVN3gmwjgd7oDNLlsXz526gpRA==} + '@vitest/utils@4.1.0': + resolution: {integrity: sha512-XfPXT6a8TZY3dcGY8EdwsBulFCIw+BeeX0RZn2x/BtiY/75YGh8FeWGG8QISN/WhaqSrE2OrlDgtF8q5uhOTmw==} + '@vitest/utils@4.1.1': resolution: {integrity: sha512-cNxAlaB3sHoCdL6pj6yyUXv9Gry1NHNg0kFTXdvSIZXLHsqKH7chiWOkwJ5s5+d/oMwcoG9T0bKU38JZWKusrQ==} + accepts@2.0.0: + resolution: {integrity: sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==} + engines: {node: '>= 0.6'} + + acorn@8.16.0: + resolution: {integrity: sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==} + engines: {node: '>=0.4.0'} + hasBin: true + + ajv-formats@3.0.1: + resolution: {integrity: sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==} + peerDependencies: + ajv: ^8.0.0 + peerDependenciesMeta: + ajv: + 
optional: true + + ajv@8.18.0: + resolution: {integrity: sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==} + ansi-regex@5.0.1: resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} engines: {node: '>=8'} @@ -1062,6 +1305,9 @@ packages: resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} engines: {node: '>=10'} + any-promise@1.3.0: + resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} + aria-query@5.3.0: resolution: {integrity: sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==} @@ -1094,6 +1340,10 @@ packages: blake3-wasm@2.1.5: resolution: {integrity: sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g==} + body-parser@2.2.2: + resolution: {integrity: sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA==} + engines: {node: '>=18'} + bottleneck@2.19.5: resolution: {integrity: sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==} @@ -1102,6 +1352,28 @@ packages: engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true + bundle-require@5.1.0: + resolution: {integrity: sha512-3WrrOuZiyaaZPWiEt4G3+IffISVC9HYlWueJEBWED4ZH4aIAC2PnkdnuRrR94M+w6yGWn4AglWtJtBI8YqvgoA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + peerDependencies: + esbuild: '>=0.18' + + bytes@3.1.2: + resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} + engines: {node: '>= 0.8'} + + cac@6.7.14: + resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} + engines: {node: '>=8'} + + call-bind-apply-helpers@1.0.2: + resolution: 
{integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} + engines: {node: '>= 0.4'} + + call-bound@1.0.4: + resolution: {integrity: sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==} + engines: {node: '>= 0.4'} + caniuse-lite@1.0.30001780: resolution: {integrity: sha512-llngX0E7nQci5BPJDqoZSbuZ5Bcs9F5db7EtgfwBerX9XGtkkiO4NwfDDIRzHTTwcYC8vC7bmeUEPGrKlR/TkQ==} @@ -1109,16 +1381,55 @@ packages: resolution: {integrity: sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==} engines: {node: '>=18'} + chokidar@4.0.3: + resolution: {integrity: sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==} + engines: {node: '>= 14.16.0'} + cjs-module-lexer@1.4.3: resolution: {integrity: sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==} + commander@4.1.1: + resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} + engines: {node: '>= 6'} + + confbox@0.1.8: + resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==} + + consola@3.4.2: + resolution: {integrity: sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==} + engines: {node: ^14.18.0 || >=16.10.0} + + content-disposition@1.0.1: + resolution: {integrity: sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==} + engines: {node: '>=18'} + + content-type@1.0.5: + resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==} + engines: {node: '>= 0.6'} + convert-source-map@2.0.0: resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} + cookie-signature@1.2.2: + resolution: 
{integrity: sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==} + engines: {node: '>=6.6.0'} + + cookie@0.7.2: + resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==} + engines: {node: '>= 0.6'} + cookie@1.1.1: resolution: {integrity: sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==} engines: {node: '>=18'} + cors@2.8.6: + resolution: {integrity: sha512-tJtZBBHA6vjIAaF6EnIaq6laBBP9aq/Y3ouVJjEfoHbRBcHBAHYcMh/w8LDrk2PvIMMq8gmopa5D4V8RmbrxGw==} + engines: {node: '>= 0.10'} + + cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} + engines: {node: '>= 8'} + csstype@3.2.3: resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==} @@ -1134,6 +1445,10 @@ packages: supports-color: optional: true + depd@2.0.0: + resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} + engines: {node: '>= 0.8'} + dequal@2.0.3: resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} engines: {node: '>=6'} @@ -1145,9 +1460,20 @@ packages: dom-accessibility-api@0.5.16: resolution: {integrity: sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==} + dunder-proto@1.0.1: + resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} + engines: {node: '>= 0.4'} + + ee-first@1.1.1: + resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} + electron-to-chromium@1.5.321: resolution: {integrity: sha512-L2C7Q279W2D/J4PLZLk7sebOILDSWos7bMsMNN06rK482umHUrh/3lM8G7IlHFOYip2oAg5nha1rCMxr/rs6ZQ==} + 
encodeurl@2.0.0: + resolution: {integrity: sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==} + engines: {node: '>= 0.8'} + enhanced-resolve@5.20.1: resolution: {integrity: sha512-Qohcme7V1inbAfvjItgw0EaxVX5q2rdVEZHRBrEQdRZTssLDGsL8Lwrznl8oQ/6kuTJONLaDcGjkNP247XEhcA==} engines: {node: '>=10.13.0'} @@ -1163,9 +1489,21 @@ packages: error-stack-parser-es@1.0.5: resolution: {integrity: sha512-5qucVt2XcuGMcEGgWI7i+yZpmpByQ8J1lHhcL7PwqCwu9FPP3VUXzT4ltHe5i2z9dePwEHcDVOAfSnHsOlCXRA==} + es-define-property@1.0.1: + resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} + engines: {node: '>= 0.4'} + + es-errors@1.3.0: + resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} + engines: {node: '>= 0.4'} + es-module-lexer@2.0.0: resolution: {integrity: sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==} + es-object-atoms@1.1.1: + resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} + engines: {node: '>= 0.4'} + esbuild@0.27.3: resolution: {integrity: sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==} engines: {node: '>=18'} @@ -1175,13 +1513,38 @@ packages: resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} engines: {node: '>=6'} + escape-html@1.0.3: + resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==} + estree-walker@3.0.3: resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} + etag@1.8.1: + resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} + engines: {node: '>= 0.6'} + + 
eventsource-parser@3.0.6: + resolution: {integrity: sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==} + engines: {node: '>=18.0.0'} + + eventsource@3.0.7: + resolution: {integrity: sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==} + engines: {node: '>=18.0.0'} + expect-type@1.3.0: resolution: {integrity: sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==} engines: {node: '>=12.0.0'} + express-rate-limit@8.3.2: + resolution: {integrity: sha512-77VmFeJkO0/rvimEDuUC5H30oqUC4EyOhyGccfqoLebB0oiEYfM7nwPrsDsBL1gsTpwfzX8SFy2MT3TDyRq+bg==} + engines: {node: '>= 16'} + peerDependencies: + express: '>= 4.11' + + express@5.2.1: + resolution: {integrity: sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw==} + engines: {node: '>= 18'} + fake-indexeddb@6.2.5: resolution: {integrity: sha512-CGnyrvbhPlWYMngksqrSSUT1BAVP49dZocrHuK0SvtR0D5TMs5wP0o3j7jexDJW01KSadjBp1M/71o/KR3nD1w==} engines: {node: '>=18'} @@ -1189,6 +1552,12 @@ packages: fast-content-type-parse@3.0.0: resolution: {integrity: sha512-ZvLdcY8P+N8mGQJahJV5G4U88CSvT1rP8ApL6uETe88MBXrBHAkZlSEySdUlyztF7ccb+Znos3TFqaepHxdhBg==} + fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + + fast-uri@3.1.0: + resolution: {integrity: sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==} + fdir@6.5.0: resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==} engines: {node: '>=12.0.0'} @@ -1198,6 +1567,21 @@ packages: picomatch: optional: true + finalhandler@2.1.1: + resolution: {integrity: sha512-S8KoZgRZN+a5rNwqTxlZZePjT/4cnm0ROV70LedRHZ0p8u9fRID0hJUZQpkKLzro8LfmC8sx23bY6tVNxv8pQA==} + engines: {node: '>= 18.0.0'} + + fix-dts-default-cjs-exports@1.0.1: + 
resolution: {integrity: sha512-pVIECanWFC61Hzl2+oOCtoJ3F17kglZC/6N94eRWycFgBH35hHx0Li604ZIzhseh97mf2p0cv7vVrOZGoqhlEg==} + + forwarded@0.2.0: + resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} + engines: {node: '>= 0.6'} + + fresh@2.0.0: + resolution: {integrity: sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==} + engines: {node: '>= 0.8'} + fsevents@2.3.2: resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} @@ -1208,10 +1592,28 @@ packages: engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + gensync@1.0.0-beta.2: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} engines: {node: '>=6.9.0'} + get-intrinsic@1.3.0: + resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} + engines: {node: '>= 0.4'} + + get-proto@1.0.1: + resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} + engines: {node: '>= 0.4'} + + get-tsconfig@4.13.7: + resolution: {integrity: sha512-7tN6rFgBlMgpBML5j8typ92BKFi2sFQvIdpAqLA2beia5avZDrMs0FLZiM5etShWq5irVyGcGMEA1jcDaK7A/Q==} + + gopd@1.2.0: + resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} + engines: {node: '>= 0.4'} + graceful-fs@4.2.11: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} @@ -1219,20 +1621,64 @@ packages: resolution: {integrity: 
sha512-Tz23LR9T9jOGVZm2x1EPdXqwA37G/owYMxRwU0E4miurAtFsPMQ1d2Jc2okUaSjZqAFz2oEn3FLXC5a0a+siyA==} engines: {node: '>=20.0.0'} + has-symbols@1.1.0: + resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} + engines: {node: '>= 0.4'} + + hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + + hono@4.12.12: + resolution: {integrity: sha512-p1JfQMKaceuCbpJKAPKVqyqviZdS0eUxH9v82oWo1kb9xjQ5wA6iP3FNVAPDFlz5/p7d45lO+BpSk1tuSZMF4Q==} + engines: {node: '>=16.9.0'} + html-entities@2.3.3: resolution: {integrity: sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA==} + http-errors@2.0.1: + resolution: {integrity: sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==} + engines: {node: '>= 0.8'} + + iconv-lite@0.7.2: + resolution: {integrity: sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==} + engines: {node: '>=0.10.0'} + idb@8.0.3: resolution: {integrity: sha512-LtwtVyVYO5BqRvcsKuB2iUMnHwPVByPCXFXOpuU96IZPPoPN6xjOGxZQ74pgSVVLQWtUOYgyeL4GE98BY5D3wg==} + inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + + ip-address@10.1.0: + resolution: {integrity: sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==} + engines: {node: '>= 12'} + + ipaddr.js@1.9.1: + resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} + engines: {node: '>= 0.10'} + + is-promise@4.0.0: + resolution: {integrity: sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==} + is-what@4.1.16: resolution: {integrity: 
sha512-ZhMwEosbFJkA0YhFnNDgTM4ZxDRsS6HqTo7qsZM08fehyRYIYa0yHu5R6mgo1n/8MgaPBXiPimPD77baVFYg+A==} engines: {node: '>=12.13'} + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + jiti@2.6.1: resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==} hasBin: true + jose@6.2.2: + resolution: {integrity: sha512-d7kPDd34KO/YnzaDOlikGpOurfF0ByC2sEV4cANCtdqLlTfBlw2p14O/5d/zv40gJPbIQxfES3nSx1/oYNyuZQ==} + + joycon@3.1.1: + resolution: {integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==} + engines: {node: '>=10'} + js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} @@ -1241,6 +1687,12 @@ packages: engines: {node: '>=6'} hasBin: true + json-schema-traverse@1.0.0: + resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} + + json-schema-typed@8.0.2: + resolution: {integrity: sha512-fQhoXdcvc3V28x7C7BMs4P5+kNlgUURe2jmUT1T//oBRMDrqy1QPelJimwZGo7Hg9VPV3EQV5Bnq4hbFy2vetA==} + json-with-bigint@3.5.8: resolution: {integrity: sha512-eq/4KP6K34kwa7TcFdtvnftvHCD9KvHOGGICWwMFc4dOOKF5t4iYqnfLK8otCRCRv06FXOzGGyqE8h8ElMvvdw==} @@ -1327,6 +1779,17 @@ packages: resolution: {integrity: sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==} engines: {node: '>= 12.0.0'} + lilconfig@3.1.3: + resolution: {integrity: sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==} + engines: {node: '>=14'} + + lines-and-columns@1.2.4: + resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} + + load-tsconfig@0.2.5: + resolution: {integrity: 
sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + lru-cache@5.1.1: resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} @@ -1337,35 +1800,91 @@ packages: magic-string@0.30.21: resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} + math-intrinsics@1.1.0: + resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==} + engines: {node: '>= 0.4'} + + media-typer@1.1.0: + resolution: {integrity: sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==} + engines: {node: '>= 0.8'} + merge-anything@5.1.7: resolution: {integrity: sha512-eRtbOb1N5iyH0tkQDAoQ4Ipsp/5qSR79Dzrz8hEPxRX10RWWR/iQXdoKmBSRCThY1Fh5EhISDtpSc93fpxUniQ==} engines: {node: '>=12.13'} + merge-descriptors@2.0.0: + resolution: {integrity: sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==} + engines: {node: '>=18'} + + mime-db@1.54.0: + resolution: {integrity: sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==} + engines: {node: '>= 0.6'} + + mime-types@3.0.2: + resolution: {integrity: sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==} + engines: {node: '>=18'} + miniflare@4.20260317.2: resolution: {integrity: sha512-qNL+yWAFMX6fr0pWU6Lx1vNpPobpnDSF1V8eunIckWvoIQl8y1oBjL2RJFEGY3un+l3f9gwW9dirDPP26usYJQ==} engines: {node: '>=18.0.0'} hasBin: true + mlly@1.8.2: + resolution: {integrity: sha512-d+ObxMQFmbt10sretNDytwt85VrbkhhUA/JBGm1MPaWJ65Cl4wOgLaB1NYvJSZ0Ef03MMEU/0xpPMXUIQ29UfA==} + ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + mz@2.7.0: + resolution: 
{integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} + nanoid@3.3.11: resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true + negotiator@1.0.0: + resolution: {integrity: sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==} + engines: {node: '>= 0.6'} + node-releases@2.0.36: resolution: {integrity: sha512-TdC8FSgHz8Mwtw9g5L4gR/Sh9XhSP/0DEkQxfEFXOpiul5IiHgHan2VhYYb6agDSfp4KuvltmGApc8HMgUrIkA==} + object-assign@4.1.1: + resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} + engines: {node: '>=0.10.0'} + + object-inspect@1.13.4: + resolution: {integrity: sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==} + engines: {node: '>= 0.4'} + obug@2.1.1: resolution: {integrity: sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==} + on-finished@2.4.1: + resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==} + engines: {node: '>= 0.8'} + + once@1.4.0: + resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + parse5@7.3.0: resolution: {integrity: sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==} + parseurl@1.3.3: + resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==} + engines: {node: '>= 0.8'} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + path-to-regexp@6.3.0: resolution: {integrity: 
sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==} + path-to-regexp@8.4.2: + resolution: {integrity: sha512-qRcuIdP69NPm4qbACK+aDogI5CBDMi1jKe0ry5rSQJz8JVLsC7jV8XpiJjGRLLol3N+R5ihGYcrPLTno6pAdBA==} + pathe@2.0.3: resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==} @@ -1376,6 +1895,17 @@ packages: resolution: {integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==} engines: {node: '>=12'} + pirates@4.0.7: + resolution: {integrity: sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==} + engines: {node: '>= 6'} + + pkce-challenge@5.0.1: + resolution: {integrity: sha512-wQ0b/W4Fr01qtpHlqSqspcj3EhBvimsdh0KlHhH8HRZnMsEa0ea2fTULOXOS9ccQr3om+GcGRk4e+isrZWV8qQ==} + engines: {node: '>=16.20.0'} + + pkg-types@1.3.1: + resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} + playwright-core@1.58.2: resolution: {integrity: sha512-yZkEtftgwS8CsfYo7nm0KE8jsvm6i/PTgVtB8DL726wNf6H2IMsDuxCpJj59KDaxCtSnrWan2AeDqM7JBaultg==} engines: {node: '>=18'} @@ -1386,6 +1916,24 @@ packages: engines: {node: '>=18'} hasBin: true + postcss-load-config@6.0.1: + resolution: {integrity: sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==} + engines: {node: '>= 18'} + peerDependencies: + jiti: '>=1.21.0' + postcss: '>=8.0.9' + tsx: ^4.8.1 + yaml: ^2.4.2 + peerDependenciesMeta: + jiti: + optional: true + postcss: + optional: true + tsx: + optional: true + yaml: + optional: true + postcss@8.5.8: resolution: {integrity: sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==} engines: {node: ^10 || ^12 || >=14} @@ -1394,14 +1942,57 @@ packages: resolution: {integrity: 
sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==} engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} + proxy-addr@2.0.7: + resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} + engines: {node: '>= 0.10'} + + qs@6.15.0: + resolution: {integrity: sha512-mAZTtNCeetKMH+pSjrb76NAM8V9a05I9aBZOHztWy/UqcJdQYNsf59vrRKWnojAT9Y+GbIvoTBC++CPHqpDBhQ==} + engines: {node: '>=0.6'} + + range-parser@1.2.1: + resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==} + engines: {node: '>= 0.6'} + + raw-body@3.0.2: + resolution: {integrity: sha512-K5zQjDllxWkf7Z5xJdV0/B0WTNqx6vxG70zJE4N0kBs4LovmEYWJzQGxC9bS9RAKu3bgM40lrd5zoLJ12MQ5BA==} + engines: {node: '>= 0.10'} + react-is@17.0.2: resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==} + readdirp@4.1.2: + resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==} + engines: {node: '>= 14.18.0'} + + require-from-string@2.0.2: + resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} + engines: {node: '>=0.10.0'} + + resolve-from@5.0.0: + resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} + engines: {node: '>=8'} + + resolve-pkg-maps@1.0.0: + resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} + rolldown@1.0.0-rc.10: resolution: {integrity: sha512-q7j6vvarRFmKpgJUT8HCAUljkgzEp4LAhPlJUvQhA5LA1SUL36s5QCysMutErzL3EbNOZOkoziSx9iZC4FddKA==} engines: {node: ^20.19.0 || >=22.12.0} hasBin: true + rollup@4.60.1: + resolution: {integrity: sha512-VmtB2rFU/GroZ4oL8+ZqXgSA38O6GR8KSIvWmEFv63pQ0G6KaBH9s07PO8XTXP4vI+3UJUEypOfjkGfmSBBR0w==} 
+ engines: {node: '>=18.0.0', npm: '>=8.0.0'} + hasBin: true + + router@2.2.0: + resolution: {integrity: sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==} + engines: {node: '>= 18'} + + safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + semver@6.3.1: resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} hasBin: true @@ -1411,6 +2002,10 @@ packages: engines: {node: '>=10'} hasBin: true + send@1.2.1: + resolution: {integrity: sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ==} + engines: {node: '>= 18'} + seroval-plugins@1.5.1: resolution: {integrity: sha512-4FbuZ/TMl02sqv0RTFexu0SP6V+ywaIe5bAWCCEik0fk17BhALgwvUDVF7e3Uvf9pxmwCEJsRPmlkUE6HdzLAw==} engines: {node: '>=10'} @@ -1421,10 +2016,41 @@ packages: resolution: {integrity: sha512-OwrZRZAfhHww0WEnKHDY8OM0U/Qs8OTfIDWhUD4BLpNJUfXK4cGmjiagGze086m+mhI+V2nD0gfbHEnJjb9STA==} engines: {node: '>=10'} + serve-static@2.2.1: + resolution: {integrity: sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw==} + engines: {node: '>= 18'} + + setprototypeof@1.2.0: + resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==} + sharp@0.34.5: resolution: {integrity: sha512-Ou9I5Ft9WNcCbXrU9cMgPBcCK8LiwLqcbywW3t4oDV37n1pzpuNLsYiAV8eODnjbtQlSDwZ2cUEeQz4E54Hltg==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + side-channel-list@1.0.0: + 
resolution: {integrity: sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==} + engines: {node: '>= 0.4'} + + side-channel-map@1.0.1: + resolution: {integrity: sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==} + engines: {node: '>= 0.4'} + + side-channel-weakmap@1.0.2: + resolution: {integrity: sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==} + engines: {node: '>= 0.4'} + + side-channel@1.1.0: + resolution: {integrity: sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==} + engines: {node: '>= 0.4'} + siginfo@2.0.0: resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} @@ -1450,12 +2076,25 @@ packages: resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} engines: {node: '>=0.10.0'} + source-map@0.7.6: + resolution: {integrity: sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==} + engines: {node: '>= 12'} + stackback@0.0.2: resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} + statuses@2.0.2: + resolution: {integrity: sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==} + engines: {node: '>= 0.8'} + std-env@4.0.0: resolution: {integrity: sha512-zUMPtQ/HBY3/50VbpkupYHbRroTRZJPRLvreamgErJVys0ceuzMkD44J/QjqhHjOzK42GQ3QZIeFG1OYfOtKqQ==} + sucrase@3.35.1: + resolution: {integrity: sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw==} + engines: {node: '>=16 || 14 >=14.17'} + hasBin: true + supports-color@10.2.2: resolution: {integrity: sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g==} engines: {node: '>=18'} @@ 
-1467,9 +2106,19 @@ packages: resolution: {integrity: sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==} engines: {node: '>=6'} + thenify-all@1.6.0: + resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} + engines: {node: '>=0.8'} + + thenify@3.3.1: + resolution: {integrity: sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==} + tinybench@2.9.0: resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==} + tinyexec@0.3.2: + resolution: {integrity: sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==} + tinyexec@1.0.4: resolution: {integrity: sha512-u9r3uZC0bdpGOXtlxUIdwf9pkmvhqJdrVCH9fapQtgy/OeTTMZ1nqH7agtvEfmGui6e1XxjcdrlxvxJvc3sMqw==} engines: {node: '>=18'} @@ -1482,14 +2131,56 @@ packages: resolution: {integrity: sha512-Bf+ILmBgretUrdJxzXM0SgXLZ3XfiaUuOj/IKQHuTXip+05Xn+uyEYdVg0kYDipTBcLrCVyUzAPz7QmArb0mmw==} engines: {node: '>=14.0.0'} + toidentifier@1.0.1: + resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} + engines: {node: '>=0.6'} + + tree-kill@1.2.2: + resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==} + hasBin: true + + ts-interface-checker@0.1.13: + resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} + tslib@2.8.1: resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + tsup@8.5.1: + resolution: {integrity: sha512-xtgkqwdhpKWr3tKPmCkvYmS9xnQK3m3XgxZHwSUjvfTjp7YfXe5tT3GgWi0F2N+ZSMsOeWeZFh7ZZFg5iPhing==} + engines: {node: '>=18'} + hasBin: true + peerDependencies: + '@microsoft/api-extractor': ^7.36.0 + '@swc/core': ^1 + 
postcss: ^8.4.12 + typescript: '>=4.5.0' + peerDependenciesMeta: + '@microsoft/api-extractor': + optional: true + '@swc/core': + optional: true + postcss: + optional: true + typescript: + optional: true + + tsx@4.21.0: + resolution: {integrity: sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==} + engines: {node: '>=18.0.0'} + hasBin: true + + type-is@2.0.1: + resolution: {integrity: sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==} + engines: {node: '>= 0.6'} + typescript@5.9.3: resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} engines: {node: '>=14.17'} hasBin: true + ufo@1.6.3: + resolution: {integrity: sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q==} + undici-types@7.18.2: resolution: {integrity: sha512-AsuCzffGHJybSaRrmr5eHr81mwJU3kjw6M+uprWvCXiNeN9SOGwQ3Jn8jb8m3Z6izVgknn1R0FTCEAP2QrLY/w==} @@ -1503,12 +2194,20 @@ packages: universal-user-agent@7.0.3: resolution: {integrity: sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A==} + unpipe@1.0.0: + resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} + engines: {node: '>= 0.8'} + update-browserslist-db@1.2.3: resolution: {integrity: sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==} hasBin: true peerDependencies: browserslist: '>= 4.21.0' + vary@1.1.2: + resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} + engines: {node: '>= 0.8'} + vite-plugin-solid@2.11.11: resolution: {integrity: sha512-YMZCXsLw9kyuvQFEdwLP27fuTQJLmjNoHy90AOJnbRuJ6DwShUxKFo38gdFrWn9v11hnGicKCZEaeI/TFs6JKw==} peerDependencies: @@ -1570,6 +2269,41 @@ packages: vite: optional: true + vitest@4.1.0: + resolution: 
{integrity: sha512-YbDrMF9jM2Lqc++2530UourxZHmkKLxrs4+mYhEwqWS97WJ7wOYEkcr+QfRgJ3PW9wz3odRijLZjHEaRLTNbqw==} + engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@opentelemetry/api': ^1.9.0 + '@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0 + '@vitest/browser-playwright': 4.1.0 + '@vitest/browser-preview': 4.1.0 + '@vitest/browser-webdriverio': 4.1.0 + '@vitest/ui': 4.1.0 + happy-dom: '*' + jsdom: '*' + vite: ^6.0.0 || ^7.0.0 || ^8.0.0-0 + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@opentelemetry/api': + optional: true + '@types/node': + optional: true + '@vitest/browser-playwright': + optional: true + '@vitest/browser-preview': + optional: true + '@vitest/browser-webdriverio': + optional: true + '@vitest/ui': + optional: true + happy-dom: + optional: true + jsdom: + optional: true + vitest@4.1.1: resolution: {integrity: sha512-yF+o4POL41rpAzj5KVILUxm1GCjKnELvaqmU9TLLUbMfDzuN0UpUR9uaDs+mCtjPe+uYPksXDRLQGGPvj1cTmA==} engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0} @@ -1609,6 +2343,11 @@ packages: resolution: {integrity: sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==} engines: {node: '>=12'} + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + why-is-node-running@2.3.0: resolution: {integrity: sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==} engines: {node: '>=8'} @@ -1629,6 +2368,9 @@ packages: '@cloudflare/workers-types': optional: true + wrappy@1.0.2: + resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + ws@8.18.0: resolution: {integrity: sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==} engines: {node: '>=10.0.0'} @@ -1662,6 +2404,11 @@ packages: 
youch@4.1.0-beta.10: resolution: {integrity: sha512-rLfVLB4FgQneDr0dv1oddCVZmKjcJ6yX6mS4pU82Mq/Dt9a3cLZQ62pDBL4AUO+uVrCvtWz3ZFUL2HFAFJ/BXQ==} + zod-to-json-schema@3.25.2: + resolution: {integrity: sha512-O/PgfnpT1xKSDeQYSCfRI5Gy3hPf91mKVDuYLUHZJMiDFptvP41MSnWofm8dnCm0256ZNfZIM7DSzuSMAFnjHA==} + peerDependencies: + zod: ^3.25.28 || ^4 + zod@3.25.76: resolution: {integrity: sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==} @@ -1791,12 +2538,12 @@ snapshots: optionalDependencies: workerd: 1.20260317.1 - '@cloudflare/vite-plugin@1.30.1(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1))(workerd@1.20260317.1)(wrangler@4.77.0)': + '@cloudflare/vite-plugin@1.30.1(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0))(workerd@1.20260317.1)(wrangler@4.77.0)': dependencies: '@cloudflare/unenv-preset': 2.16.0(unenv@2.0.0-rc.24)(workerd@1.20260317.1) miniflare: 4.20260317.2 unenv: 2.0.0-rc.24 - vite: 8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1) + vite: 8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0) wrangler: 4.77.0 ws: 8.18.0 transitivePeerDependencies: @@ -1804,14 +2551,14 @@ snapshots: - utf-8-validate - workerd - '@cloudflare/vitest-pool-workers@0.13.4(@vitest/runner@4.1.1)(@vitest/snapshot@4.1.1)(vitest@4.1.1(@types/node@25.5.0)(happy-dom@20.8.9)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)))': + '@cloudflare/vitest-pool-workers@0.13.4(@vitest/runner@4.1.1)(@vitest/snapshot@4.1.1)(vitest@4.1.1(@types/node@25.5.0)(happy-dom@20.8.9)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0)))': dependencies: '@vitest/runner': 4.1.1 '@vitest/snapshot': 4.1.1 cjs-module-lexer: 1.4.3 esbuild: 0.27.3 miniflare: 4.20260317.2 - vitest: 4.1.1(@types/node@25.5.0)(happy-dom@20.8.9)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)) + vitest: 
4.1.1(@types/node@25.5.0)(happy-dom@20.8.9)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0)) wrangler: 4.77.0 zod: 3.25.76 transitivePeerDependencies: @@ -1948,6 +2695,10 @@ snapshots: '@floating-ui/utils@0.2.11': {} + '@hono/node-server@1.19.13(hono@4.12.12)': + dependencies: + hono: 4.12.12 + '@img/colour@1.1.0': {} '@img/sharp-darwin-arm64@0.34.5': @@ -2099,6 +2850,28 @@ snapshots: '@solid-primitives/utils': 6.4.0(solid-js@1.9.11) solid-js: 1.9.11 + '@modelcontextprotocol/sdk@1.29.0(zod@4.3.6)': + dependencies: + '@hono/node-server': 1.19.13(hono@4.12.12) + ajv: 8.18.0 + ajv-formats: 3.0.1(ajv@8.18.0) + content-type: 1.0.5 + cors: 2.8.6 + cross-spawn: 7.0.6 + eventsource: 3.0.7 + eventsource-parser: 3.0.6 + express: 5.2.1 + express-rate-limit: 8.3.2(express@5.2.1) + hono: 4.12.12 + jose: 6.2.2 + json-schema-typed: 8.0.2 + pkce-challenge: 5.0.1 + raw-body: 3.0.2 + zod: 4.3.6 + zod-to-json-schema: 3.25.2(zod@4.3.6) + transitivePeerDependencies: + - supports-color + '@napi-rs/wasm-runtime@1.1.1': dependencies: '@emnapi/core': 1.9.1 @@ -2233,6 +3006,81 @@ snapshots: '@rolldown/pluginutils@1.0.0-rc.10': {} + '@rollup/rollup-android-arm-eabi@4.60.1': + optional: true + + '@rollup/rollup-android-arm64@4.60.1': + optional: true + + '@rollup/rollup-darwin-arm64@4.60.1': + optional: true + + '@rollup/rollup-darwin-x64@4.60.1': + optional: true + + '@rollup/rollup-freebsd-arm64@4.60.1': + optional: true + + '@rollup/rollup-freebsd-x64@4.60.1': + optional: true + + '@rollup/rollup-linux-arm-gnueabihf@4.60.1': + optional: true + + '@rollup/rollup-linux-arm-musleabihf@4.60.1': + optional: true + + '@rollup/rollup-linux-arm64-gnu@4.60.1': + optional: true + + '@rollup/rollup-linux-arm64-musl@4.60.1': + optional: true + + '@rollup/rollup-linux-loong64-gnu@4.60.1': + optional: true + + '@rollup/rollup-linux-loong64-musl@4.60.1': + optional: true + + '@rollup/rollup-linux-ppc64-gnu@4.60.1': + optional: true + + '@rollup/rollup-linux-ppc64-musl@4.60.1': + 
optional: true + + '@rollup/rollup-linux-riscv64-gnu@4.60.1': + optional: true + + '@rollup/rollup-linux-riscv64-musl@4.60.1': + optional: true + + '@rollup/rollup-linux-s390x-gnu@4.60.1': + optional: true + + '@rollup/rollup-linux-x64-gnu@4.60.1': + optional: true + + '@rollup/rollup-linux-x64-musl@4.60.1': + optional: true + + '@rollup/rollup-openbsd-x64@4.60.1': + optional: true + + '@rollup/rollup-openharmony-arm64@4.60.1': + optional: true + + '@rollup/rollup-win32-arm64-msvc@4.60.1': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.60.1': + optional: true + + '@rollup/rollup-win32-x64-gnu@4.60.1': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.60.1': + optional: true + '@sentry-internal/browser-utils@10.46.0': dependencies: '@sentry/core': 10.46.0 @@ -2410,12 +3258,12 @@ snapshots: '@tailwindcss/oxide-win32-arm64-msvc': 4.2.2 '@tailwindcss/oxide-win32-x64-msvc': 4.2.2 - '@tailwindcss/vite@4.2.2(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1))': + '@tailwindcss/vite@4.2.2(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0))': dependencies: '@tailwindcss/node': 4.2.2 '@tailwindcss/oxide': 4.2.2 tailwindcss: 4.2.2 - vite: 8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1) + vite: 8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0) '@testing-library/dom@10.4.1': dependencies: @@ -2479,6 +3327,15 @@ snapshots: dependencies: '@types/node': 25.5.0 + '@vitest/expect@4.1.0': + dependencies: + '@standard-schema/spec': 1.1.0 + '@types/chai': 5.2.3 + '@vitest/spy': 4.1.0 + '@vitest/utils': 4.1.0 + chai: 6.2.2 + tinyrainbow: 3.1.0 + '@vitest/expect@4.1.1': dependencies: '@standard-schema/spec': 1.1.0 @@ -2488,23 +3345,47 @@ snapshots: chai: 6.2.2 tinyrainbow: 3.1.0 - '@vitest/mocker@4.1.1(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1))': + '@vitest/mocker@4.1.0(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0))': + dependencies: + '@vitest/spy': 4.1.0 + estree-walker: 3.0.3 + 
magic-string: 0.30.21 + optionalDependencies: + vite: 8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0) + + '@vitest/mocker@4.1.1(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0))': dependencies: '@vitest/spy': 4.1.1 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1) + vite: 8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0) + + '@vitest/pretty-format@4.1.0': + dependencies: + tinyrainbow: 3.1.0 '@vitest/pretty-format@4.1.1': dependencies: tinyrainbow: 3.1.0 + '@vitest/runner@4.1.0': + dependencies: + '@vitest/utils': 4.1.0 + pathe: 2.0.3 + '@vitest/runner@4.1.1': dependencies: '@vitest/utils': 4.1.1 pathe: 2.0.3 + '@vitest/snapshot@4.1.0': + dependencies: + '@vitest/pretty-format': 4.1.0 + '@vitest/utils': 4.1.0 + magic-string: 0.30.21 + pathe: 2.0.3 + '@vitest/snapshot@4.1.1': dependencies: '@vitest/pretty-format': 4.1.1 @@ -2512,18 +3393,46 @@ snapshots: magic-string: 0.30.21 pathe: 2.0.3 + '@vitest/spy@4.1.0': {} + '@vitest/spy@4.1.1': {} + '@vitest/utils@4.1.0': + dependencies: + '@vitest/pretty-format': 4.1.0 + convert-source-map: 2.0.0 + tinyrainbow: 3.1.0 + '@vitest/utils@4.1.1': dependencies: '@vitest/pretty-format': 4.1.1 convert-source-map: 2.0.0 tinyrainbow: 3.1.0 + accepts@2.0.0: + dependencies: + mime-types: 3.0.2 + negotiator: 1.0.0 + + acorn@8.16.0: {} + + ajv-formats@3.0.1(ajv@8.18.0): + optionalDependencies: + ajv: 8.18.0 + + ajv@8.18.0: + dependencies: + fast-deep-equal: 3.1.3 + fast-uri: 3.1.0 + json-schema-traverse: 1.0.0 + require-from-string: 2.0.2 + ansi-regex@5.0.1: {} ansi-styles@5.2.0: {} + any-promise@1.3.0: {} + aria-query@5.3.0: dependencies: dequal: 2.0.3 @@ -2552,6 +3461,20 @@ snapshots: blake3-wasm@2.1.5: {} + body-parser@2.2.2: + dependencies: + bytes: 3.1.2 + content-type: 1.0.5 + debug: 4.4.3 + http-errors: 2.0.1 + iconv-lite: 0.7.2 + on-finished: 2.4.1 + qs: 6.15.0 + raw-body: 3.0.2 + type-is: 2.0.1 + 
transitivePeerDependencies: + - supports-color + bottleneck@2.19.5: {} browserslist@4.28.1: @@ -2562,16 +3485,64 @@ snapshots: node-releases: 2.0.36 update-browserslist-db: 1.2.3(browserslist@4.28.1) + bundle-require@5.1.0(esbuild@0.27.3): + dependencies: + esbuild: 0.27.3 + load-tsconfig: 0.2.5 + + bytes@3.1.2: {} + + cac@6.7.14: {} + + call-bind-apply-helpers@1.0.2: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + + call-bound@1.0.4: + dependencies: + call-bind-apply-helpers: 1.0.2 + get-intrinsic: 1.3.0 + caniuse-lite@1.0.30001780: {} chai@6.2.2: {} + chokidar@4.0.3: + dependencies: + readdirp: 4.1.2 + cjs-module-lexer@1.4.3: {} + commander@4.1.1: {} + + confbox@0.1.8: {} + + consola@3.4.2: {} + + content-disposition@1.0.1: {} + + content-type@1.0.5: {} + convert-source-map@2.0.0: {} + cookie-signature@1.2.2: {} + + cookie@0.7.2: {} + cookie@1.1.1: {} + cors@2.8.6: + dependencies: + object-assign: 4.1.1 + vary: 1.1.2 + + cross-spawn@7.0.6: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + csstype@3.2.3: {} daisyui@5.5.19: {} @@ -2580,14 +3551,26 @@ snapshots: dependencies: ms: 2.1.3 + depd@2.0.0: {} + dequal@2.0.3: {} detect-libc@2.1.2: {} dom-accessibility-api@0.5.16: {} + dunder-proto@1.0.1: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-errors: 1.3.0 + gopd: 1.2.0 + + ee-first@1.1.1: {} + electron-to-chromium@1.5.321: {} + encodeurl@2.0.0: {} + enhanced-resolve@5.20.1: dependencies: graceful-fs: 4.2.11 @@ -2599,8 +3582,16 @@ snapshots: error-stack-parser-es@1.0.5: {} + es-define-property@1.0.1: {} + + es-errors@1.3.0: {} + es-module-lexer@2.0.0: {} + es-object-atoms@1.1.1: + dependencies: + es-errors: 1.3.0 + esbuild@0.27.3: optionalDependencies: '@esbuild/aix-ppc64': 0.27.3 @@ -2632,28 +3623,127 @@ snapshots: escalade@3.2.0: {} + escape-html@1.0.3: {} + estree-walker@3.0.3: dependencies: '@types/estree': 1.0.8 + etag@1.8.1: {} + + eventsource-parser@3.0.6: {} + + eventsource@3.0.7: + dependencies: + 
eventsource-parser: 3.0.6 + expect-type@1.3.0: {} + express-rate-limit@8.3.2(express@5.2.1): + dependencies: + express: 5.2.1 + ip-address: 10.1.0 + + express@5.2.1: + dependencies: + accepts: 2.0.0 + body-parser: 2.2.2 + content-disposition: 1.0.1 + content-type: 1.0.5 + cookie: 0.7.2 + cookie-signature: 1.2.2 + debug: 4.4.3 + depd: 2.0.0 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + finalhandler: 2.1.1 + fresh: 2.0.0 + http-errors: 2.0.1 + merge-descriptors: 2.0.0 + mime-types: 3.0.2 + on-finished: 2.4.1 + once: 1.4.0 + parseurl: 1.3.3 + proxy-addr: 2.0.7 + qs: 6.15.0 + range-parser: 1.2.1 + router: 2.2.0 + send: 1.2.1 + serve-static: 2.2.1 + statuses: 2.0.2 + type-is: 2.0.1 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + fake-indexeddb@6.2.5: {} fast-content-type-parse@3.0.0: {} + fast-deep-equal@3.1.3: {} + + fast-uri@3.1.0: {} + fdir@6.5.0(picomatch@4.0.3): optionalDependencies: picomatch: 4.0.3 + finalhandler@2.1.1: + dependencies: + debug: 4.4.3 + encodeurl: 2.0.0 + escape-html: 1.0.3 + on-finished: 2.4.1 + parseurl: 1.3.3 + statuses: 2.0.2 + transitivePeerDependencies: + - supports-color + + fix-dts-default-cjs-exports@1.0.1: + dependencies: + magic-string: 0.30.21 + mlly: 1.8.2 + rollup: 4.60.1 + + forwarded@0.2.0: {} + + fresh@2.0.0: {} + fsevents@2.3.2: optional: true fsevents@2.3.3: optional: true + function-bind@1.1.2: {} + gensync@1.0.0-beta.2: {} + get-intrinsic@1.3.0: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + function-bind: 1.1.2 + get-proto: 1.0.1 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + math-intrinsics: 1.1.0 + + get-proto@1.0.1: + dependencies: + dunder-proto: 1.0.1 + es-object-atoms: 1.1.1 + + get-tsconfig@4.13.7: + dependencies: + resolve-pkg-maps: 1.0.0 + + gopd@1.2.0: {} + graceful-fs@4.2.11: {} happy-dom@20.8.9: @@ -2668,18 +3758,56 @@ snapshots: - bufferutil - utf-8-validate + has-symbols@1.1.0: {} + + hasown@2.0.2: + 
dependencies: + function-bind: 1.1.2 + + hono@4.12.12: {} + html-entities@2.3.3: {} + http-errors@2.0.1: + dependencies: + depd: 2.0.0 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: 2.0.2 + toidentifier: 1.0.1 + + iconv-lite@0.7.2: + dependencies: + safer-buffer: 2.1.2 + idb@8.0.3: {} + inherits@2.0.4: {} + + ip-address@10.1.0: {} + + ipaddr.js@1.9.1: {} + + is-promise@4.0.0: {} + is-what@4.1.16: {} + isexe@2.0.0: {} + jiti@2.6.1: {} + jose@6.2.2: {} + + joycon@3.1.1: {} + js-tokens@4.0.0: {} jsesc@3.1.0: {} + json-schema-traverse@1.0.0: {} + + json-schema-typed@8.0.2: {} + json-with-bigint@3.5.8: {} json5@2.2.3: {} @@ -2735,6 +3863,12 @@ snapshots: lightningcss-win32-arm64-msvc: 1.32.0 lightningcss-win32-x64-msvc: 1.32.0 + lilconfig@3.1.3: {} + + lines-and-columns@1.2.4: {} + + load-tsconfig@0.2.5: {} + lru-cache@5.1.1: dependencies: yallist: 3.1.1 @@ -2745,10 +3879,22 @@ snapshots: dependencies: '@jridgewell/sourcemap-codec': 1.5.5 + math-intrinsics@1.1.0: {} + + media-typer@1.1.0: {} + merge-anything@5.1.7: dependencies: is-what: 4.1.16 + merge-descriptors@2.0.0: {} + + mime-db@1.54.0: {} + + mime-types@3.0.2: + dependencies: + mime-db: 1.54.0 + miniflare@4.20260317.2: dependencies: '@cspotcode/source-map-support': 0.8.1 @@ -2761,26 +3907,69 @@ snapshots: - bufferutil - utf-8-validate + mlly@1.8.2: + dependencies: + acorn: 8.16.0 + pathe: 2.0.3 + pkg-types: 1.3.1 + ufo: 1.6.3 + ms@2.1.3: {} + mz@2.7.0: + dependencies: + any-promise: 1.3.0 + object-assign: 4.1.1 + thenify-all: 1.6.0 + nanoid@3.3.11: {} + negotiator@1.0.0: {} + node-releases@2.0.36: {} + object-assign@4.1.1: {} + + object-inspect@1.13.4: {} + obug@2.1.1: {} + on-finished@2.4.1: + dependencies: + ee-first: 1.1.1 + + once@1.4.0: + dependencies: + wrappy: 1.0.2 + parse5@7.3.0: dependencies: entities: 6.0.1 + parseurl@1.3.3: {} + + path-key@3.1.1: {} + path-to-regexp@6.3.0: {} + path-to-regexp@8.4.2: {} + pathe@2.0.3: {} picocolors@1.1.1: {} picomatch@4.0.3: {} + pirates@4.0.7: {} + + 
pkce-challenge@5.0.1: {} + + pkg-types@1.3.1: + dependencies: + confbox: 0.1.8 + mlly: 1.8.2 + pathe: 2.0.3 + playwright-core@1.58.2: {} playwright@1.58.2: @@ -2789,6 +3978,14 @@ snapshots: optionalDependencies: fsevents: 2.3.2 + postcss-load-config@6.0.1(jiti@2.6.1)(postcss@8.5.8)(tsx@4.21.0): + dependencies: + lilconfig: 3.1.3 + optionalDependencies: + jiti: 2.6.1 + postcss: 8.5.8 + tsx: 4.21.0 + postcss@8.5.8: dependencies: nanoid: 3.3.11 @@ -2801,8 +3998,34 @@ snapshots: ansi-styles: 5.2.0 react-is: 17.0.2 + proxy-addr@2.0.7: + dependencies: + forwarded: 0.2.0 + ipaddr.js: 1.9.1 + + qs@6.15.0: + dependencies: + side-channel: 1.1.0 + + range-parser@1.2.1: {} + + raw-body@3.0.2: + dependencies: + bytes: 3.1.2 + http-errors: 2.0.1 + iconv-lite: 0.7.2 + unpipe: 1.0.0 + react-is@17.0.2: {} + readdirp@4.1.2: {} + + require-from-string@2.0.2: {} + + resolve-from@5.0.0: {} + + resolve-pkg-maps@1.0.0: {} + rolldown@1.0.0-rc.10: dependencies: '@oxc-project/types': 0.120.0 @@ -2824,16 +4047,86 @@ snapshots: '@rolldown/binding-win32-arm64-msvc': 1.0.0-rc.10 '@rolldown/binding-win32-x64-msvc': 1.0.0-rc.10 + rollup@4.60.1: + dependencies: + '@types/estree': 1.0.8 + optionalDependencies: + '@rollup/rollup-android-arm-eabi': 4.60.1 + '@rollup/rollup-android-arm64': 4.60.1 + '@rollup/rollup-darwin-arm64': 4.60.1 + '@rollup/rollup-darwin-x64': 4.60.1 + '@rollup/rollup-freebsd-arm64': 4.60.1 + '@rollup/rollup-freebsd-x64': 4.60.1 + '@rollup/rollup-linux-arm-gnueabihf': 4.60.1 + '@rollup/rollup-linux-arm-musleabihf': 4.60.1 + '@rollup/rollup-linux-arm64-gnu': 4.60.1 + '@rollup/rollup-linux-arm64-musl': 4.60.1 + '@rollup/rollup-linux-loong64-gnu': 4.60.1 + '@rollup/rollup-linux-loong64-musl': 4.60.1 + '@rollup/rollup-linux-ppc64-gnu': 4.60.1 + '@rollup/rollup-linux-ppc64-musl': 4.60.1 + '@rollup/rollup-linux-riscv64-gnu': 4.60.1 + '@rollup/rollup-linux-riscv64-musl': 4.60.1 + '@rollup/rollup-linux-s390x-gnu': 4.60.1 + '@rollup/rollup-linux-x64-gnu': 4.60.1 + 
'@rollup/rollup-linux-x64-musl': 4.60.1 + '@rollup/rollup-openbsd-x64': 4.60.1 + '@rollup/rollup-openharmony-arm64': 4.60.1 + '@rollup/rollup-win32-arm64-msvc': 4.60.1 + '@rollup/rollup-win32-ia32-msvc': 4.60.1 + '@rollup/rollup-win32-x64-gnu': 4.60.1 + '@rollup/rollup-win32-x64-msvc': 4.60.1 + fsevents: 2.3.3 + + router@2.2.0: + dependencies: + debug: 4.4.3 + depd: 2.0.0 + is-promise: 4.0.0 + parseurl: 1.3.3 + path-to-regexp: 8.4.2 + transitivePeerDependencies: + - supports-color + + safer-buffer@2.1.2: {} + semver@6.3.1: {} semver@7.7.4: {} + send@1.2.1: + dependencies: + debug: 4.4.3 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + fresh: 2.0.0 + http-errors: 2.0.1 + mime-types: 3.0.2 + ms: 2.1.3 + on-finished: 2.4.1 + range-parser: 1.2.1 + statuses: 2.0.2 + transitivePeerDependencies: + - supports-color + seroval-plugins@1.5.1(seroval@1.5.1): dependencies: seroval: 1.5.1 seroval@1.5.1: {} + serve-static@2.2.1: + dependencies: + encodeurl: 2.0.0 + escape-html: 1.0.3 + parseurl: 1.3.3 + send: 1.2.1 + transitivePeerDependencies: + - supports-color + + setprototypeof@1.2.0: {} + sharp@0.34.5: dependencies: '@img/colour': 1.1.0 @@ -2865,6 +4158,40 @@ snapshots: '@img/sharp-win32-ia32': 0.34.5 '@img/sharp-win32-x64': 0.34.5 + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + + side-channel-list@1.0.0: + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.4 + + side-channel-map@1.0.1: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + object-inspect: 1.13.4 + + side-channel-weakmap@1.0.2: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + object-inspect: 1.13.4 + side-channel-map: 1.0.1 + + side-channel@1.1.0: + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.4 + side-channel-list: 1.0.0 + side-channel-map: 1.0.1 + side-channel-weakmap: 1.0.2 + siginfo@2.0.0: {} solid-js@1.9.11: @@ -2894,18 +4221,42 @@ snapshots: source-map-js@1.2.1: {} + 
source-map@0.7.6: {} + stackback@0.0.2: {} + statuses@2.0.2: {} + std-env@4.0.0: {} + sucrase@3.35.1: + dependencies: + '@jridgewell/gen-mapping': 0.3.13 + commander: 4.1.1 + lines-and-columns: 1.2.4 + mz: 2.7.0 + pirates: 4.0.7 + tinyglobby: 0.2.15 + ts-interface-checker: 0.1.13 + supports-color@10.2.2: {} tailwindcss@4.2.2: {} tapable@2.3.0: {} + thenify-all@1.6.0: + dependencies: + thenify: 3.3.1 + + thenify@3.3.1: + dependencies: + any-promise: 1.3.0 + tinybench@2.9.0: {} + tinyexec@0.3.2: {} + tinyexec@1.0.4: {} tinyglobby@0.2.15: @@ -2915,10 +4266,59 @@ snapshots: tinyrainbow@3.1.0: {} + toidentifier@1.0.1: {} + + tree-kill@1.2.2: {} + + ts-interface-checker@0.1.13: {} + tslib@2.8.1: {} + tsup@8.5.1(jiti@2.6.1)(postcss@8.5.8)(tsx@4.21.0)(typescript@5.9.3): + dependencies: + bundle-require: 5.1.0(esbuild@0.27.3) + cac: 6.7.14 + chokidar: 4.0.3 + consola: 3.4.2 + debug: 4.4.3 + esbuild: 0.27.3 + fix-dts-default-cjs-exports: 1.0.1 + joycon: 3.1.1 + picocolors: 1.1.1 + postcss-load-config: 6.0.1(jiti@2.6.1)(postcss@8.5.8)(tsx@4.21.0) + resolve-from: 5.0.0 + rollup: 4.60.1 + source-map: 0.7.6 + sucrase: 3.35.1 + tinyexec: 0.3.2 + tinyglobby: 0.2.15 + tree-kill: 1.2.2 + optionalDependencies: + postcss: 8.5.8 + typescript: 5.9.3 + transitivePeerDependencies: + - jiti + - supports-color + - tsx + - yaml + + tsx@4.21.0: + dependencies: + esbuild: 0.27.3 + get-tsconfig: 4.13.7 + optionalDependencies: + fsevents: 2.3.3 + + type-is@2.0.1: + dependencies: + content-type: 1.0.5 + media-typer: 1.1.0 + mime-types: 3.0.2 + typescript@5.9.3: {} + ufo@1.6.3: {} + undici-types@7.18.2: {} undici@7.24.4: {} @@ -2929,13 +4329,17 @@ snapshots: universal-user-agent@7.0.3: {} + unpipe@1.0.0: {} + update-browserslist-db@1.2.3(browserslist@4.28.1): dependencies: browserslist: 4.28.1 escalade: 3.2.0 picocolors: 1.1.1 - vite-plugin-solid@2.11.11(solid-js@1.9.11)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)): + vary@1.1.2: {} + + 
vite-plugin-solid@2.11.11(solid-js@1.9.11)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0)): dependencies: '@babel/core': 7.29.0 '@types/babel__core': 7.20.5 @@ -2943,12 +4347,12 @@ snapshots: merge-anything: 5.1.7 solid-js: 1.9.11 solid-refresh: 0.6.3(solid-js@1.9.11) - vite: 8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1) - vitefu: 1.1.2(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)) + vite: 8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0) + vitefu: 1.1.2(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0)) transitivePeerDependencies: - supports-color - vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1): + vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0): dependencies: lightningcss: 1.32.0 picomatch: 4.0.3 @@ -2960,15 +4364,44 @@ snapshots: esbuild: 0.27.3 fsevents: 2.3.3 jiti: 2.6.1 + tsx: 4.21.0 - vitefu@1.1.2(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)): + vitefu@1.1.2(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0)): optionalDependencies: - vite: 8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1) + vite: 8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0) - vitest@4.1.1(@types/node@25.5.0)(happy-dom@20.8.9)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)): + vitest@4.1.0(@types/node@25.5.0)(happy-dom@20.8.9)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0)): + dependencies: + '@vitest/expect': 4.1.0 + '@vitest/mocker': 4.1.0(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0)) + '@vitest/pretty-format': 4.1.0 + '@vitest/runner': 4.1.0 + '@vitest/snapshot': 4.1.0 + '@vitest/spy': 4.1.0 + '@vitest/utils': 4.1.0 + es-module-lexer: 2.0.0 + expect-type: 1.3.0 + magic-string: 0.30.21 + obug: 2.1.1 + pathe: 2.0.3 + picomatch: 4.0.3 + std-env: 4.0.0 + tinybench: 2.9.0 + tinyexec: 1.0.4 + tinyglobby: 0.2.15 + tinyrainbow: 3.1.0 + vite: 
8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0) + why-is-node-running: 2.3.0 + optionalDependencies: + '@types/node': 25.5.0 + happy-dom: 20.8.9 + transitivePeerDependencies: + - msw + + vitest@4.1.1(@types/node@25.5.0)(happy-dom@20.8.9)(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0)): dependencies: '@vitest/expect': 4.1.1 - '@vitest/mocker': 4.1.1(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)) + '@vitest/mocker': 4.1.1(vite@8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0)) '@vitest/pretty-format': 4.1.1 '@vitest/runner': 4.1.1 '@vitest/snapshot': 4.1.1 @@ -2985,7 +4418,7 @@ snapshots: tinyexec: 1.0.4 tinyglobby: 0.2.15 tinyrainbow: 3.1.0 - vite: 8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1) + vite: 8.0.1(@types/node@25.5.0)(esbuild@0.27.3)(jiti@2.6.1)(tsx@4.21.0) why-is-node-running: 2.3.0 optionalDependencies: '@types/node': 25.5.0 @@ -2995,6 +4428,10 @@ snapshots: whatwg-mimetype@3.0.0: {} + which@2.0.2: + dependencies: + isexe: 2.0.0 + why-is-node-running@2.3.0: dependencies: siginfo: 2.0.0 @@ -3024,6 +4461,8 @@ snapshots: - bufferutil - utf-8-validate + wrappy@1.0.2: {} + ws@8.18.0: {} ws@8.20.0: {} @@ -3043,6 +4482,10 @@ snapshots: cookie: 1.1.1 youch-core: 0.3.3 + zod-to-json-schema@3.25.2(zod@4.3.6): + dependencies: + zod: 4.3.6 + zod@3.25.76: {} zod@4.3.6: {} diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml new file mode 100644 index 00000000..3fde8458 --- /dev/null +++ b/pnpm-workspace.yaml @@ -0,0 +1,2 @@ +packages: + - "mcp" diff --git a/public/_headers b/public/_headers index bd30cf47..994eb668 100644 --- a/public/_headers +++ b/public/_headers @@ -1,5 +1,5 @@ /* - Content-Security-Policy: default-src 'none'; script-src 'self' 'sha256-uEFqyYCMaNy1Su5VmWLZ1hOCRBjkhm4+ieHHxQW6d3Y='; style-src-elem 'self'; style-src-attr 'unsafe-inline'; img-src 'self' data: https://avatars.githubusercontent.com; connect-src 'self' https://api.github.com; font-src 'self'; worker-src 
'self'; manifest-src 'self'; frame-ancestors 'none'; base-uri 'self'; form-action 'none'; upgrade-insecure-requests; report-uri /api/csp-report; report-to csp-endpoint + Content-Security-Policy: default-src 'none'; script-src 'self' 'sha256-uEFqyYCMaNy1Su5VmWLZ1hOCRBjkhm4+ieHHxQW6d3Y='; style-src-elem 'self'; style-src-attr 'unsafe-inline'; img-src 'self' data: https://avatars.githubusercontent.com; connect-src 'self' https://api.github.com ws://127.0.0.1:*; font-src 'self'; worker-src 'self'; manifest-src 'self'; frame-ancestors 'none'; base-uri 'self'; form-action 'none'; upgrade-insecure-requests; report-uri /api/csp-report; report-to csp-endpoint Reporting-Endpoints: csp-endpoint="/api/csp-report" X-Content-Type-Options: nosniff Referrer-Policy: strict-origin-when-cross-origin diff --git a/src/app/App.tsx b/src/app/App.tsx index 23a7eb2f..e56dd28a 100644 --- a/src/app/App.tsx +++ b/src/app/App.tsx @@ -5,6 +5,7 @@ import { config, initConfigPersistence, resolveTheme } from "./stores/config"; import { initViewPersistence, pruneStaleIgnoredItems } from "./stores/view"; import { evictStaleEntries } from "./stores/cache"; import { initClientWatcher } from "./services/github"; +import { initMcpRelay } from "./lib/mcp-relay"; import LoginPage from "./pages/LoginPage"; import OAuthCallback from "./pages/OAuthCallback"; import PrivacyPage from "./pages/PrivacyPage"; @@ -165,6 +166,7 @@ export default function App() { initConfigPersistence(); initViewPersistence(); initClientWatcher(); + initMcpRelay(); pruneStaleIgnoredItems(); evictStaleEntries(24 * 60 * 60 * 1000).catch(() => { // Non-fatal — stale eviction failure is acceptable diff --git a/src/app/components/dashboard/DashboardPage.tsx b/src/app/components/dashboard/DashboardPage.tsx index a1f030e2..84d11fc4 100644 --- a/src/app/components/dashboard/DashboardPage.tsx +++ b/src/app/components/dashboard/DashboardPage.tsx @@ -1,5 +1,5 @@ import { createSignal, createMemo, createEffect, Show, Switch, Match, onMount, 
onCleanup } from "solid-js"; -import { createStore, produce } from "solid-js/store"; +import { createStore, produce, unwrap } from "solid-js/store"; import Header from "../layout/Header"; import TabBar, { TabId } from "../layout/TabBar"; import FilterBar from "../layout/FilterBar"; @@ -23,6 +23,7 @@ import { type DashboardData, } from "../../services/poll"; import { expireToken, user, onAuthCleared, DASHBOARD_STORAGE_KEY } from "../../stores/auth"; +import { updateRelaySnapshot } from "../../lib/mcp-relay"; import { pushNotification } from "../../lib/errors"; import { getClient, getGraphqlRateLimit, fetchRateLimitDetails } from "../../services/github"; import { formatCount } from "../../lib/format"; @@ -481,6 +482,21 @@ export default function DashboardPage() { }; }); + // Push dashboard data into the MCP relay snapshot on each full refresh. + // Tracks lastRefreshedAt (always updated alongside data arrays in pollFetch). + // Hot poll updates are intentionally excluded — relay reflects full-refresh data only. + createEffect(() => { + if (!config.mcpRelayEnabled) return; + if (!dashboardData.lastRefreshedAt) return; + const d = unwrap(dashboardData); + updateRelaySnapshot({ + issues: d.issues, + pullRequests: d.pullRequests, + workflowRuns: d.workflowRuns, + lastUpdatedAt: Date.now(), + }); + }); + const userLogin = createMemo(() => user()?.login ?? 
""); const allUsers = createMemo(() => { const login = userLogin().toLowerCase(); diff --git a/src/app/components/onboarding/RepoSelector.tsx b/src/app/components/onboarding/RepoSelector.tsx index 53cbc038..cdb991a0 100644 --- a/src/app/components/onboarding/RepoSelector.tsx +++ b/src/app/components/onboarding/RepoSelector.tsx @@ -12,15 +12,13 @@ import { getClient } from "../../services/github"; import { user } from "../../stores/auth"; import type { TrackedUser } from "../../stores/config"; import { relativeTime } from "../../lib/format"; +import { VALID_REPO_NAME } from "../../../shared/validation"; import LoadingSpinner from "../shared/LoadingSpinner"; import FilterInput from "../shared/FilterInput"; import { Tooltip, InfoTooltip } from "../shared/Tooltip"; import ChevronIcon from "../shared/ChevronIcon"; import { Accordion } from "@kobalte/core"; -// Validates owner/repo format (both segments must be non-empty, no spaces) -const VALID_REPO_NAME = /^[a-zA-Z0-9._-]{1,100}\/[a-zA-Z0-9._-]{1,100}$/; - interface RepoSelectorProps { selectedOrgs: string[]; orgEntries?: OrgEntry[]; // Pre-fetched org entries — skip internal fetchOrgs when provided diff --git a/src/app/components/settings/Section.tsx b/src/app/components/settings/Section.tsx index 2760406a..061a4cf1 100644 --- a/src/app/components/settings/Section.tsx +++ b/src/app/components/settings/Section.tsx @@ -1,10 +1,13 @@ -import { JSX } from "solid-js"; +import { JSX, Show } from "solid-js"; -export default function Section(props: { title: string; children: JSX.Element }) { +export default function Section(props: { title: string; description?: string; children: JSX.Element }) { return (

{props.title}

+ +

{props.description}

+
{props.children}
diff --git a/src/app/components/settings/SettingsPage.tsx b/src/app/components/settings/SettingsPage.tsx index 258f55c8..9c956f99 100644 --- a/src/app/components/settings/SettingsPage.tsx +++ b/src/app/components/settings/SettingsPage.tsx @@ -1,4 +1,5 @@ import { createSignal, createMemo, Show, For, onCleanup, onMount } from "solid-js"; +import { getRelayStatus } from "../../lib/mcp-relay"; import { useNavigate } from "@solidjs/router"; import { config, updateConfig, setMonitoredRepo } from "../../stores/config"; import type { Config } from "../../stores/config"; @@ -687,7 +688,62 @@ export default function SettingsPage() { - {/* Section 8: Data */} + {/* Section 8: MCP Server Relay */} +
+ + saveWithFeedback({ mcpRelayEnabled: e.currentTarget.checked })} + /> + + + + + {getRelayStatus() === "connected" + ? "Connected" + : getRelayStatus() === "connecting" + ? "Connecting..." + : "Not connected"} + + + + { + const port = parseInt(e.currentTarget.value, 10); + if (port >= 1024 && port <= 65535) { + saveWithFeedback({ mcpRelayPort: port }); + } else { + e.currentTarget.value = String(config.mcpRelayPort); + } + }} + /> + + +
+ + {/* Section 9: Data */}
{/* Authentication method */} 0.5 ? "#000000" : "#ffffff"; -} - -/** - * Formats a duration between two ISO timestamps as a human-readable string. - * Example outputs: "2m 34s", "1h 12m", "45s" - */ -export function formatDuration(startedAt: string, completedAt: string | null): string { - if (!startedAt) return "--"; - if (!completedAt) return "--"; - const diffMs = Date.parse(completedAt) - Date.parse(startedAt); - if (isNaN(diffMs) || diffMs <= 0) return "--"; - const totalSec = Math.floor(diffMs / 1000); - const h = Math.floor(totalSec / 3600); - const m = Math.floor((totalSec % 3600) / 60); - const s = totalSec % 60; - const parts: string[] = []; - if (h > 0) parts.push(`${h}h`); - if (m > 0) parts.push(`${m}m`); - if (s > 0) parts.push(`${s}s`); - if (parts.length === 0) return diffMs > 0 ? "<1s" : "--"; - return parts.join(" "); -} - -/** - * Categorizes a PR by size based on total lines changed. - */ -export function prSizeCategory(additions: number, deletions: number): "XS" | "S" | "M" | "L" | "XL" { - const total = (additions || 0) + (deletions || 0); - if (total < 10) return "XS"; - if (total < 100) return "S"; - if (total < 500) return "M"; - if (total < 1000) return "L"; - return "XL"; -} - -/** - * Derives the roles a user has in a PR/issue (author, reviewer, assignee). - * Uses case-insensitive comparison since GitHub logins are case-insensitive. 
- */ -export function deriveInvolvementRoles( - userLogin: string, - authorLogin: string, - assigneeLogins: string[], - reviewerLogins: string[], - isUpstream?: boolean, -): ("author" | "reviewer" | "assignee" | "involved")[] { - if (!userLogin) return []; - const login = userLogin.toLowerCase(); - const roles: ("author" | "reviewer" | "assignee" | "involved")[] = []; - if (authorLogin.toLowerCase() === login) roles.push("author"); - if (reviewerLogins.some((r) => r.toLowerCase() === login)) roles.push("reviewer"); - if (assigneeLogins.some((a) => a.toLowerCase() === login)) roles.push("assignee"); - if (roles.length === 0 && isUpstream) roles.push("involved"); - return roles; -} - -/** - * Formats a number in compact form (e.g., 1500 → "1.5k"). - */ -export function formatCount(n: number): string { - if (n >= 1000) { - const k = n / 1000; - return k % 1 === 0 ? `${k}k` : `${parseFloat(k.toFixed(1))}k`; - } - return String(n); -} - -/** - * Formats a star count in compact form with M suffix for millions. - * Unlike formatCount, drops decimals above 10k (e.g., 15000 → "15k" not "15.0k"). - */ -export function formatStarCount(count: number): string { - if (count >= 1000000) return `${parseFloat((count / 1000000).toFixed(1))}M`; - if (count >= 10000) return `${Math.round(count / 1000)}k`; - if (count >= 1000) return `${parseFloat((count / 1000).toFixed(1))}k`; - return String(count); -} +// Re-exports from shared/format for backward compat with existing importers. +export { relativeTime, shortRelativeTime, labelTextColor, formatDuration, prSizeCategory, deriveInvolvementRoles, formatCount, formatStarCount } from "../../shared/format"; diff --git a/src/app/lib/mcp-relay.ts b/src/app/lib/mcp-relay.ts new file mode 100644 index 00000000..c57e1ec0 --- /dev/null +++ b/src/app/lib/mcp-relay.ts @@ -0,0 +1,384 @@ +// ── MCP WebSocket relay client ──────────────────────────────────────────────── +// Browser-side relay that exposes dashboard data to a local MCP server. 
+// No @modelcontextprotocol/sdk dependency — plain WebSocket + JSON-RPC 2.0. + +import { createSignal, createEffect } from "solid-js"; +import { METHODS, NOTIFICATIONS } from "../../shared/protocol"; +import { config } from "../stores/config"; +import { getCoreRateLimit, getGraphqlRateLimit } from "../services/github"; +import type { Issue, PullRequest, WorkflowRun } from "../../shared/types"; + +// ── Types ───────────────────────────────────────────────────────────────────── + +interface RelaySnapshot { + issues: Issue[]; + pullRequests: PullRequest[]; + workflowRuns: WorkflowRun[]; + lastUpdatedAt: number; +} + +interface JsonRpcRequest { + jsonrpc: "2.0"; + id: string | number | null; + method: string; + params?: Record; +} + +interface JsonRpcResponse { + jsonrpc: "2.0"; + id: string | number | null; + result?: unknown; + error?: { code: number; message: string }; +} + +// ── State ───────────────────────────────────────────────────────────────────── + +const BACKOFF_MS = [1000, 10000, 30000, 60000, 300000] as const; + +let _ws: WebSocket | null = null; +let _deliberateDisconnect = false; +let _backoffIndex = 0; +let _backoffTimer: ReturnType | null = null; +let _snapshot: RelaySnapshot | null = null; + +const [_relayStatus, _setRelayStatus] = createSignal<"connected" | "connecting" | "disconnected">("disconnected"); + +export function getRelayStatus(): "connected" | "connecting" | "disconnected" { + return _relayStatus(); +} + +// ── Snapshot ────────────────────────────────────────────────────────────────── + +export function updateRelaySnapshot(data: { + issues: Issue[]; + pullRequests: PullRequest[]; + workflowRuns: WorkflowRun[]; + lastUpdatedAt: number; +}): void { + _snapshot = { ...data }; +} + +function getRelaySnapshot(): RelaySnapshot | null { + return _snapshot; +} + +const SNAPSHOT_METHODS: Set = new Set([ + METHODS.GET_DASHBOARD_SUMMARY, + METHODS.GET_OPEN_PRS, + METHODS.GET_OPEN_ISSUES, + METHODS.GET_FAILING_ACTIONS, + METHODS.GET_PR_DETAILS, 
+]); + +// ── WebSocket connection ─────────────────────────────────────────────────────── + +function clearBackoffTimer(): void { + if (_backoffTimer !== null) { + clearTimeout(_backoffTimer); + _backoffTimer = null; + } +} + +function sendConfigUpdate(ws: WebSocket): void { + if (ws.readyState !== WebSocket.OPEN) return; + // Send fields directly in params (not nested under config:) to match ConfigUpdatePayloadSchema. + const notification = { + jsonrpc: "2.0", + method: NOTIFICATIONS.CONFIG_UPDATE, + params: { + selectedRepos: config.selectedRepos, + trackedUsers: config.trackedUsers, + upstreamRepos: config.upstreamRepos, + monitoredRepos: config.monitoredRepos, + }, + }; + ws.send(JSON.stringify(notification)); +} + +function sendResponse(ws: WebSocket, response: JsonRpcResponse): void { + if (ws.readyState !== WebSocket.OPEN) return; + ws.send(JSON.stringify(response)); +} + +function handleRequest(ws: WebSocket, req: JsonRpcRequest): void { + const id = req.id; + + const snapshot = getRelaySnapshot(); + + if (SNAPSHOT_METHODS.has(req.method) && !snapshot) { + sendResponse(ws, { + jsonrpc: "2.0", + id, + error: { code: -32002, message: "Dashboard data not yet loaded" }, + }); + return; + } + + switch (req.method) { + case METHODS.GET_DASHBOARD_SUMMARY: { + // Relay snapshot is inherently scoped to the user's items (SPA uses `involves:{user}`). + // The `scope` param is intentionally ignored — relay always reflects the user's dashboard. 
+ const s = snapshot!; + const openPRs = s.pullRequests.filter((p) => p.state === "open"); + const result = { + openPRCount: openPRs.length, + openIssueCount: s.issues.filter((i) => i.state === "open").length, + failingRunCount: s.workflowRuns.filter( + (r) => r.conclusion === "failure" || r.conclusion === "timed_out" + ).length, + needsReviewCount: openPRs.filter((p) => p.reviewDecision === "REVIEW_REQUIRED").length, + approvedUnmergedCount: openPRs.filter((p) => p.reviewDecision === "APPROVED").length, + }; + sendResponse(ws, { jsonrpc: "2.0", id, result }); + break; + } + + case METHODS.GET_OPEN_PRS: { + const params = req.params ?? {}; + let prs = snapshot!.pullRequests.filter((p) => p.state === "open"); + if (typeof params["repo"] === "string" && params["repo"]) { + prs = prs.filter((p) => p.repoFullName === params["repo"]); + } + if (typeof params["status"] === "string" && params["status"]) { + const status = params["status"]; + switch (status) { + case "draft": + prs = prs.filter((p) => p.draft); + break; + case "needs_review": + prs = prs.filter((p) => !p.draft && p.reviewDecision === "REVIEW_REQUIRED"); + break; + case "failing": + prs = prs.filter((p) => p.checkStatus === "failure"); + break; + case "approved": + prs = prs.filter((p) => p.reviewDecision === "APPROVED"); + break; + // "all" and unknown values: no filter + } + } + sendResponse(ws, { jsonrpc: "2.0", id, result: prs }); + break; + } + + case METHODS.GET_OPEN_ISSUES: { + const params = req.params ?? {}; + let issues = snapshot!.issues.filter((i) => i.state === "open"); + if (typeof params["repo"] === "string" && params["repo"]) { + issues = issues.filter((i) => i.repoFullName === params["repo"]); + } + sendResponse(ws, { jsonrpc: "2.0", id, result: issues }); + break; + } + + case METHODS.GET_FAILING_ACTIONS: { + const params = req.params ?? 
{}; + let runs = snapshot!.workflowRuns.filter( + (r) => r.status === "in_progress" || r.conclusion === "failure" || r.conclusion === "timed_out" + ); + if (typeof params["repo"] === "string" && params["repo"]) { + runs = runs.filter((r) => r.repoFullName === params["repo"]); + } + sendResponse(ws, { jsonrpc: "2.0", id, result: runs }); + break; + } + + case METHODS.GET_PR_DETAILS: { + const params = req.params ?? {}; + const prId = params["id"]; + const prNumber = params["number"]; + const prRepo = params["repo"]; + let pr: PullRequest | undefined; + if (typeof prId === "number") { + pr = snapshot!.pullRequests.find((p) => p.id === prId); + } else if (typeof prNumber === "number" && typeof prRepo === "string") { + pr = snapshot!.pullRequests.find( + (p) => p.number === prNumber && p.repoFullName === prRepo + ); + } + if (!pr) { + sendResponse(ws, { jsonrpc: "2.0", id, result: null }); + } else { + sendResponse(ws, { jsonrpc: "2.0", id, result: pr }); + } + break; + } + + case METHODS.GET_RATE_LIMIT: { + const core = getCoreRateLimit(); + const graphql = getGraphqlRateLimit(); + if (!core && !graphql) { + sendResponse(ws, { + jsonrpc: "2.0", + id, + error: { code: -32002, message: "Rate limit data not yet available" }, + }); + } else { + sendResponse(ws, { + jsonrpc: "2.0", + id, + result: { + core: core + ? { limit: core.limit, remaining: core.remaining, resetAt: core.resetAt.toISOString() } + : null, + graphql: graphql + ? 
{ limit: graphql.limit, remaining: graphql.remaining, resetAt: graphql.resetAt.toISOString() } + : null, + }, + }); + } + break; + } + + case METHODS.GET_CONFIG: { + sendResponse(ws, { + jsonrpc: "2.0", + id, + result: { + selectedRepos: config.selectedRepos, + trackedUsers: config.trackedUsers, + upstreamRepos: config.upstreamRepos, + monitoredRepos: config.monitoredRepos, + }, + }); + break; + } + + case METHODS.GET_REPOS: { + sendResponse(ws, { jsonrpc: "2.0", id, result: config.selectedRepos }); + break; + } + + default: { + sendResponse(ws, { + jsonrpc: "2.0", + id, + error: { code: -32601, message: `Method not found: ${req.method}` }, + }); + break; + } + } +} + +export function connectRelay(port: number): void { + // Close existing connection before opening a new one + if (_ws) { + _ws.onopen = null; + _ws.onmessage = null; + _ws.onclose = null; + _ws.onerror = null; + _ws.close(); + _ws = null; + } + + _deliberateDisconnect = false; + _setRelayStatus("connecting"); + + let ws: WebSocket; + try { + ws = new WebSocket(`ws://127.0.0.1:${port}`); + } catch (err) { + console.warn("[mcp-relay] WebSocket construction failed:", err); + _setRelayStatus("disconnected"); + scheduleReconnect(port); + return; + } + _ws = ws; + + ws.onopen = () => { + _backoffIndex = 0; + _setRelayStatus("connected"); + sendConfigUpdate(ws); + }; + + ws.onmessage = (event: MessageEvent) => { + let req: JsonRpcRequest; + try { + req = JSON.parse(event.data as string) as JsonRpcRequest; + } catch { + console.warn("[mcp-relay] Failed to parse incoming message"); + return; + } + if (req.jsonrpc !== "2.0" || typeof req.method !== "string") return; + handleRequest(ws, req); + }; + + ws.onclose = () => { + if (_ws === ws) { + _ws = null; + _setRelayStatus("disconnected"); + } + if (!_deliberateDisconnect) { + scheduleReconnect(port); + } + }; + + ws.onerror = () => { + // onclose fires after onerror — let onclose handle reconnect logic + console.warn("[mcp-relay] WebSocket error"); + }; +} + 
+function scheduleReconnect(port: number): void { + clearBackoffTimer(); + const delay = BACKOFF_MS[Math.min(_backoffIndex, BACKOFF_MS.length - 1)]; + _backoffIndex = Math.min(_backoffIndex + 1, BACKOFF_MS.length - 1); + _backoffTimer = setTimeout(() => { + _backoffTimer = null; + if (!_deliberateDisconnect && config.mcpRelayEnabled) { + connectRelay(port); + } + }, delay); +} + +export function disconnectRelay(): void { + _deliberateDisconnect = true; + clearBackoffTimer(); + if (_ws) { + _ws.onopen = null; + _ws.onmessage = null; + _ws.onclose = null; + _ws.onerror = null; + _ws.close(); + _ws = null; + } + _setRelayStatus("disconnected"); +} + +// ── Cleanup on page unload ───────────────────────────────────────────────────── + +window.addEventListener("pagehide", () => disconnectRelay()); +window.addEventListener("beforeunload", () => disconnectRelay()); + +// ── Init ────────────────────────────────────────────────────────────────────── + +let _initialized = false; + +export function initMcpRelay(): void { + if (_initialized) return; + _initialized = true; + + // Watch mcpRelayEnabled — connect when true, disconnect when false + createEffect(() => { + const enabled = config.mcpRelayEnabled; + const port = config.mcpRelayPort; + if (enabled) { + connectRelay(port); + } else { + disconnectRelay(); + } + }); + + // Send config_update whenever relevant config fields change while connected + createEffect(() => { + // Read reactive fields to establish tracking subscriptions + void config.selectedRepos; + void config.trackedUsers; + void config.upstreamRepos; + void config.monitoredRepos; + + if (_ws && _ws.readyState === WebSocket.OPEN) { + sendConfigUpdate(_ws); + } + }); +} diff --git a/src/app/services/api.ts b/src/app/services/api.ts index 48637a0a..4a1e26d6 100644 --- a/src/app/services/api.ts +++ b/src/app/services/api.ts @@ -2,6 +2,11 @@ import { getClient, cachedRequest, updateGraphqlRateLimit } from "./github"; import { pushNotification } from 
"../lib/errors"; import type { ApiCallSource } from "./api-usage"; import type { TrackedUser } from "../stores/config"; +import { VALID_REPO_NAME, VALID_TRACKED_LOGIN, SEARCH_RESULT_CAP } from "../../shared/validation"; +import type { Issue, PullRequest, WorkflowRun, RepoRef, RepoEntry, OrgEntry, CheckStatus, ApiError } from "../../shared/types"; + +// ── Re-exports from shared/types (backward compat for existing importers) ───── +export type { Issue, PullRequest, WorkflowRun, RepoRef, RepoEntry, OrgEntry, CheckStatus, ApiError, RateLimitInfo, DashboardSummary } from "../../shared/types"; // ── Types ──────────────────────────────────────────────────────────────────── @@ -11,107 +16,6 @@ interface GraphQLRateLimit { resetAt: string; } -export interface OrgEntry { - login: string; - avatarUrl: string; - type: "org" | "user"; -} - -export interface RepoRef { - owner: string; - name: string; - fullName: string; -} - -export interface RepoEntry extends RepoRef { - pushedAt: string | null; -} - -export interface Issue { - id: number; - number: number; - title: string; - state: string; - htmlUrl: string; - createdAt: string; - updatedAt: string; - userLogin: string; - userAvatarUrl: string; - labels: { name: string; color: string }[]; - assigneeLogins: string[]; - repoFullName: string; - comments: number; - starCount?: number; - surfacedBy?: string[]; -} - -export interface CheckStatus { - status: "success" | "failure" | "pending" | "conflict" | null; -} - -export interface PullRequest { - id: number; - number: number; - title: string; - state: string; - draft: boolean; - htmlUrl: string; - createdAt: string; - updatedAt: string; - userLogin: string; - userAvatarUrl: string; - headSha: string; - headRef: string; - baseRef: string; - assigneeLogins: string[]; - reviewerLogins: string[]; - repoFullName: string; - checkStatus: CheckStatus["status"]; - additions: number; - deletions: number; - changedFiles: number; - comments: number; - reviewThreads: number; - labels: { 
name: string; color: string }[]; - reviewDecision: "APPROVED" | "CHANGES_REQUESTED" | "REVIEW_REQUIRED" | null; - totalReviewCount: number; - starCount?: number; - /** False when only light fields are loaded (phase 1); true/undefined when fully enriched */ - enriched?: boolean; - /** GraphQL global node ID — used for hot-poll status updates */ - nodeId?: string; - surfacedBy?: string[]; -} - -export interface WorkflowRun { - id: number; - name: string; - status: string; - conclusion: string | null; - event: string; - workflowId: number; - headSha: string; - headBranch: string; - runNumber: number; - htmlUrl: string; - createdAt: string; - updatedAt: string; - repoFullName: string; - isPrRun: boolean; - runStartedAt: string; - completedAt: string | null; - runAttempt: number; - displayTitle: string; - actorLogin: string; -} - -export interface ApiError { - repo: string; - statusCode: number | null; - message: string; - retryable: boolean; -} - // ── Raw GitHub API shapes (minimal) ───────────────────────────────────────── interface RawOrg { @@ -208,13 +112,6 @@ function extractSearchPartialData(err: unknown): T | null { return null; } -const VALID_REPO_NAME = /^[A-Za-z0-9._-]{1,100}\/[A-Za-z0-9._-]{1,100}$/; -// Allows alphanumeric/hyphen base (1-39 chars) with optional literal [bot] suffix for GitHub -// App bot accounts. Case-sensitive [bot] is intentional — GitHub always uses lowercase. 
-const VALID_TRACKED_LOGIN = /^[A-Za-z0-9-]{1,39}(\[bot\])?$/; - -const SEARCH_RESULT_CAP = 1000; - function chunkArray(arr: T[], size: number): T[][] { const chunks: T[][] = []; for (let i = 0; i < arr.length; i += size) { diff --git a/src/app/services/github.ts b/src/app/services/github.ts index c4bbefc9..6b27dc67 100644 --- a/src/app/services/github.ts +++ b/src/app/services/github.ts @@ -13,13 +13,9 @@ const GitHubOctokit = Octokit.plugin(throttling, retry, paginateRest); // ── Types ──────────────────────────────────────────────────────────────────── -type GitHubOctokitInstance = InstanceType; +import type { RateLimitInfo } from "../../shared/types"; -export interface RateLimitInfo { - limit: number; - remaining: number; - resetAt: Date; -} +type GitHubOctokitInstance = InstanceType; // ── Rate limit signals ─────────────────────────────────────────────────────── diff --git a/src/app/stores/config.ts b/src/app/stores/config.ts index 926724ca..89f6df84 100644 --- a/src/app/stores/config.ts +++ b/src/app/stores/config.ts @@ -1,13 +1,17 @@ -import { z } from "zod"; import { createStore, produce } from "solid-js/store"; import { createEffect, onCleanup } from "solid-js"; import { pushNotification } from "../lib/errors"; +import { ConfigSchema, RepoRefSchema, THEME_OPTIONS } from "../../shared/schemas"; +import type { Config, ThemeId } from "../../shared/schemas"; +import { z } from "zod"; + +// ── Re-exports from shared/schemas (backward compat for existing importers) ─── +export { ConfigSchema, RepoRefSchema, TrackedUserSchema, THEME_OPTIONS, type Config, type TrackedUser, type ThemeId } from "../../shared/schemas"; export const CONFIG_STORAGE_KEY = "github-tracker:config"; -// Light themes first, then dark themes. "auto" uses system preference (corporate/dim). 
-export const THEME_OPTIONS = ["auto", "corporate", "cupcake", "light", "nord", "dim", "dracula", "dark", "forest"] as const; -export type ThemeId = (typeof THEME_OPTIONS)[number]; +// ── Browser-only theme helpers ──────────────────────────────────────────────── +// These use window.matchMedia and must stay in the browser layer. export const DARK_THEMES: ReadonlySet = new Set(["dim", "dracula", "dark", "forest"]); export const AUTO_LIGHT_THEME = "corporate" as const; export const AUTO_DARK_THEME = "dim" as const; @@ -18,56 +22,6 @@ export function resolveTheme(theme: ThemeId): string { return prefersDark ? AUTO_DARK_THEME : AUTO_LIGHT_THEME; } -const REPO_SEGMENT = /^[A-Za-z0-9._-]{1,100}$/; - -export const RepoRefSchema = z.object({ - owner: z.string().regex(REPO_SEGMENT), - name: z.string().regex(REPO_SEGMENT), - fullName: z.string().regex(/^[A-Za-z0-9._-]{1,100}\/[A-Za-z0-9._-]{1,100}$/), -}); - -export const TrackedUserSchema = z.object({ - login: z.string(), - avatarUrl: z.string().url().refine( - (u) => u.startsWith("https://avatars.githubusercontent.com/"), - "Avatar URL must be from GitHub CDN" - ), - name: z.string().nullable(), - type: z.enum(["user", "bot"]).default("user"), -}); - -export type TrackedUser = z.infer; - -export const ConfigSchema = z.object({ - selectedOrgs: z.array(z.string()).default([]), - selectedRepos: z.array(RepoRefSchema).default([]), - upstreamRepos: z.array(RepoRefSchema).default([]), - monitoredRepos: z.array(RepoRefSchema).max(10).default([]), - trackedUsers: z.array(TrackedUserSchema).max(10).default([]), - refreshInterval: z.number().min(0).max(3600).default(300), - hotPollInterval: z.number().min(10).max(120).default(30), - maxWorkflowsPerRepo: z.number().min(1).max(20).default(5), - maxRunsPerWorkflow: z.number().min(1).max(10).default(3), - notifications: z - .object({ - enabled: z.boolean().default(false), - issues: z.boolean().default(true), - pullRequests: z.boolean().default(true), - workflowRuns: 
z.boolean().default(true), - }) - .default({ enabled: false, issues: true, pullRequests: true, workflowRuns: true }), - theme: z.enum(THEME_OPTIONS).default("auto"), - viewDensity: z.enum(["compact", "comfortable"]).default("comfortable"), - itemsPerPage: z.number().min(10).max(100).default(25), - defaultTab: z.enum(["issues", "pullRequests", "actions", "tracked"]).default("issues"), - rememberLastTab: z.boolean().default(true), - onboardingComplete: z.boolean().default(false), - authMethod: z.enum(["oauth", "pat"]).default("oauth"), - enableTracking: z.boolean().default(false), -}); - -export type Config = z.infer; - export function loadConfig(): Config { try { const raw = localStorage.getItem(CONFIG_STORAGE_KEY); @@ -126,6 +80,14 @@ export function setMonitoredRepo(repo: z.infer, monitored: ); } +export function setMcpRelayEnabled(enabled: boolean): void { + updateConfig({ mcpRelayEnabled: enabled }); +} + +export function setMcpRelayPort(port: number): void { + updateConfig({ mcpRelayPort: port }); +} + export function resetConfig(): void { const defaults = ConfigSchema.parse({}); setConfig(defaults); diff --git a/src/shared/format.ts b/src/shared/format.ts new file mode 100644 index 00000000..4a73871e --- /dev/null +++ b/src/shared/format.ts @@ -0,0 +1,136 @@ +// ── Shared format utilities ─────────────────────────────────────────────────── +// Browser-agnostic formatting functions shared between the SPA and MCP server. + +const rtf = new Intl.RelativeTimeFormat("en", { numeric: "auto" }); + +/** + * Formats an ISO date string as a relative time string (e.g., "2 hours ago"). + * Uses Intl.RelativeTimeFormat for natural language output. 
+ */ +export function relativeTime(isoString: string): string { + const diffMs = Date.now() - Date.parse(isoString); + if (isNaN(diffMs)) return ""; + if (diffMs < 0) return rtf.format(0, "second"); + const diffSec = Math.floor(diffMs / 1000); + + if (diffSec < 60) return rtf.format(-diffSec, "second"); + const diffMin = Math.floor(diffSec / 60); + if (diffMin < 60) return rtf.format(-diffMin, "minute"); + const diffHr = Math.floor(diffMin / 60); + if (diffHr < 24) return rtf.format(-diffHr, "hour"); + const diffDay = Math.floor(diffHr / 24); + if (diffDay < 30) return rtf.format(-diffDay, "day"); + const diffMonth = Math.floor(diffDay / 30); + if (diffMonth < 12) return rtf.format(-diffMonth, "month"); + return rtf.format(-Math.floor(diffMonth / 12), "year"); +} + +/** + * Formats an ISO date string as a compact relative time string (e.g., "3h", "7d", "2mo"). + * Returns "now" for differences under 60 seconds or future timestamps (clock skew). + * Returns "" for invalid input. + */ +export function shortRelativeTime(isoString: string): string { + const diffMs = Date.now() - Date.parse(isoString); + if (isNaN(diffMs)) return ""; + if (diffMs < 0) return "now"; + const diffSec = Math.floor(diffMs / 1000); + if (diffSec < 60) return "now"; + const diffMin = Math.floor(diffSec / 60); + if (diffMin < 60) return `${diffMin}m`; + const diffHr = Math.floor(diffMin / 60); + if (diffHr < 24) return `${diffHr}h`; + const diffDay = Math.floor(diffHr / 24); + if (diffDay < 30) return `${diffDay}d`; + const diffMonth = Math.floor(diffDay / 30); + if (diffMonth < 12) return `${diffMonth}mo`; + return `${Math.floor(diffMonth / 12)}y`; +} + +/** + * Computes text color (black or white) for a GitHub label hex color. + * Based on perceived luminance. 
+ */ +export function labelTextColor(hexColor: string): string { + const r = parseInt(hexColor.slice(0, 2), 16); + const g = parseInt(hexColor.slice(2, 4), 16); + const b = parseInt(hexColor.slice(4, 6), 16); + const luminance = (0.299 * r + 0.587 * g + 0.114 * b) / 255; + return luminance > 0.5 ? "#000000" : "#ffffff"; +} + +/** + * Formats a duration between two ISO timestamps as a human-readable string. + * Example outputs: "2m 34s", "1h 12m", "45s" + */ +export function formatDuration(startedAt: string, completedAt: string | null): string { + if (!startedAt) return "--"; + if (!completedAt) return "--"; + const diffMs = Date.parse(completedAt) - Date.parse(startedAt); + if (isNaN(diffMs) || diffMs <= 0) return "--"; + const totalSec = Math.floor(diffMs / 1000); + const h = Math.floor(totalSec / 3600); + const m = Math.floor((totalSec % 3600) / 60); + const s = totalSec % 60; + const parts: string[] = []; + if (h > 0) parts.push(`${h}h`); + if (m > 0) parts.push(`${m}m`); + if (s > 0) parts.push(`${s}s`); + if (parts.length === 0) return diffMs > 0 ? "<1s" : "--"; + return parts.join(" "); +} + +/** + * Categorizes a PR by size based on total lines changed. + */ +export function prSizeCategory(additions: number, deletions: number): "XS" | "S" | "M" | "L" | "XL" { + const total = (additions || 0) + (deletions || 0); + if (total < 10) return "XS"; + if (total < 100) return "S"; + if (total < 500) return "M"; + if (total < 1000) return "L"; + return "XL"; +} + +/** + * Derives the roles a user has in a PR/issue (author, reviewer, assignee). + * Uses case-insensitive comparison since GitHub logins are case-insensitive. 
+ */ +export function deriveInvolvementRoles( + userLogin: string, + authorLogin: string, + assigneeLogins: string[], + reviewerLogins: string[], + isUpstream?: boolean, +): ("author" | "reviewer" | "assignee" | "involved")[] { + if (!userLogin) return []; + const login = userLogin.toLowerCase(); + const roles: ("author" | "reviewer" | "assignee" | "involved")[] = []; + if (authorLogin.toLowerCase() === login) roles.push("author"); + if (reviewerLogins.some((r) => r.toLowerCase() === login)) roles.push("reviewer"); + if (assigneeLogins.some((a) => a.toLowerCase() === login)) roles.push("assignee"); + if (roles.length === 0 && isUpstream) roles.push("involved"); + return roles; +} + +/** + * Formats a number in compact form (e.g., 1500 → "1.5k"). + */ +export function formatCount(n: number): string { + if (n >= 1000) { + const k = n / 1000; + return k % 1 === 0 ? `${k}k` : `${parseFloat(k.toFixed(1))}k`; + } + return String(n); +} + +/** + * Formats a star count in compact form with M suffix for millions. + * Unlike formatCount, drops decimals above 10k (e.g., 15000 → "15k" not "15.0k"). + */ +export function formatStarCount(count: number): string { + if (count >= 1000000) return `${parseFloat((count / 1000000).toFixed(1))}M`; + if (count >= 10000) return `${Math.round(count / 1000)}k`; + if (count >= 1000) return `${parseFloat((count / 1000).toFixed(1))}k`; + return String(count); +} diff --git a/src/shared/protocol.ts b/src/shared/protocol.ts new file mode 100644 index 00000000..58d312d4 --- /dev/null +++ b/src/shared/protocol.ts @@ -0,0 +1,17 @@ +// ── MCP protocol constants ──────────────────────────────────────────────────── +// Method names and notification types for the GitHub Tracker MCP server. 
+ +export const METHODS = { +  GET_DASHBOARD_SUMMARY: "get_dashboard_summary", +  GET_OPEN_PRS: "get_open_prs", +  GET_OPEN_ISSUES: "get_open_issues", +  GET_FAILING_ACTIONS: "get_failing_actions", +  GET_PR_DETAILS: "get_pr_details", +  GET_RATE_LIMIT: "get_rate_limit", +  GET_CONFIG: "get_config", +  GET_REPOS: "get_repos", +} as const; + +export const NOTIFICATIONS = { +  CONFIG_UPDATE: "config_update", +} as const; diff --git a/src/shared/schemas.ts b/src/shared/schemas.ts new file mode 100644 index 00000000..3714069a --- /dev/null +++ b/src/shared/schemas.ts @@ -0,0 +1,62 @@ +// ── Shared Zod schemas ──────────────────────────────────────────────────────── +// Browser-agnostic schemas shared between the SPA and MCP server. +// Note: DARK_THEMES, resolveTheme, AUTO_LIGHT_THEME, AUTO_DARK_THEME are +// intentionally kept in src/app/stores/config.ts (they use window.matchMedia). + +import { z } from "zod"; +import { VALID_TRACKED_LOGIN } from "./validation.js"; + +export const THEME_OPTIONS = ["auto", "corporate", "cupcake", "light", "nord", "dim", "dracula", "dark", "forest"] as const; +export type ThemeId = (typeof THEME_OPTIONS)[number]; + +const REPO_SEGMENT = /^[A-Za-z0-9._-]{1,100}$/; + +export const RepoRefSchema = z.object({ +  owner: z.string().regex(REPO_SEGMENT), +  name: z.string().regex(REPO_SEGMENT), +  fullName: z.string().regex(/^[A-Za-z0-9._-]{1,100}\/[A-Za-z0-9._-]{1,100}$/), +}); + +export const TrackedUserSchema = z.object({ +  login: z.string().regex(VALID_TRACKED_LOGIN), +  avatarUrl: z.string().url().refine( +    (u) => u.startsWith("https://avatars.githubusercontent.com/"), +    "Avatar URL must be from GitHub CDN" +  ), +  name: z.string().nullable(), +  type: z.enum(["user", "bot"]).default("user"), +}); + +export type TrackedUser = z.infer<typeof TrackedUserSchema>; + +export const ConfigSchema = z.object({ +  selectedOrgs: z.array(z.string()).default([]), +  selectedRepos: z.array(RepoRefSchema).default([]), +  upstreamRepos: z.array(RepoRefSchema).default([]), +  monitoredRepos: 
z.array(RepoRefSchema).max(10).default([]), +  trackedUsers: z.array(TrackedUserSchema).max(10).default([]), +  refreshInterval: z.number().min(0).max(3600).default(300), +  hotPollInterval: z.number().min(10).max(120).default(30), +  maxWorkflowsPerRepo: z.number().min(1).max(20).default(5), +  maxRunsPerWorkflow: z.number().min(1).max(10).default(3), +  notifications: z +    .object({ +      enabled: z.boolean().default(false), +      issues: z.boolean().default(true), +      pullRequests: z.boolean().default(true), +      workflowRuns: z.boolean().default(true), +    }) +    .default({ enabled: false, issues: true, pullRequests: true, workflowRuns: true }), +  theme: z.enum(THEME_OPTIONS).default("auto"), +  viewDensity: z.enum(["compact", "comfortable"]).default("comfortable"), +  itemsPerPage: z.number().min(10).max(100).default(25), +  defaultTab: z.enum(["issues", "pullRequests", "actions", "tracked"]).default("issues"), +  rememberLastTab: z.boolean().default(true), +  onboardingComplete: z.boolean().default(false), +  authMethod: z.enum(["oauth", "pat"]).default("oauth"), +  enableTracking: z.boolean().default(false), +  mcpRelayEnabled: z.boolean().default(false), +  mcpRelayPort: z.number().int().min(1024).max(65535).default(9876), +}); + +export type Config = z.infer<typeof ConfigSchema>; diff --git a/src/shared/tsconfig.json b/src/shared/tsconfig.json new file mode 100644 index 00000000..8ac9b866 --- /dev/null +++ b/src/shared/tsconfig.json @@ -0,0 +1,16 @@ +{ +  "compilerOptions": { +    "composite": true, +    "declaration": true, +    "declarationMap": true, +    "outDir": "../../dist/shared", +    "rootDir": ".", +    "module": "NodeNext", +    "moduleResolution": "nodenext", +    "target": "ES2022", +    "strict": true, +    "esModuleInterop": true, +    "skipLibCheck": true +  }, +  "include": ["./**/*.ts"] +} diff --git a/src/shared/types.ts b/src/shared/types.ts new file mode 100644 index 00000000..299a6af1 --- /dev/null +++ b/src/shared/types.ts @@ -0,0 +1,117 @@ +// ── Shared domain types 
─────────────────────────────────────────────────────── +// These are browser-agnostic types shared between the SPA and MCP server. + +export interface OrgEntry { + login: string; + avatarUrl: string; + type: "org" | "user"; +} + +export interface RepoRef { + owner: string; + name: string; + fullName: string; +} + +export interface RepoEntry extends RepoRef { + pushedAt: string | null; +} + +export interface Issue { + id: number; + number: number; + title: string; + state: string; + htmlUrl: string; + createdAt: string; + updatedAt: string; + userLogin: string; + userAvatarUrl: string; + labels: { name: string; color: string }[]; + assigneeLogins: string[]; + repoFullName: string; + comments: number; + starCount?: number; + surfacedBy?: string[]; +} + +export interface CheckStatus { + status: "success" | "failure" | "pending" | "conflict" | null; +} + +export interface PullRequest { + id: number; + number: number; + title: string; + state: string; + draft: boolean; + htmlUrl: string; + createdAt: string; + updatedAt: string; + userLogin: string; + userAvatarUrl: string; + headSha: string; + headRef: string; + baseRef: string; + assigneeLogins: string[]; + reviewerLogins: string[]; + repoFullName: string; + checkStatus: CheckStatus["status"]; + additions: number; + deletions: number; + changedFiles: number; + comments: number; + reviewThreads: number; + labels: { name: string; color: string }[]; + reviewDecision: "APPROVED" | "CHANGES_REQUESTED" | "REVIEW_REQUIRED" | null; + totalReviewCount: number; + starCount?: number; + /** False when only light fields are loaded (phase 1); true/undefined when fully enriched */ + enriched?: boolean; + /** GraphQL global node ID — used for hot-poll status updates */ + nodeId?: string; + surfacedBy?: string[]; +} + +export interface WorkflowRun { + id: number; + name: string; + status: string; + conclusion: string | null; + event: string; + workflowId: number; + headSha: string; + headBranch: string; + runNumber: number; + 
htmlUrl: string; + createdAt: string; + updatedAt: string; + repoFullName: string; + isPrRun: boolean; + runStartedAt: string; + completedAt: string | null; + runAttempt: number; + displayTitle: string; + actorLogin: string; +} + +export interface ApiError { + repo: string; + statusCode: number | null; + message: string; + retryable: boolean; +} + +export interface RateLimitInfo { + limit: number; + remaining: number; + resetAt: Date; +} + +export interface DashboardSummary { + openPRCount: number; + openIssueCount: number; + failingRunCount: number; + needsReviewCount: number; + approvedUnmergedCount: number; +} diff --git a/src/shared/validation.ts b/src/shared/validation.ts new file mode 100644 index 00000000..e64a0c0a --- /dev/null +++ b/src/shared/validation.ts @@ -0,0 +1,10 @@ +// ── Shared validation constants ─────────────────────────────────────────────── +// Browser-agnostic regex and constants used by both SPA and MCP server. + +export const VALID_REPO_NAME = /^[A-Za-z0-9._-]{1,100}\/[A-Za-z0-9._-]{1,100}$/; + +// Allows alphanumeric/hyphen base (1-39 chars) with optional literal [bot] suffix for GitHub +// App bot accounts. Case-sensitive [bot] is intentional — GitHub always uses lowercase. +export const VALID_TRACKED_LOGIN = /^[A-Za-z0-9-]{1,39}(\[bot\])?$/; + +export const SEARCH_RESULT_CAP = 1000; diff --git a/tests/app/lib/mcp-relay.test.ts b/tests/app/lib/mcp-relay.test.ts new file mode 100644 index 00000000..50dc74ee --- /dev/null +++ b/tests/app/lib/mcp-relay.test.ts @@ -0,0 +1,866 @@ +// ── MCP relay client unit tests ─────────────────────────────────────────────── +// Tests the SPA-side relay module (src/app/lib/mcp-relay.ts). +// WebSocket is mocked via a class constructor — happy-dom has no functional WebSocket. 
+ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { makeIssue, makePullRequest, makeWorkflowRun } from "../../helpers/factories"; + +// ── Module-level mocks ───────────────────────────────────────────────────────── + +const mockConfigStore = { +  mcpRelayEnabled: true, +  mcpRelayPort: 9876, +  selectedRepos: [{ owner: "owner", name: "repo", fullName: "owner/repo" }], +  trackedUsers: [], +  upstreamRepos: [], +  monitoredRepos: [], +}; + +vi.mock("../../../src/app/stores/config", () => ({ +  get config() { +    return mockConfigStore; +  }, +})); + +let _mockCoreRateLimit: { limit: number; remaining: number; resetAt: Date } | null = null; +let _mockGraphqlRateLimit: { limit: number; remaining: number; resetAt: Date } | null = null; + +vi.mock("../../../src/app/services/github", () => ({ +  getCoreRateLimit: () => _mockCoreRateLimit, +  getGraphqlRateLimit: () => _mockGraphqlRateLimit, +})); + +// ── Mock WebSocket factory ───────────────────────────────────────────────────── + +interface MockWs { +  readyState: number; +  send: ReturnType<typeof vi.fn>; +  close: ReturnType<typeof vi.fn>; +  onopen: ((e: Event) => void) | null; +  onmessage: ((e: MessageEvent) => void) | null; +  onclose: ((e: CloseEvent) => void) | null; +  onerror: ((e: Event) => void) | null; +  _triggerOpen(): void; +  _triggerMessage(data: string): void; +  _triggerClose(): void; +} + +/** + * Creates a single-instance WebSocket mock. The Constructor is a constructor function + * that can be called with `new`, returning the shared instance. 
+ */ +function makeSingleInstanceMock(): { ws: MockWs; Constructor: typeof WebSocket } { +  const ws: MockWs = { +    readyState: 0, +    send: vi.fn(), +    close: vi.fn().mockImplementation(function (this: MockWs) { +      this.readyState = 3; +    }), +    onopen: null, +    onmessage: null, +    onclose: null, +    onerror: null, +    _triggerOpen() { +      this.readyState = 1; +      this.onopen?.(new Event("open")); +    }, +    _triggerMessage(data: string) { +      this.onmessage?.(new MessageEvent("message", { data })); +    }, +    _triggerClose() { +      this.readyState = 3; +      this.onclose?.(new CloseEvent("close", { code: 1000, reason: "" })); +    }, +  }; + +  // The constructor explicitly returns the shared `ws` object, so `new Constructor()` always yields this instance +  function MockWsCtor(this: MockWs) { +    return ws; +  } +  MockWsCtor.OPEN = 1; +  MockWsCtor.CONNECTING = 0; +  MockWsCtor.CLOSING = 2; +  MockWsCtor.CLOSED = 3; + +  return { ws, Constructor: MockWsCtor as unknown as typeof WebSocket }; +} + +/** + * Creates a multi-instance WebSocket mock for reconnect/backoff tests. 
+ */ +function makeMultiInstanceMock(): { instances: MockWs[]; Constructor: typeof WebSocket } { + const instances: MockWs[] = []; + + function MockWsCtor(this: MockWs) { + const ws: MockWs = { + readyState: 0, + send: vi.fn(), + close: vi.fn().mockImplementation(function (this: MockWs) { + this.readyState = 3; + }), + onopen: null, + onmessage: null, + onclose: null, + onerror: null, + _triggerOpen() { + this.readyState = 1; + this.onopen?.(new Event("open")); + }, + _triggerMessage(data: string) { + this.onmessage?.(new MessageEvent("message", { data })); + }, + _triggerClose() { + this.readyState = 3; + this.onclose?.(new CloseEvent("close", { code: 1000, reason: "" })); + }, + }; + instances.push(ws); + return ws; + } + MockWsCtor.OPEN = 1; + MockWsCtor.CONNECTING = 0; + MockWsCtor.CLOSING = 2; + MockWsCtor.CLOSED = 3; + + return { instances, Constructor: MockWsCtor as unknown as typeof WebSocket }; +} + +// ── Helper: load module fresh and set up mock WS ────────────────────────────── + +async function loadModule(Constructor: typeof WebSocket) { + vi.resetModules(); + vi.stubGlobal("WebSocket", Constructor); + // Stub window.addEventListener for the module-level pagehide/beforeunload handlers + vi.stubGlobal("window", { + addEventListener: vi.fn(), + removeEventListener: vi.fn(), + }); + return import("../../../src/app/lib/mcp-relay"); +} + +// ── Tests ────────────────────────────────────────────────────────────────────── + +describe("updateRelaySnapshot / handleRequest", () => { + let mod: typeof import("../../../src/app/lib/mcp-relay"); + let ws: MockWs; + + beforeEach(async () => { + const mock = makeSingleInstanceMock(); + ws = mock.ws; + mod = await loadModule(mock.Constructor); + }); + + afterEach(() => { + vi.unstubAllGlobals(); + vi.restoreAllMocks(); + _mockCoreRateLimit = null; + _mockGraphqlRateLimit = null; + }); + + it("stores snapshot and returns PRs via GET_OPEN_PRS", () => { + const issues = [makeIssue({ state: "open" })]; + const prs = 
[makePullRequest({ state: "open", repoFullName: "owner/repo" })]; + const runs = [makeWorkflowRun({ conclusion: "success" })]; + + mod.updateRelaySnapshot({ issues, pullRequests: prs, workflowRuns: runs, lastUpdatedAt: Date.now() }); + + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + + mod.connectRelay(9876); + ws._triggerOpen(); + + ws._triggerMessage(JSON.stringify({ + jsonrpc: "2.0", + id: 1, + method: "get_open_prs", + params: {}, + })); + + const prResponse = responses.find((r) => (JSON.parse(r) as { id?: number }).id === 1); + expect(prResponse).toBeDefined(); + const parsed = JSON.parse(prResponse!) as { result: unknown[] }; + expect(parsed.result).toHaveLength(1); + }); + + it("returns -32002 error when snapshot is null and method needs it", () => { + // No updateRelaySnapshot called — _snapshot is null + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + + mod.connectRelay(9876); + ws._triggerOpen(); + + ws._triggerMessage(JSON.stringify({ + jsonrpc: "2.0", + id: 2, + method: "get_dashboard_summary", + params: { scope: "involves_me" }, + })); + + const response = responses.find((r) => (JSON.parse(r) as { id?: number }).id === 2); + expect(response).toBeDefined(); + const parsed = JSON.parse(response!) 
as { error: { code: number; message: string } }; + expect(parsed.error.code).toBe(-32002); + expect(parsed.error.message).toContain("not yet loaded"); + }); + + it("returns -32601 for unknown method", () => { + mod.updateRelaySnapshot({ + issues: [], + pullRequests: [], + workflowRuns: [], + lastUpdatedAt: Date.now(), + }); + + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + + mod.connectRelay(9876); + ws._triggerOpen(); + + ws._triggerMessage(JSON.stringify({ + jsonrpc: "2.0", + id: 3, + method: "completely_unknown_method", + params: {}, + })); + + const response = responses.find((r) => (JSON.parse(r) as { id?: number }).id === 3); + expect(response).toBeDefined(); + const parsed = JSON.parse(response!) as { error: { code: number } }; + expect(parsed.error.code).toBe(-32601); + }); +}); + +describe("GET_DASHBOARD_SUMMARY handler", () => { + let mod: typeof import("../../../src/app/lib/mcp-relay"); + let ws: MockWs; + + beforeEach(async () => { + const mock = makeSingleInstanceMock(); + ws = mock.ws; + mod = await loadModule(mock.Constructor); + }); + + afterEach(() => { + vi.unstubAllGlobals(); + vi.restoreAllMocks(); + }); + + it("computes correct summary counts from snapshot", () => { + const issues = [ + makeIssue({ state: "open" }), + makeIssue({ state: "open" }), + makeIssue({ state: "closed" }), + ]; + const prs = [ + makePullRequest({ state: "open", reviewDecision: "REVIEW_REQUIRED" }), + makePullRequest({ state: "open", reviewDecision: "APPROVED" }), + makePullRequest({ state: "closed" }), + ]; + const runs = [ + makeWorkflowRun({ conclusion: "failure" }), + makeWorkflowRun({ conclusion: "timed_out" }), + makeWorkflowRun({ conclusion: "success" }), + ]; + + mod.updateRelaySnapshot({ issues, pullRequests: prs, workflowRuns: runs, lastUpdatedAt: Date.now() }); + + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + + mod.connectRelay(9876); + ws._triggerOpen(); + + 
ws._triggerMessage(JSON.stringify({ + jsonrpc: "2.0", + id: 10, + method: "get_dashboard_summary", + params: { scope: "involves_me" }, + })); + + const response = responses.find((r) => (JSON.parse(r) as { id?: number }).id === 10); + expect(response).toBeDefined(); + const parsed = JSON.parse(response!) as { + result: { + openIssueCount: number; + openPRCount: number; + failingRunCount: number; + needsReviewCount: number; + approvedUnmergedCount: number; + }; + }; + expect(parsed.result.openIssueCount).toBe(2); + expect(parsed.result.openPRCount).toBe(2); + expect(parsed.result.failingRunCount).toBe(2); + expect(parsed.result.needsReviewCount).toBe(1); + expect(parsed.result.approvedUnmergedCount).toBe(1); + }); +}); + +describe("GET_OPEN_PRS repo filter", () => { + let mod: typeof import("../../../src/app/lib/mcp-relay"); + let ws: MockWs; + + beforeEach(async () => { + const mock = makeSingleInstanceMock(); + ws = mock.ws; + mod = await loadModule(mock.Constructor); + }); + + afterEach(() => { + vi.unstubAllGlobals(); + vi.restoreAllMocks(); + }); + + it("filters by repo when repo param is provided", () => { + const pr1 = makePullRequest({ state: "open", repoFullName: "owner/repo-a" }); + const pr2 = makePullRequest({ state: "open", repoFullName: "owner/repo-b" }); + mod.updateRelaySnapshot({ issues: [], pullRequests: [pr1, pr2], workflowRuns: [], lastUpdatedAt: Date.now() }); + + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + mod.connectRelay(9876); + ws._triggerOpen(); + + ws._triggerMessage(JSON.stringify({ + jsonrpc: "2.0", + id: 20, + method: "get_open_prs", + params: { repo: "owner/repo-a" }, + })); + + const response = responses.find((r) => (JSON.parse(r) as { id?: number }).id === 20); + const parsed = JSON.parse(response!) 
as { result: unknown[] }; + expect(parsed.result).toHaveLength(1); + }); + + it("returns all open PRs when no filter is provided", () => { + const prs = [ + makePullRequest({ state: "open" }), + makePullRequest({ state: "open" }), + makePullRequest({ state: "closed" }), + ]; + mod.updateRelaySnapshot({ issues: [], pullRequests: prs, workflowRuns: [], lastUpdatedAt: Date.now() }); + + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + mod.connectRelay(9876); + ws._triggerOpen(); + + ws._triggerMessage(JSON.stringify({ + jsonrpc: "2.0", + id: 21, + method: "get_open_prs", + params: {}, + })); + + const response = responses.find((r) => (JSON.parse(r) as { id?: number }).id === 21); + const parsed = JSON.parse(response!) as { result: unknown[] }; + expect(parsed.result).toHaveLength(2); + }); +}); + +describe("GET_PR_DETAILS handler", () => { + let mod: typeof import("../../../src/app/lib/mcp-relay"); + let ws: MockWs; + + beforeEach(async () => { + const mock = makeSingleInstanceMock(); + ws = mock.ws; + mod = await loadModule(mock.Constructor); + }); + + afterEach(() => { + vi.unstubAllGlobals(); + vi.restoreAllMocks(); + }); + + it("returns PR by repo+number", () => { + const pr = makePullRequest({ number: 42, repoFullName: "owner/repo", state: "open" }); + mod.updateRelaySnapshot({ issues: [], pullRequests: [pr], workflowRuns: [], lastUpdatedAt: Date.now() }); + + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + mod.connectRelay(9876); + ws._triggerOpen(); + + ws._triggerMessage(JSON.stringify({ + jsonrpc: "2.0", + id: 30, + method: "get_pr_details", + params: { repo: "owner/repo", number: 42 }, + })); + + const response = responses.find((r) => (JSON.parse(r) as { id?: number }).id === 30); + const parsed = JSON.parse(response!) 
as { result: { number: number } }; + expect(parsed.result.number).toBe(42); + }); + + it("returns result: null when PR not found", () => { + mod.updateRelaySnapshot({ issues: [], pullRequests: [], workflowRuns: [], lastUpdatedAt: Date.now() }); + + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + mod.connectRelay(9876); + ws._triggerOpen(); + + ws._triggerMessage(JSON.stringify({ + jsonrpc: "2.0", + id: 31, + method: "get_pr_details", + params: { repo: "owner/repo", number: 9999 }, + })); + + const response = responses.find((r) => (JSON.parse(r) as { id?: number }).id === 31); + const parsed = JSON.parse(response!) as { result: unknown; error?: unknown }; + expect(parsed.result).toBeNull(); + expect(parsed.error).toBeUndefined(); + }); + + it("returns PR by numeric id", () => { + const pr = makePullRequest({ state: "open" }); + mod.updateRelaySnapshot({ issues: [], pullRequests: [pr], workflowRuns: [], lastUpdatedAt: Date.now() }); + + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + mod.connectRelay(9876); + ws._triggerOpen(); + + ws._triggerMessage(JSON.stringify({ + jsonrpc: "2.0", + id: 32, + method: "get_pr_details", + params: { id: pr.id }, + })); + + const response = responses.find((r) => (JSON.parse(r) as { id?: number }).id === 32); + const parsed = JSON.parse(response!) 
as { result: { id: number } }; +    expect(parsed.result.id).toBe(pr.id); +  }); +}); + +describe("GET_OPEN_PRS status filter", () => { +  let mod: typeof import("../../../src/app/lib/mcp-relay"); +  let ws: MockWs; + +  beforeEach(async () => { +    const mock = makeSingleInstanceMock(); +    ws = mock.ws; +    mod = await loadModule(mock.Constructor); +  }); + +  afterEach(() => { +    vi.unstubAllGlobals(); +    vi.restoreAllMocks(); +  }); + +  function setupAndConnect(prs: ReturnType<typeof makePullRequest>[]) { +    mod.updateRelaySnapshot({ issues: [], pullRequests: prs, workflowRuns: [], lastUpdatedAt: Date.now() }); +    const responses: string[] = []; +    ws.send = vi.fn((data: string) => responses.push(data)); +    mod.connectRelay(9876); +    ws._triggerOpen(); +    return responses; +  } + +  it("filters by status=draft", () => { +    const prs = [ +      makePullRequest({ state: "open", draft: true }), +      makePullRequest({ state: "open", draft: false }), +    ]; +    const responses = setupAndConnect(prs); +    ws._triggerMessage(JSON.stringify({ jsonrpc: "2.0", id: 90, method: "get_open_prs", params: { status: "draft" } })); +    const parsed = JSON.parse(responses.find((r) => (JSON.parse(r) as { id?: number }).id === 90)!) as { result: unknown[] }; +    expect(parsed.result).toHaveLength(1); +  }); + +  it("filters by status=needs_review (non-draft, REVIEW_REQUIRED)", () => { +    const prs = [ +      makePullRequest({ state: "open", draft: false, reviewDecision: "REVIEW_REQUIRED" }), +      makePullRequest({ state: "open", draft: true, reviewDecision: "REVIEW_REQUIRED" }), +      makePullRequest({ state: "open", draft: false, reviewDecision: "APPROVED" }), +    ]; +    const responses = setupAndConnect(prs); +    ws._triggerMessage(JSON.stringify({ jsonrpc: "2.0", id: 91, method: "get_open_prs", params: { status: "needs_review" } })); +    const parsed = JSON.parse(responses.find((r) => (JSON.parse(r) as { id?: number }).id === 91)!) 
as { result: unknown[] }; + expect(parsed.result).toHaveLength(1); + }); + + it("filters by status=failing", () => { + const prs = [ + makePullRequest({ state: "open", checkStatus: "failure" }), + makePullRequest({ state: "open", checkStatus: "success" }), + ]; + const responses = setupAndConnect(prs); + ws._triggerMessage(JSON.stringify({ jsonrpc: "2.0", id: 92, method: "get_open_prs", params: { status: "failing" } })); + const parsed = JSON.parse(responses.find((r) => (JSON.parse(r) as { id?: number }).id === 92)!) as { result: unknown[] }; + expect(parsed.result).toHaveLength(1); + }); + + it("filters by status=approved", () => { + const prs = [ + makePullRequest({ state: "open", reviewDecision: "APPROVED" }), + makePullRequest({ state: "open", reviewDecision: "REVIEW_REQUIRED" }), + ]; + const responses = setupAndConnect(prs); + ws._triggerMessage(JSON.stringify({ jsonrpc: "2.0", id: 93, method: "get_open_prs", params: { status: "approved" } })); + const parsed = JSON.parse(responses.find((r) => (JSON.parse(r) as { id?: number }).id === 93)!) 
as { result: unknown[] }; + expect(parsed.result).toHaveLength(1); + }); +}); + +describe("GET_OPEN_ISSUES handler", () => { + let mod: typeof import("../../../src/app/lib/mcp-relay"); + let ws: MockWs; + + beforeEach(async () => { + const mock = makeSingleInstanceMock(); + ws = mock.ws; + mod = await loadModule(mock.Constructor); + }); + + afterEach(() => { + vi.unstubAllGlobals(); + vi.restoreAllMocks(); + }); + + it("returns open issues", () => { + const issues = [makeIssue({ state: "open" }), makeIssue({ state: "open" }), makeIssue({ state: "closed" })]; + mod.updateRelaySnapshot({ issues, pullRequests: [], workflowRuns: [], lastUpdatedAt: Date.now() }); + + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + mod.connectRelay(9876); + ws._triggerOpen(); + ws._triggerMessage(JSON.stringify({ jsonrpc: "2.0", id: 50, method: "get_open_issues", params: {} })); + const parsed = JSON.parse(responses.find((r) => (JSON.parse(r) as { id?: number }).id === 50)!) as { result: unknown[] }; + expect(parsed.result).toHaveLength(2); + }); + + it("filters by repo", () => { + const issues = [makeIssue({ state: "open", repoFullName: "owner/a" }), makeIssue({ state: "open", repoFullName: "owner/b" })]; + mod.updateRelaySnapshot({ issues, pullRequests: [], workflowRuns: [], lastUpdatedAt: Date.now() }); + + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + mod.connectRelay(9876); + ws._triggerOpen(); + ws._triggerMessage(JSON.stringify({ jsonrpc: "2.0", id: 51, method: "get_open_issues", params: { repo: "owner/a" } })); + const parsed = JSON.parse(responses.find((r) => (JSON.parse(r) as { id?: number }).id === 51)!) 
as { result: unknown[] }; + expect(parsed.result).toHaveLength(1); + }); +}); + +describe("GET_FAILING_ACTIONS handler", () => { + let mod: typeof import("../../../src/app/lib/mcp-relay"); + let ws: MockWs; + + beforeEach(async () => { + const mock = makeSingleInstanceMock(); + ws = mock.ws; + mod = await loadModule(mock.Constructor); + }); + + afterEach(() => { + vi.unstubAllGlobals(); + vi.restoreAllMocks(); + }); + + it("returns failing and in-progress runs", () => { + const runs = [ + makeWorkflowRun({ status: "in_progress", conclusion: null }), + makeWorkflowRun({ status: "completed", conclusion: "failure" }), + makeWorkflowRun({ status: "completed", conclusion: "timed_out" }), + makeWorkflowRun({ status: "completed", conclusion: "success" }), + ]; + mod.updateRelaySnapshot({ issues: [], pullRequests: [], workflowRuns: runs, lastUpdatedAt: Date.now() }); + + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + mod.connectRelay(9876); + ws._triggerOpen(); + ws._triggerMessage(JSON.stringify({ jsonrpc: "2.0", id: 60, method: "get_failing_actions", params: {} })); + const parsed = JSON.parse(responses.find((r) => (JSON.parse(r) as { id?: number }).id === 60)!) 
as { result: unknown[] }; + expect(parsed.result).toHaveLength(3); + }); +}); + +describe("GET_CONFIG handler", () => { + let mod: typeof import("../../../src/app/lib/mcp-relay"); + let ws: MockWs; + + beforeEach(async () => { + const mock = makeSingleInstanceMock(); + ws = mock.ws; + mod = await loadModule(mock.Constructor); + }); + + afterEach(() => { + vi.unstubAllGlobals(); + vi.restoreAllMocks(); + }); + + it("returns config fields without requiring snapshot", () => { + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + mod.connectRelay(9876); + ws._triggerOpen(); + ws._triggerMessage(JSON.stringify({ jsonrpc: "2.0", id: 70, method: "get_config", params: {} })); + const parsed = JSON.parse(responses.find((r) => (JSON.parse(r) as { id?: number }).id === 70)!) as { + result: { selectedRepos: unknown[]; trackedUsers: unknown[]; upstreamRepos: unknown[]; monitoredRepos: unknown[] }; + }; + expect(parsed.result.selectedRepos).toBeDefined(); + expect(parsed.result.trackedUsers).toBeDefined(); + }); +}); + +describe("GET_REPOS handler", () => { + let mod: typeof import("../../../src/app/lib/mcp-relay"); + let ws: MockWs; + + beforeEach(async () => { + const mock = makeSingleInstanceMock(); + ws = mock.ws; + mod = await loadModule(mock.Constructor); + }); + + afterEach(() => { + vi.unstubAllGlobals(); + vi.restoreAllMocks(); + }); + + it("returns selectedRepos without requiring snapshot", () => { + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + mod.connectRelay(9876); + ws._triggerOpen(); + ws._triggerMessage(JSON.stringify({ jsonrpc: "2.0", id: 80, method: "get_repos", params: {} })); + const parsed = JSON.parse(responses.find((r) => (JSON.parse(r) as { id?: number }).id === 80)!) 
as { result: unknown[] }; + expect(parsed.result).toHaveLength(1); + }); +}); + +describe("GET_RATE_LIMIT handler", () => { + let mod: typeof import("../../../src/app/lib/mcp-relay"); + let ws: MockWs; + + beforeEach(async () => { + _mockCoreRateLimit = null; + _mockGraphqlRateLimit = null; + const mock = makeSingleInstanceMock(); + ws = mock.ws; + mod = await loadModule(mock.Constructor); + }); + + afterEach(() => { + vi.unstubAllGlobals(); + vi.restoreAllMocks(); + _mockCoreRateLimit = null; + _mockGraphqlRateLimit = null; + }); + + it("returns -32002 when no rate limit data available", () => { + // Both are null + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + mod.connectRelay(9876); + ws._triggerOpen(); + + ws._triggerMessage(JSON.stringify({ + jsonrpc: "2.0", + id: 40, + method: "get_rate_limit", + params: {}, + })); + + const response = responses.find((r) => (JSON.parse(r) as { id?: number }).id === 40); + const parsed = JSON.parse(response!) as { error: { code: number } }; + expect(parsed.error.code).toBe(-32002); + }); + + it("returns rate limit data when available", () => { + _mockCoreRateLimit = { limit: 5000, remaining: 4000, resetAt: new Date("2026-04-07T12:00:00Z") }; + + const responses: string[] = []; + ws.send = vi.fn((data: string) => responses.push(data)); + mod.connectRelay(9876); + ws._triggerOpen(); + + ws._triggerMessage(JSON.stringify({ + jsonrpc: "2.0", + id: 41, + method: "get_rate_limit", + params: {}, + })); + + const response = responses.find((r) => (JSON.parse(r) as { id?: number }).id === 41); + const parsed = JSON.parse(response!) 
as { result: { core: { limit: number; remaining: number } } }; + expect(parsed.result.core.limit).toBe(5000); + expect(parsed.result.core.remaining).toBe(4000); + }); +}); + +describe("connectRelay — config update on connect", () => { + let mod: typeof import("../../../src/app/lib/mcp-relay"); + let ws: MockWs; + let WsCtor: typeof WebSocket; + + beforeEach(async () => { + const mock = makeSingleInstanceMock(); + ws = mock.ws; + WsCtor = mock.Constructor; + mod = await loadModule(WsCtor); + }); + + afterEach(() => { + vi.unstubAllGlobals(); + vi.restoreAllMocks(); + }); + + it("sends config_update notification when connection opens", () => { + const sentMessages: string[] = []; + ws.send = vi.fn((data: string) => sentMessages.push(data)); + + mod.connectRelay(9876); + ws._triggerOpen(); + + // First sent message should be a config_update notification + expect(sentMessages.length).toBeGreaterThan(0); + const configMsg = sentMessages.find((m) => (JSON.parse(m) as { method?: string }).method === "config_update"); + expect(configMsg).toBeDefined(); + // Params are flat (no config: wrapper) to match ConfigUpdatePayloadSchema. + const parsed = JSON.parse(configMsg!) 
as { params: { selectedRepos: unknown[] } }; + expect(parsed.params.selectedRepos).toBeDefined(); + }); + + it("uses the WebSocket constructor with the correct URL", () => { + const constructorCalls: string[] = []; + // Wrap the constructor to track calls + const TrackingCtor = function (url: string) { + constructorCalls.push(url); + return ws; + } as unknown as typeof WebSocket; + (TrackingCtor as { OPEN: number }).OPEN = 1; + (TrackingCtor as { CONNECTING: number }).CONNECTING = 0; + (TrackingCtor as { CLOSING: number }).CLOSING = 2; + (TrackingCtor as { CLOSED: number }).CLOSED = 3; + vi.stubGlobal("WebSocket", TrackingCtor); + + mod.connectRelay(9876); + expect(constructorCalls[constructorCalls.length - 1]).toBe("ws://127.0.0.1:9876"); + }); +}); + +describe("disconnectRelay — skips reconnect", () => { + let mod: typeof import("../../../src/app/lib/mcp-relay"); + let ws: MockWs; + + beforeEach(async () => { + const mock = makeSingleInstanceMock(); + ws = mock.ws; + mod = await loadModule(mock.Constructor); + }); + + afterEach(() => { + vi.unstubAllGlobals(); + vi.restoreAllMocks(); + }); + + it("closes WebSocket and sets status to disconnected", () => { + mod.connectRelay(9876); + ws._triggerOpen(); + expect(mod.getRelayStatus()).toBe("connected"); + + mod.disconnectRelay(); + expect(mod.getRelayStatus()).toBe("disconnected"); + expect(ws.close).toHaveBeenCalled(); + }); +}); + +describe("backoff reconnect sequence", () => { + let mod: typeof import("../../../src/app/lib/mcp-relay"); + let instances: MockWs[]; + + beforeEach(async () => { + vi.useFakeTimers(); + const mock = makeMultiInstanceMock(); + instances = mock.instances; + mod = await loadModule(mock.Constructor); + }); + + afterEach(() => { + vi.useRealTimers(); + vi.unstubAllGlobals(); + vi.restoreAllMocks(); + }); + + it("reconnects after 1s on first disconnect (initial backoff)", async () => { + mod.connectRelay(9876); + expect(instances).toHaveLength(1); + + // Trigger open (resets backoffIndex to 
0) then close (schedules 1s reconnect) + instances[0]._triggerOpen(); + instances[0]._triggerClose(); + expect(instances).toHaveLength(1); // Still 1 right after close + + // After 1s: first reconnect fires + vi.advanceTimersByTime(1000); + expect(instances).toHaveLength(2); + }); + + it("increases backoff on repeated failures without successful open", async () => { + mod.connectRelay(9876); + + // First close without opening — schedules reconnect at 1s (backoffIndex=0) + instances[0]._triggerClose(); + vi.advanceTimersByTime(1000); + expect(instances).toHaveLength(2); + + // Second close without opening — schedules reconnect at 10s (backoffIndex=1) + instances[1]._triggerClose(); + vi.advanceTimersByTime(9999); + expect(instances).toHaveLength(2); // Not yet (needs 10000ms) + + vi.advanceTimersByTime(1); + expect(instances).toHaveLength(3); + }); + + it("resets backoff index on successful connection", async () => { + mod.connectRelay(9876); + expect(instances).toHaveLength(1); + + // Open and close — first reconnect at 1s + instances[0]._triggerOpen(); + instances[0]._triggerClose(); + + vi.advanceTimersByTime(1000); + await vi.runAllTimersAsync(); + expect(instances).toHaveLength(2); + + // Second connection opens successfully — resets backoff to 0 + instances[1]._triggerOpen(); + + // Close — should reconnect at 1s again + instances[1]._triggerClose(); + + vi.advanceTimersByTime(1000); + await vi.runAllTimersAsync(); + expect(instances).toHaveLength(3); + }); + + it("does not reconnect after deliberate disconnect", async () => { + mod.connectRelay(9876); + instances[0]._triggerOpen(); + + mod.disconnectRelay(); + + // Advance far past any backoff delay + vi.advanceTimersByTime(400000); + await vi.runAllTimersAsync(); + + // Should still only have the initial connection attempt + expect(instances).toHaveLength(1); + }); +}); diff --git a/tests/helpers/factories.ts b/tests/helpers/factories.ts new file mode 100644 index 00000000..2a30e2fd --- /dev/null +++ 
b/tests/helpers/factories.ts @@ -0,0 +1,103 @@ +import type { Issue, PullRequest, WorkflowRun, ApiError } from "../../src/shared/types.js"; +import type { TrackedItem } from "../../src/app/stores/view.js"; + +let nextId = 1; + +export function makeIssue(overrides: Partial<Issue> = {}): Issue { + return { + id: nextId++, + number: 1, + title: "Test issue", + state: "open", + htmlUrl: "https://github.com/owner/repo/issues/1", + createdAt: "2024-01-10T08:00:00Z", + updatedAt: "2024-01-12T14:30:00Z", + userLogin: "octocat", + userAvatarUrl: "https://github.com/images/error/octocat_happy.gif", + labels: [], + assigneeLogins: [], + repoFullName: "owner/repo", + comments: 0, + ...overrides, + }; +} + +export function makePullRequest(overrides: Partial<PullRequest> = {}): PullRequest { + return { + id: nextId++, + number: 1, + title: "Test pull request", + state: "open", + draft: false, + htmlUrl: "https://github.com/owner/repo/pull/1", + createdAt: "2024-01-10T08:00:00Z", + updatedAt: "2024-01-12T14:30:00Z", + userLogin: "octocat", + userAvatarUrl: "https://github.com/images/error/octocat_happy.gif", + headSha: "abc123def456", + headRef: "feature/test-branch", + baseRef: "main", + assigneeLogins: [], + reviewerLogins: [], + repoFullName: "owner/repo", + checkStatus: null, + additions: 0, + deletions: 0, + changedFiles: 0, + comments: 0, + reviewThreads: 0, + labels: [], + reviewDecision: null, + totalReviewCount: 0, + enriched: true, + ...overrides, + }; +} + +export function makeWorkflowRun(overrides: Partial<WorkflowRun> = {}): WorkflowRun { + return { + id: nextId++, + name: "CI", + status: "completed", + conclusion: "success", + event: "push", + workflowId: 1, + headSha: "abc123def456", + headBranch: "main", + runNumber: 1, + htmlUrl: "https://github.com/owner/repo/actions/runs/1", + createdAt: "2024-01-10T08:00:00Z", + updatedAt: "2024-01-12T14:30:00Z", + repoFullName: "owner/repo", + isPrRun: false, + runStartedAt: "2024-01-10T08:00:00Z", + completedAt: "2024-01-10T08:05:00Z", + runAttempt: 1, + 
displayTitle: "Workflow 1", + actorLogin: "user", + ...overrides, + }; +} + +export function makeTrackedItem(overrides: Partial<TrackedItem> = {}): TrackedItem { + const id = overrides.id ?? nextId++; + return { + id, + number: id, + type: "issue", + repoFullName: "owner/repo", + title: "Test tracked item", + addedAt: Date.now(), + ...overrides, + }; +} + +export function makeApiError(overrides: Partial<ApiError> = {}): ApiError { + return { + repo: "owner/repo", + statusCode: 500, + message: "Internal server error", + retryable: true, + ...overrides, + }; +} diff --git a/tests/helpers/index.tsx b/tests/helpers/index.tsx index d4b595b3..063d6839 100644 --- a/tests/helpers/index.tsx +++ b/tests/helpers/index.tsx @@ -1,110 +1,9 @@ import { render } from "@solidjs/testing-library"; import { MemoryRouter, createMemoryHistory } from "@solidjs/router"; import { resetViewState } from "../../src/app/stores/view"; -import type { TrackedItem } from "../../src/app/stores/view"; -import type { Issue, PullRequest, WorkflowRun, ApiError } from "../../src/app/services/api"; import type { JSX } from "solid-js"; -let nextId = 1; - -export function makeIssue(overrides: Partial<Issue> = {}): Issue { - return { - id: nextId++, - number: 1, - title: "Test issue", - state: "open", - htmlUrl: "https://github.com/owner/repo/issues/1", - createdAt: "2024-01-10T08:00:00Z", - updatedAt: "2024-01-12T14:30:00Z", - userLogin: "octocat", - userAvatarUrl: "https://github.com/images/error/octocat_happy.gif", - labels: [], - assigneeLogins: [], - repoFullName: "owner/repo", - comments: 0, - ...overrides, - }; -} - -export function makePullRequest(overrides: Partial<PullRequest> = {}): PullRequest { - return { - id: nextId++, - number: 1, - title: "Test pull request", - state: "open", - draft: false, - htmlUrl: "https://github.com/owner/repo/pull/1", - createdAt: "2024-01-10T08:00:00Z", - updatedAt: "2024-01-12T14:30:00Z", - userLogin: "octocat", - userAvatarUrl: "https://github.com/images/error/octocat_happy.gif", - headSha: "abc123def456", 
- headRef: "feature/test-branch", - baseRef: "main", - assigneeLogins: [], - reviewerLogins: [], - repoFullName: "owner/repo", - checkStatus: null, - additions: 0, - deletions: 0, - changedFiles: 0, - comments: 0, - reviewThreads: 0, - labels: [], - reviewDecision: null, - totalReviewCount: 0, - enriched: true, - ...overrides, - }; -} - -export function makeWorkflowRun(overrides: Partial<WorkflowRun> = {}): WorkflowRun { - return { - id: nextId++, - name: "CI", - status: "completed", - conclusion: "success", - event: "push", - workflowId: 1, - headSha: "abc123def456", - headBranch: "main", - runNumber: 1, - htmlUrl: "https://github.com/owner/repo/actions/runs/1", - createdAt: "2024-01-10T08:00:00Z", - updatedAt: "2024-01-12T14:30:00Z", - repoFullName: "owner/repo", - isPrRun: false, - runStartedAt: "2024-01-10T08:00:00Z", - completedAt: "2024-01-10T08:05:00Z", - runAttempt: 1, - displayTitle: "Workflow 1", - actorLogin: "user", - ...overrides, - }; -} - -export function makeTrackedItem(overrides: Partial<TrackedItem> = {}): TrackedItem { - const id = overrides.id ?? 
nextId++; - return { - id, - number: id, - type: "issue", - repoFullName: "owner/repo", - title: "Test tracked item", - addedAt: Date.now(), - ...overrides, - }; -} - -export function makeApiError(overrides: Partial<ApiError> = {}): ApiError { - return { - repo: "owner/repo", - statusCode: 500, - message: "Internal server error", - retryable: true, - ...overrides, - }; -} +export { makeIssue, makePullRequest, makeWorkflowRun, makeApiError, makeTrackedItem } from "./factories.js"; export function renderWithRouter( component: () => JSX.Element, diff --git a/vitest.workspace.ts b/vitest.workspace.ts index 56881a0f..88901674 100644 --- a/vitest.workspace.ts +++ b/vitest.workspace.ts @@ -6,6 +6,8 @@ import tailwindcss from "@tailwindcss/vite"; export default defineConfig({ test: { projects: [ + // MCP server tests (Node.js environment) + "mcp/vitest.config.ts", // Browser/DOM tests (stores, services, UI) defineProject({ plugins: [solid(), tailwindcss()],