diff --git a/.claude/hooks/block-hack-scripts.sh b/.claude/hooks/block-hack-scripts.sh deleted file mode 100755 index 4d02407eb..000000000 --- a/.claude/hooks/block-hack-scripts.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env bash -# block-hack-scripts.sh — PreToolUse hook for Bash tool -# Blocks direct invocation of hack/ scripts; nudges toward make targets. -# Reading hack/ files (cat, head, grep, etc.) is allowed. - -set -euo pipefail - -input=$(cat) -command=$(printf '%s' "$input" | sed -n 's/.*"command" *: *"\(.*\)".*/\1/p' | head -1) - -# Empty command — nothing to check -[ -z "$command" ] && exit 0 - -# Allow read-only operations on hack/ files -if printf '%s' "$command" | grep -qP '^\s*(cat|head|tail|less|read|ls|grep|rg|diff|wc|file|stat)\b'; then - exit 0 -fi - -# Pattern: hack script invocation at start of command or after a separator -# Matches: ./hack/foo.sh, hack/foo.sh, bash ./hack/foo.sh, sh hack/foo.sh -# Also after && ; || | -if ! printf '%s' "$command" | grep -qP '(^|\s|&&|;|\|\||\|)\s*(bash\s+|sh\s+)?(\.\/)?hack\/\S+\.sh'; then - exit 0 -fi - -# Extract the script name(s) that matched -script=$(printf '%s' "$command" | grep -oP '(\.\/)?hack\/\S+\.sh' | head -1 | sed 's|^\./||') - -# Map scripts to make targets -case "$script" in - hack/release.sh) target="make release" ;; - hack/build-all.sh) target="make build-all" ;; - hack/lint-drift.sh) target="make lint-drift" ;; - hack/lint-docs.sh) target="make lint-docs" ;; - hack/plugin-reload.sh) target="make plugin-reload" ;; - hack/reinstall.sh) target="make install" ;; - hack/gpg-fix.sh) target="make gpg-fix / make gpg-test" ;; - *) target="" ;; -esac - -if [ -n "$target" ]; then - reason="Use \`${target}\` instead of invoking \`${script}\` directly." -else - reason="Direct hack/ script invocation blocked. Ask the user to run it manually, or create a make target first." 
-fi - -printf '{"decision":"block","reason":"%s"}\n' "$reason" diff --git a/.claude/skills/_ctx-audit/SKILL.md b/.claude/skills/_ctx-audit/SKILL.md index b2832ea91..6b6253c1b 100644 --- a/.claude/skills/_ctx-audit/SKILL.md +++ b/.claude/skills/_ctx-audit/SKILL.md @@ -323,8 +323,6 @@ rg 'default|Default' docs/configuration.md -n ``` Cross-check: -- `config.DirContext` value matches the `context_dir` default in docs - and sample `.ctxrc` - `FileReadOrder` entries match the `priority_order` list in sample `.ctxrc` and the docs "Default priority order" section - `DefaultTokenBudget`, `DefaultArchiveAfterDays`, etc. in `rc.go` diff --git a/.claude/skills/_ctx-backup/SKILL.md b/.claude/skills/_ctx-backup/SKILL.md deleted file mode 100644 index f29f3da4b..000000000 --- a/.claude/skills/_ctx-backup/SKILL.md +++ /dev/null @@ -1,68 +0,0 @@ ---- -name: _ctx-backup -description: "Backup project context and global Claude data to SMB share. Use before risky operations, at end of session, or on request." -allowed-tools: Bash(ctx system backup*), Bash(ls /tmp/ctx-backup*) ---- - -Backup `.context/`, `.claude/`, `ideas/`, and `~/.claude/` to -the configured SMB share. 
- -## When to Use - -- Before risky operations (major refactors, dependency upgrades) -- At the end of a productive session -- When the user explicitly asks for a backup -- Before switching branches with uncommitted context changes - -## When NOT to Use - -- When `CTX_BACKUP_SMB_URL` is not configured (the command will - error — tell the user to set it up) -- Multiple times in quick succession with no changes in between - -## Usage Examples - -```text -/backup -/backup project -/backup global -/backup all -``` - -## Arguments - -| Argument | What it backs up | -|-----------|-----------------------------------------------| -| (none) | Same as `all` | -| `project` | Project context only (`.context/`, `.claude/`, `ideas/`) | -| `global` | Global Claude data only (`~/.claude/`) | -| `all` | Both project and global | - -## Execution - -Based on the argument, run the appropriate command: - -```bash -# For "project" -ctx system backup --scope project - -# For "global" -ctx system backup --scope global - -# For "all" or no argument -ctx system backup --scope all -``` - -## Process - -1. Parse the argument (default to `all` if none provided) -2. Run the appropriate `ctx system backup` command -3. Report the archive path and size from the output -4. Confirm success to the user - -## Quality Checklist - -- [ ] The command completed without errors -- [ ] Archive size is reported to the user -- [ ] If the SMB share was not mounted, the error is clearly - communicated diff --git a/.context/AGENT_PLAYBOOK.md b/.context/AGENT_PLAYBOOK.md index b64593fd6..6ba48fe9c 100644 --- a/.context/AGENT_PLAYBOOK.md +++ b/.context/AGENT_PLAYBOOK.md @@ -15,6 +15,54 @@ making a decision, learning something, or hitting a milestone: persist before continuing. Don't wait for session end; it may never come cleanly. +## File Interaction Protocol + +When a task involves reading, modifying, or reasoning about a file: + +1. 
**Read before act** + - Read the file content directly before making any change + - Do not rely on memory, summaries, or prior reads +2. **No partial reads** + - Do not sample the beginning or end of a file and assume the rest +3. **Freshness requirement** + - A read must be recent relative to the action + - Do not reuse stale context from earlier in the session +4. **No implicit scope** + - "This change is small" is not a valid justification + - "This file is large" is not a valid justification +5. **Edit authority comes from visibility** + - If you haven't seen it, you don't get to modify it + +## Spec Requirement + +Do not begin implementation work without a spec. + +- Every implementation task must trace to a spec file +- **If no spec exists, STOP and create one first** +- Do not treat task text alone as a substitute for a spec + +## Independent Review + +Sub-agent review is not optional once implementation begins. + +A review must be invoked when ANY of the following occur: + +- Before the first modification to the codebase +- After completing one or more tasks in TASKS.md +- Before declaring the work complete + +Required review inputs: +- the governing spec +- TASKS.md +- the current implementation + +Review prompt: +- "Review , TASKS.md, and the current implementation for drift, + omissions, invalid assumptions, and incomplete requirements." + +Do not declare work complete until review findings are either resolved or +explicitly recorded. + ## Invoking ctx Always use `ctx` from PATH: @@ -24,9 +72,23 @@ ctx agent # ✓ correct ./dist/ctx # ✗ avoid hardcoded paths go run ./cmd/ctx # ✗ avoid unless developing ctx itself ``` - Check with `which ctx` if unsure whether it's installed. +### When ctx Returns an Error + +Triage the error before reacting: + +- **Invocation error**: the message points at your call: unknown + flag, unknown command, wrong argument count, missing required + flag. Read `ctx --help`, fix the call, and retry. 
+- **Everything else**: missing context directory, config problem, + hook rejection, permission denied, unexpected failure. Relay the + output to the user **verbatim** and stop. Do not add flags, run + other commands, edit files to fix the cause, or retry. Wait for + the user's next instruction. + +When unsure which kind you're looking at, treat it as the second. + ## Context Readback Before starting any work, read the required context files and confirm to the @@ -36,7 +98,7 @@ conventions." Do not begin implementation until you have done so. ## Supplementary Files These files live in `.context/` alongside the core context files. -Read them when the task at hand warrants it — not on every session. +Read them when the task at hand warrants it, not on every session. | File | Read when | |--------------------|----------------------------------------------------------------| @@ -44,6 +106,28 @@ Read them when the task at hand warrants it — not on every session. | DETAILED_DESIGN.md | Deep-diving into internals (generated via `/ctx-architecture`) | | GLOSSARY.md | Encountering unfamiliar project-specific terminology | +## Context Directory Lives at the Project Root + +The project root is the parent of `.context/`, by contract — +specifically `filepath.Dir(ContextDir())`. That's where `ctx sync`, +`ctx drift`, and the memory-drift hook look for code, secrets, +and `MEMORY.md`. + +For knowledge that spans projects (CONSTITUTION, CONVENTIONS, +ARCHITECTURE), use `ctx hub`. + +Recommended layout: + +``` +~/WORKSPACE/my-project + ├── .git + ├── .context + ├── Makefile + ├── Makefile.ctx + └── specs + └── ... 
+``` + ## Reason Before Acting Before implementing any non-trivial change, think through it step-by-step: @@ -100,7 +184,6 @@ Surface problems worth mentioning: - **Stale context files** (not modified recently): mention before stale context influences work - **Bloated token count** (over 30k): offer `ctx compact` -- **Long single-line entries**: run `ctx fmt` to normalize line widths - **Drift between files and code**: spot-check paths from ARCHITECTURE.md against the actual file tree @@ -123,14 +206,6 @@ is running long: Context compaction happens automatically, but the next window loses nuance. Explicit persistence is cheaper than re-discovery. -### Check Available Skills - -Before starting any task, scan the skill list in your system -prompt to see if a dedicated skill already handles the request. -Prefer invoking an existing skill over ad-hoc implementation: -skills encode project conventions, quality gates, and -persistence steps that are easy to miss otherwise. - ### Conversational Triggers Users rarely invoke skills explicitly. Recognize natural language: @@ -242,7 +317,7 @@ Never assume. If you don't see it in files, you don't know it. ## Planning Work Every commit requires a `Spec:` trailer (CONSTITUTION rule). This means -every piece of work needs a spec — no exceptions, no "trivial" qualifier. +every piece of work needs a spec; no exceptions, no "trivial" qualifier. A one-liner bugfix gets a one-paragraph spec; a multi-package feature gets a full design document. The spec exists for traceability, not ceremony. @@ -341,4 +416,3 @@ re-discovering it. 5 minutes reading saves 50 minutes of wasted work. 
- [ ] DECISIONS.md has no superseded entries unmarked - [ ] LEARNINGS.md gotchas still relevant - [ ] Run `ctx drift` and address warnings -- [ ] Run `ctx fmt` to normalize line widths diff --git a/.context/AGENT_PLAYBOOK_GATE.md b/.context/AGENT_PLAYBOOK_GATE.md index 02a5cf5d0..d5c3b2974 100644 --- a/.context/AGENT_PLAYBOOK_GATE.md +++ b/.context/AGENT_PLAYBOOK_GATE.md @@ -8,19 +8,40 @@ lifecycle details, or anti-patterns. ```bash ctx status # correct -./dist/ctx # wrong — never hardcode paths -go run ./cmd/ctx # wrong — unless developing ctx itself +./dist/ctx # wrong: never hardcode paths +go run ./cmd/ctx # wrong: unless developing ctx itself ``` +## When `ctx` Errors + +If the error names your flag, argument, or command, read +`ctx --help` and fix the call. Otherwise, relay verbatim +and stop. When unsure, stop. + +## File Interaction Protocol + +When a task involves reading, modifying, or reasoning about a file: + +1. **Read before act**: Do not rely on memory, summaries, or prior reads +2. **No partial reads**: Do not sample and assume the rest +3. **Freshness requirement**: Do not reuse stale context from earlier in the + session +4. **Edit authority comes from visibility**: If you haven't seen it, you don't + get to modify it +5. **Coverage requirement**: Before editing, state what parts of the file were + read and why they are sufficient + ## Planning Work +Do not begin implementation without a spec. + Every commit requires a `Spec:` trailer. Every piece of work needs -a spec — no exceptions. Scale the spec to the work. Use `/ctx-spec` +a spec; no exceptions. Scale the spec to the work. Use `/ctx-spec` to scaffold. ## Proactive Persistence -After completing a task, making a decision, or hitting a gotcha — +After completing a task, making a decision, or hitting a gotcha, persist before continuing. Don't wait for session end. ## Chunk and Checkpoint @@ -28,16 +49,25 @@ persist before continuing. Don't wait for session end. 
For multi-step work: commit after each chunk, persist learnings, run tests before moving on. Track progress via TASKS.md checkboxes. +## Independent Review + +A review must occur: + +* Before the first code change +* After completing tasks +* Before presenting results + +Review must consider: + +* Spec +* TASKS.md +* Current implementation + ## Tool Preferences Use the `gemini-search` MCP server for web searches. Fall back to built-in search only if `gemini-search` is not connected. -## Check Available Skills - -Before starting any task, scan available skills to see if one -already handles the request. Prefer skills over ad-hoc work. - ## Conversational Triggers | User Says | Action | diff --git a/.context/ARCHITECTURE.md b/.context/ARCHITECTURE.md index 430d0e325..5357482a2 100644 --- a/.context/ARCHITECTURE.md +++ b/.context/ARCHITECTURE.md @@ -19,377 +19,70 @@ and warns if they do not exist on disk. Keep paths accurate. ## Overview -ctx is a CLI tool that creates and manages a `.context/` directory -containing structured markdown files. These files provide persistent, -token-budgeted, priority-ordered context for AI coding assistants -across sessions. An MCP server exposes the same capabilities to any -MCP-compatible agent over JSON-RPC 2.0. + -Design philosophy: +## Package/Module Dependency Graph -- **Markdown-centric**: all context is plain markdown; no databases, - no proprietary formats. Files are human-readable and version- - controlled alongside the code they describe. -- **Token-budgeted**: context assembly respects configurable token - limits so AI agents receive the most important information first - without exceeding their context window. -- **Priority-ordered**: files are loaded in a deliberate sequence - (rules before tasks, conventions before architecture) so agents - internalize constraints before acting. -- **Convention over configuration**: sensible defaults with optional - `.ctxrc` overrides. No config file required to get started. 
-- **Agent-agnostic**: the MCP server speaks standard protocol; the - CLI works from any shell. No agent-specific coupling in core code. - -For per-module deep dives (types, exported API, data flow, edge -cases), see [DETAILED_DESIGN.md](DETAILED_DESIGN.md). - -## Layered Architecture - -The codebase is organized into strict dependency layers. Each layer -may only import from layers below it. - -``` -Layer 6: Entry Points - cmd/ctx, bootstrap (34 commands registered) - -Layer 5: CLI Commands + MCP Server - internal/cli/* (34 cmd/core packages) - internal/mcp/* (JSON-RPC 2.0 server) - -Layer 4: Output + Errors - internal/write/* (46 writer packages) - internal/err/* (35 error packages) - -Layer 3: Domain Logic - entity, entry, context/*, drift, index, task, tidy, - trace, journal/*, memory, notify, claude - -Layer 2: Infrastructure - io, format, parse, sanitize, validate, inspect, - flagbind, exec/*, log/*, crypto, sysinfo, rc - -Layer 1: Foundation (zero internal dependencies) - internal/config/* (60+ sub-packages) - internal/assets (embedded FS + 14 typed readers) - -Layer 0: Quality Gates (test-only) - internal/audit, internal/compliance -``` - -## Package Dependency Graph + BOOT[bootstrap] - BOOT --> CLI[cli/* 34 commands] - BOOT --> MCP[mcp/server] - - CLI --> CORE[core/ packages] - CLI --> WRITE[write/* 46 pkgs] - CLI --> ERR[err/* 35 pkgs] - - MCP --> HANDLER[mcp/handler] - MCP --> PROTO[mcp/proto] - HANDLER --> DOMAIN - - CORE --> DOMAIN[domain packages] - WRITE --> FMT[format] - WRITE --> DESC[assets/read/desc] - ERR --> DESC - - DOMAIN --> INFRA[infrastructure] - DOMAIN --> RC[rc] - - INFRA --> CONFIG[config/* 60+ pkgs] - INFRA --> ASSETS[assets + read/*] - RC --> CONFIG + core["core (no deps)"] + api["api"] --> core + cli["cli"] --> api ``` - -*Full dependency matrix: -[architecture-dia-dependencies.md](architecture-dia-dependencies.md)* +--> ## Component Map -### Foundation (zero internal dependencies) - -| Package | Purpose | Key Exports | 
-|---------|---------|-------------| -| `internal/config/*` | 60+ sub-packages: constants, types, regex, text keys | Domain-specific constants imported granularly | -| `internal/assets` | Embedded templates via `go:embed` | `FS` (single embed) | -| `internal/assets/read/*` | 14 typed accessor packages | `desc.Text()`, `skill.Content()`, `entry.List()` | -| `internal/assets/tpl` | Sprintf-based format templates | Entry, journal, loop, obsidian templates | - -### Infrastructure - -| Package | Purpose | Key Exports | -|---------|---------|-------------| -| `internal/io` | Guarded file I/O with path validation | `SafeReadFile()`, `SafeWriteFile()`, `SafePost()` | -| `internal/format` | Display formatting (time, bytes, tokens) | `TimeAgo()`, `Bytes()`, `Tokens()`, `Truncate()` | -| `internal/parse` | Text-to-typed-value conversions | `Date()` | -| `internal/sanitize` | Input mutation to conform constraints | `Filename()` | -| `internal/validate` | Path validation and symlink checks | `Boundary()`, `Symlink()` | -| `internal/inspect` | String predicates and position queries | `Contains()`, `StartsWithCtxMarker()` | -| `internal/flagbind` | Cobra flag binding with YAML descriptions | `BoolFlag()`, `StringFlag()`, `IntFlag()` | -| `internal/exec/*` | External command wrappers (5 packages) | `git.Run()`, `dep.GoListPackages()` | -| `internal/log/*` | Event logging + stderr warnings | `event.Append()`, `warn.Warn()` | -| `internal/crypto` | AES-256-GCM encryption (stdlib only) | `Encrypt()`, `Decrypt()`, `GenerateKey()` | -| `internal/sysinfo` | OS metrics with platform build tags | `Collect()`, `Evaluate()` | -| `internal/rc` | Runtime config (.ctxrc + env + flags) | `RC()`, `ContextDir()`, `TokenBudget()` | - -### Domain Logic - -| Package | Purpose | Key Exports | -|---------|---------|-------------| -| `internal/entity` | Shared domain types (no logic) | `Session`, `Context`, `FileInfo`, `EntryParams` | -| `internal/entry` | Entry validation and writing | 
`ValidateAndWrite()` | -| `internal/context/*` | Context loading with token counting | `load.Do()`, `token.Estimate()`, `summary.Generate()` | -| `internal/drift` | Context quality validation (7 checks) | `Detect()`, `Report.Status()` | -| `internal/index` | Markdown index tables | `Update()`, `ParseEntryBlocks()` | -| `internal/task` | Task checkbox parsing | `Completed()`, `Pending()`, `SubTask()` | -| `internal/tidy` | Context file maintenance | `CompactResult`, `parseBlockAt()` | -| `internal/trace` | Commit-to-context linking | `Collect()`, `FormatTrailer()` | -| `internal/journal/parser` | Session transcript parsing (4 formats) | `ParseFile()`, `FindSessionsForCWD()` | -| `internal/journal/state` | Journal pipeline state (JSON) | `Load()`, `Save()`, `Mark*()` | -| `internal/memory` | Memory bridge (MEMORY.md sync) | `DiscoverPath()`, `Sync()`, `SelectContent()` | -| `internal/notify` | Fire-and-forget webhooks | `Send()`, `LoadWebhook()` | -| `internal/claude` | Claude Code integration types | `Skills()`, `SkillContent()` | - -### MCP Server (`internal/mcp/*`) - -| Package | Purpose | -|---------|---------| -| `mcp/proto` | JSON-RPC 2.0 message types, MCP constants | -| `mcp/server` | Main loop: stdin read, dispatch, stdout write | -| `mcp/server/dispatch` | Method-based request routing | -| `mcp/server/dispatch/poll` | File mtime polling for change notifications | -| `mcp/server/catalog` | URI-to-file resource mapping (9 resources) | -| `mcp/server/route/*` | Handlers: initialize, ping, tool, prompt, resource | -| `mcp/server/def/*` | Tool (11) and prompt (5) definitions | -| `mcp/handler` | Domain logic as free functions taking `*entity.MCPDeps` | -| `entity.MCPSession` | Per-session advisory state (pure data + mutations) | - -### CLI Commands (`internal/cli/*`) - -34 commands in 8 groups, each following cmd/root + core/ taxonomy: - -| Group | Commands | -|-------|----------| -| Getting Started | `initialize`, `status`, `guide` | -| Context | `add`, `load`, 
`agent`, `sync`, `drift`, `compact` | -| Artifacts | `decision`, `learning`, `task` | -| Sessions | `journal`, `memory`, `remind`, `pad` | -| Runtime | `config`, `permission`, `pause`, `resume` | -| Integration | `setup`, `mcp`, `watch`, `notify`, `loop` | -| Diagnostics | `doctor`, `change`, `dep`, `why`, `trace` | -| Utilities | `reindex` | -| Hidden | `serve`, `site`, `system` (34 hook subcommands) | - -### Output Layer - -| Package | Purpose | -|---------|---------| -| `internal/write/*` | 46 packages: formatted terminal/JSON output | -| `internal/err/*` | 35 packages: error constructors with YAML text | - -### Quality Gates (test-only) - -| Package | Purpose | -|---------|---------| -| `internal/audit` | AST-based codebase invariant tests | -| `internal/compliance` | File-level convention adherence tests | - -## Data Flow Diagrams - -Five core flows define how data moves through the system: - -1. **`ctx init`**: User invokes -> `cli/initialize` reads embedded - templates from `assets` -> creates `.context/` directory -> writes - all template files -> generates AES-256 key -> deploys hooks and - skills -> merges `settings.local.json` -> writes/merges `CLAUDE.md`. - -2. **`ctx agent`**: Agent invokes with `--budget N` -> - `context/load.Do()` reads all `.md` files -> entries scored by - recency and relevance -> sorted and fitted to token budget -> - overflow entries listed as "Also Noted" -> returns Markdown packet. + -## External Dependencies +## Data Flow -Two direct Go dependencies: `spf13/cobra` (CLI framework), -`gopkg.in/yaml.v3` (YAML parsing). Optional external tools: -`zensical` (static site generation) and `gpg` (commit signing). +>API: OK + API-->>User: 201 Created +``` +--> -Local: `make build` (CGO_ENABLED=0, ldflags version), `make audit` -(gofmt, go vet, golangci-lint, lint scripts, tests), `make smoke` -(integration tests). Release: `hack/release.sh` bumps VERSION, -generates release notes, builds all targets, creates signed git tag. 
-CI: GitHub Actions on push; release on `v*` tags producing 6 -platform binaries (darwin/linux/windows x amd64/arm64). +## Key Patterns -*Full build pipeline diagram: -[architecture-dia-build.md](architecture-dia-build.md)* + ## File Layout + diff --git a/.context/CONSTITUTION.md b/.context/CONSTITUTION.md index 24c32b0ca..eaaf35fa2 100644 --- a/.context/CONSTITUTION.md +++ b/.context/CONSTITUTION.md @@ -12,11 +12,13 @@ DO NOT UPDATE FOR: - Temporary constraints (use TASKS.md blockers) --> -These rules are INVIOLABLE. If a task requires violating these, the task is wrong. +These rules are INVIOLABLE. If a task requires violating these, the +task is wrong. ## Completion Over Motion -Work is only complete when it is **fully done**, not when progress has been made. +Work is only complete when it is **fully done**, not when progress +has been made. - The requested outcome must be delivered end-to-end. - Partial progress is not completion. @@ -30,6 +32,16 @@ If you start something, you own it, you finish it. --- +## Context Integrity Invariants + +- [ ] **Never** modify or reason about a file based on partial or assumed content +- [ ] If a file is the subject of an operation, its relevant contents must be + **fully understood** before acting +- [ ] Sampling, guessing, or relying on prior assumptions instead of reading + is a **violation** + +--- + ## No Excuse Generation **Never default to deferral.** @@ -73,27 +85,36 @@ Leave the system in a better state than you found it. - [ ] All code must pass tests before commit - [ ] No TODO comments in main branch (move to TASKS.md) -- [ ] Path construction uses stdlib — no string concatenation (security: prevents path traversal) +- [ ] Path construction uses stdlib: no string concatenation + (security: prevents path traversal) ## Process Invariants - [ ] All architectural changes require a decision record -- [ ] Context loading is not a detour from your task. It IS the first step of every session. 
A 30-second read delay is always cheaper than a decision made without context. -- [ ] Every commit references a spec (`Spec: specs/.md` trailer) — no exceptions, no "non-trivial" qualifier. Even one-liner fixes need a spec for traceability. Use `/ctx-commit` instead of raw `git commit`. +- [ ] Context loading is not a detour from your task. It IS the first + step of every session. A 30-second read delay is always cheaper + than a decision made without context. +- [ ] Every commit references a spec (`Spec: specs/.md` trailer): + no exceptions, no "non-trivial" qualifier. Even one-liner fixes + need a spec for traceability. Use `/ctx-commit` instead of raw + `git commit`. ## TASKS.md Structure Invariants -TASKS.md must remain a replayable checklist. Uncheck all items and re-run = verify/redo all tasks in order. +TASKS.md must remain a replayable checklist. Uncheck all items and +re-run = verify/redo all tasks in order. -- [ ] **Never move tasks** — tasks stay in their Phase section permanently -- [ ] **Never remove Phase headers** — Phase labels provide structure and order -- [ ] **Never merge or collapse Phase sections** — each phase is a logical unit -- [ ] **Never delete tasks** — mark as `[x]` completed, or `[-]` skipped with reason -- [ ] **Use inline labels for status** — add `#in-progress` to task text, don't move it -- [ ] **No "In Progress" / "Next Up" sections** — these encourage moving tasks -- [ ] **Ask before restructuring** — if structure changes seem needed, ask the user first +- [ ] **Never move tasks**: tasks stay in their Phase section permanently +- [ ] **Never remove Phase headers**: Phase labels provide structure and order +- [ ] **Never merge or collapse Phase sections**: each phase is a logical unit +- [ ] **Never delete tasks**: mark as `[x]` completed, or `[-]` skipped with reason +- [ ] **Use inline labels for status**: add `#in-progress` to task text, don't move it +- [ ] **No "In Progress" / "Next Up" sections**: these encourage moving 
tasks +- [ ] **Ask before restructuring**: if structure changes seem needed, ask the user first ## Context Preservation Invariants -- [ ] **Archival is allowed, deletion is not** — use `ctx tasks archive` to move completed tasks to `.context/archive/`, never delete context history -- [ ] **Archive preserves structure** — archived tasks keep their Phase headers for traceability +- [ ] **Archival is allowed, deletion is not**: use `ctx task archive` to move + completed tasks to `.context/archive/`, never delete context history +- [ ] **Archive preserves structure**: archived tasks keep their Phase headers + for traceability diff --git a/.context/CONVENTIONS.md b/.context/CONVENTIONS.md index a86800509..adf3d8624 100644 --- a/.context/CONVENTIONS.md +++ b/.context/CONVENTIONS.md @@ -26,39 +26,9 @@ DO NOT UPDATE FOR: - **Maps reference constants**: Use constants as keys, not literals - `map[string]X{ConstKey: value}` not `map[string]X{"literal": value}` -## Casing - -- **Proper nouns keep their casing** in comments, strings, and docs - - `Markdown` not `markdown` (it's a language name) - - `YAML`, `JSON`, `TOML` — always uppercase - - `GitHub`, `JavaScript`, `PostgreSQL` — match official casing - - Exception: code fence language identifiers are lowercase (`` ```markdown ``) - -## Predicates - -- **No Is/Has/Can prefixes**: `Completed()` not - `IsCompleted()`, `Empty()` not `IsEmpty()` -- Applies to exported methods that return bool -- Private helpers may use prefixes when it reads more naturally - -## File Organization - -- **Public API in main file, private helpers in separate logical files** - - `loader.go` (exports `Load()`) + `process.go` (unexported helpers) - - NOT: one file with unexported functions stacked at the bottom -- Reasoning: agent loads only the public API file unless - it needs implementation detail -- **Name files after what they contain, not their role** - - `format.go`, `sort.go`, `parse.go` — named by responsibility - - NOT: `util.go`, `utils.go`, 
`helper.go`, `common.go` — junk drawer names - - If a file can't be named without a generic label, - its contents don't belong together - - Existing junk drawers should be split as their contents grow - ## Patterns -- **Centralize magic strings**: All repeated literals - belong in a `config` or `constants` package +- **Centralize magic strings**: All repeated literals belong in a `config` or `constants` package - If a string appears in 3+ files, it needs a constant - If a string is used for comparison, it needs a constant - **Path construction**: Always use stdlib path joining @@ -67,75 +37,19 @@ DO NOT UPDATE FOR: - Node: `path.join(dir, file)` - Never: `dir + "/" + file` - **Constants reference constants**: Self-referential definitions - - `FileType[UpdateTypeTask] = FilenameTask` not - `FileType["task"] = "TASKS.md"` -- **No error variable shadowing**: Use descriptive names - when multiple errors exist in a function - - `readErr`, `writeErr`, `indexErr` — not repeated `err` / `err :=` - - Shadowed `err` silently disconnects from the outer - variable, causing subtle bugs + - `FileType[UpdateTypeTask] = FilenameTask` not `FileType["task"] = "TASKS.md"` - **Colocate related code**: Group by feature, not by type - `session/run.go`, `session/types.go`, `session/parse.go` - Not: `runners/session.go`, `types/session.go`, `parsers/session.go` -## Line Width - -- **Target ~80 characters**: Highly encouraged, not a hard limit - - Some lines will naturally exceed it (long strings, - struct tags, URLs) — that's fine - - Drift accumulates silently, especially in test code - - Break at natural points: function arguments, struct fields, chained calls - -## Duplication - -- **Non-test code**: Apply the rule of three — extract - when a block appears 3+ times - - Watch for copy-paste during task-focused sessions - where the agent prioritizes completion over shape -- **Test code**: Some duplication is acceptable for readability - - When the same setup/assertion block appears 3+ 
times, extract a test helper - - Use `t.Helper()` so failure messages point to the caller, not the helper - ## Testing - **Colocate tests**: Test files live next to source files - `foo.go` → `foo_test.go` in same package - Not a separate `tests/` folder -- **Test the unit, not the file**: One test file can test - multiple related functions +- **Test the unit, not the file**: One test file can test multiple related functions - **Integration tests are separate**: `cli_test.go` for end-to-end binary tests -## Code Change Heuristics - -- **Present interpretations, don't pick silently**: If a request has multiple - valid readings, lay them out rather than guessing -- **Push back when warranted**: If a simpler approach exists, say so -- **"Would a senior engineer call this overcomplicated?"**: If yes, simplify -- **Match existing style**: Even if you'd write it differently in a greenfield -- **Every changed line traces to the request**: If it doesn't, revert it - -## Decision Heuristics - -- **"Would I start this today?"**: If not, continuing is - the sunk cost — evaluate only future value -- **"Reversible or one-way door?"**: Reversible decisions - don't need deep analysis -- **"Does the analysis cost more than the decision?"**: - Stop deliberating when the options are within an order - of magnitude -- **"Order of magnitude, not precision"**: 10x better - matters; 10% better usually doesn't - -## Refactoring - -- **Measure the end state, not the effort**: When refactoring, ask what the - codebase looks like *after*, not how much work the change is -- **Three questions before restructuring**: - 1. What's the smallest codebase that solves this? - 2. Does the proposed change result in less total code? - 3. What can we delete now that this change makes obsolete? 
-- **Deletion is a feature**: Writing 50 lines that delete 200 is a net win - ## Documentation - **Godoc format**: Use canonical sections @@ -152,107 +66,5 @@ DO NOT UPDATE FOR: // - Type: Description of return value func FunctionName(param1, param2 string) error ``` -- **Struct field documentation**: Exported structs with 2+ fields - must document every field. Two accepted forms: - ```go - // Option A: Fields section in docblock (preferred for 4+ fields) - // TypeName describes X. - // - // Fields: - // - FieldA: Description - // - FieldB: Description - type TypeName struct { - - // Option B: Inline comments (acceptable for 2-3 fields) - // TypeName describes X. - type TypeName struct { - // FieldA is the description. - FieldA string - FieldB string // Description - } - ``` -- **Package doc in doc.go**: Each package gets a `doc.go` with package-level - documentation describing behavior, not structure. Do NOT include - `# File Organization` sections listing files — they drift when files are - added, renamed, or removed, and the filesystem is self-documenting +- **Package doc in doc.go**: Each package gets a `doc.go` with package-level documentation - **Copyright headers**: All source files get the project copyright header - -## Blog Publishing - -- **Checklist for ideas/ → docs/blog/ promotion**: - 1. Update date in frontmatter to publish date - 2. Fix relative paths (from `../docs/blog/` to peer references) - 3. Add cross-links to/from companion posts ("See also" sections) - 4. Add "The Arc" section connecting to the series narrative - 5. Update `docs/blog/index.md` with entry (newest first) - 6. Verify all link targets exist - 7. 
Build and test before commit -- **Arc section**: Every post includes "The Arc" near the end, framing - where the post sits in the broader blog narrative -- **See also links**: Use italic `*See also: [Title](file) -- one-line - description connecting the two posts.*` format at the end of posts -- **Frontmatter**: Include copyright header, title, date, author, topics list -- **Blog index order**: Newest post first, with topic tags and 3-4 line summary - -- **Update admonitions for historical blog content**: Use MkDocs admonitions - (`!!! note "Update"`) at the top of blog post sections where features have - been superseded or installation has changed. Link to current documentation. - Keep original content intact below for historical context. -- **New CLI subcommand documentation checklist**: Update docs in at least - three places: (1) Feature page — commands table, usage section, skill/NL - table. (2) CLI reference — full reference entry with args, flags, examples. - (3) Relevant recipes. (4) zensical.toml — only if adding a new page. -- **Rename/refactor documentation checklist**: Scope ALL documentation impact - before implementation. Three anchors plus one tangential: (1) Docstrings. - (2) User-facing docs (`docs/`). (3) Recipes (`docs/recipes/`). (4) Blog - posts and release notes. Also check: skills, hook messages, YAML text - files, `.context/` files, and specs. -- **Stage site/ with docs/ changes**: The generated HTML is tracked in git - with no CI build step - -## Error Handling - -- **Zero silent error discard**: Handle every error, never suppress with - `_ =` or `//nolint:errcheck`. Production: defer-close logs to stderr - via `log.Warn()`. Test: `t.Fatal(err)` for setup, `t.Log(err)` for - cleanup. 
For gosec false positives: fix the code rather than adding - nolint markers — the goal is zero golangci-lint suppressions -- **Error constructors in internal/err**: Never in per-package err.go - files — eliminates the broken-window pattern where agents add local - errors when they see a local err.go exists - -## CLI Structure - -- **CLI package taxonomy**: Every package under `internal/cli/` follows: - parent.go (Cmd wiring), doc.go, `cmd/root/` or `cmd//` - (implementation), `core/` (shared helpers) -- **cmd/ directories**: Only cmd.go, run.go, and tests — helpers and - output go to `core/` -- **core/ structs**: Consolidated into a single `types.go` file -- **User-facing text via assets**: All text routed through - `internal/assets` with YAML-backed TextDescKeys — no inline strings - in `core/` or `cmd/` packages -- **config/ doc.go**: Every package under `internal/config/` must have - a doc.go with the project header and a one-line package comment -- **DescKey prefix**: Not CmdDescKey — `cmd.DescKeyFoo` not - `cmd.CmdDescKeyFoo` (Go package hygiene, avoids stutter) -- **Cobra Use: fields**: Must reference `cmd.Use*` constants, never raw - strings or `cmd.DescKey*` -- **Run functions exported PascalCase**: `Run`, `RunImport`, - `RunArchive` etc. No private `runXXX` variants -- **write/ packages write to stdio only**: Functions take - `*cobra.Command`, not `io.Writer`. Exception: `write/rc` writes to - `os.Stderr` because rc loads before cobra -- **Package directory names singular**: Unless Go convention requires - plural -- **Import grouping**: stdlib — blank line — external deps (cobra, - yaml) — blank line — ctx imports. 
Three groups, always in this order -- **camelCase import aliases**: `cFlag` not `cflag`, `cfgFmt` not - `cfgfmt` -- **Icons and symbols as token constants**: Not unicode escapes -- **Cross-cutting domain types in internal/entity**: Types used by one - package stay in that package; types used across packages go to entity - -- Warn format strings centralized in config/warn/ — use warn.Close, - warn.Write, warn.Remove, warn.Mkdir, warn.Rename, warn.Walk, warn.Getwd, - warn.Readdir, warn.Marshal instead of inline format strings in log.Warn calls diff --git a/.context/DECISIONS.md b/.context/DECISIONS.md index b9fbe3457..557f48513 100644 --- a/.context/DECISIONS.md +++ b/.context/DECISIONS.md @@ -3,78 +3,8 @@ | Date | Decision | |----|--------| -| 2026-04-13 | Walk boundary uses git as a hint, not a requirement | -| 2026-04-11 | Journal stays local; LEARNINGS.md is the shareable layer | -| 2026-04-11 | `Entry.Author` is server-authoritative, not client-authoritative | -| 2026-04-09 | Architecture skill pipeline is a triad not a quartet | -| 2026-04-08 | Remove #done tag convention, simplify task archival | -| 2026-04-06 | Use hook relay for session provenance instead of JSONL parsing or env vars | -| 2026-04-04 | TestNoMagicStrings and TestNoMagicValues no longer exempt const/var definitions outside config/ | -| 2026-04-04 | String-typed enums belong in config/, not domain packages | -| 2026-04-03 | Output functions belong in write/ (consolidated) | -| 2026-04-03 | YAML text externalization pipeline (consolidated) | -| 2026-04-03 | Package taxonomy and code placement (consolidated) | -| 2026-04-03 | Eager init over lazy loading (consolidated) | -| 2026-04-03 | Pure logic separation of concerns (consolidated) | -| 2026-04-03 | config/ explosion is correct — fix is documentation, not restructuring | -| 2026-04-01 | IRC to Discord as primary community channel | -| 2026-04-01 | AST audit tests live in internal/audit/, one file per check | -| 2026-04-01 | Split assets/hooks/ 
into assets/integrations/ + assets/hooks/messages/ | -| 2026-04-01 | Rename ctx hook → ctx setup to disambiguate from the hook system | -| 2026-03-31 | Split log into log/event and log/warn to break import cycles | -| 2026-03-31 | Context-load-gate injects only CONSTITUTION and AGENT_PLAYBOOK_GATE, not full ReadOrder | -| 2026-03-31 | Spec signal words and nudge threshold are user-configurable via .ctxrc | -| 2026-03-30 | Flags-not-subcommands for journal source: list and show are view modes on a noun, not independent entities | -| 2026-03-30 | Journal consumed recall — recall CLI package deleted | -| 2026-03-30 | Classify rules are user-configurable via .ctxrc | -| 2026-03-25 | Architecture analysis and enrichment are separate skills — constraint is the feature | -| 2026-03-25 | Companion tools documented as optional MCP enhancements with runtime check | -| 2026-03-25 | Prompt templates removed — skills are the single agent instruction mechanism | -| 2026-03-24 | Write-once baseline with explicit end-consolidation for consolidation lifecycle | -| 2026-03-23 | Pre/pre HTML tags promoted to shared constants in config/marker | -| 2026-03-22 | Output functions belong in write/, never in core/ or cmd/ | -| 2026-03-20 | Shared formatting utilities belong in internal/format | -| 2026-03-20 | Go-YAML linkage check added to lint-drift as check 5 | -| 2026-03-18 | Singular command names for all CLI entities | -| 2026-03-17 | Pre-compute-then-print for write package output blocks | -| 2026-03-16 | Resource name constants in config/mcp/resource, mapping in server/resource | -| 2026-03-16 | Rename --consequences flag to --consequence for singular consistency | -| 2026-03-14 | Error package taxonomy: 22 domain files replace monolithic errors.go | -| 2026-03-14 | Session prefixes are parser vocabulary, not i18n text | -| 2026-03-14 | System path deny-list as safety net, not security boundary | -| 2026-03-14 | Config-driven freshness check with per-file review URLs | -| 
2026-03-13 | Delete ctx-context-monitor skill — hook output is self-sufficient | -| 2026-03-13 | build target depends on sync-why to prevent embedded doc drift | -| 2026-03-12 | Recommend companion RAGs as peer MCP servers not bridge through ctx | -| 2026-03-12 | Rename ctx-map skill to ctx-architecture | -| 2026-03-07 | Use composite directory path constants for multi-segment paths | -| 2026-03-06 | Drop fatih/color dependency — Unicode symbols are sufficient for terminal output, color was redundant | -| 2026-03-06 | PR #27 (MCP server) meets v0.1 spec requirements — merge-ready pending 3 compliance fixes | -| 2026-03-06 | Skills stay CLI-based; MCP Prompts are the protocol equivalent | -| 2026-03-06 | Peer MCP model for external tool integration | -| 2026-03-06 | Create internal/parse for shared text-to-typed-value conversions | -| 2026-03-06 | Centralize errors in internal/err, not per-package err.go files | -| 2026-03-05 | Gitignore .context/memory/ for this project | -| 2026-03-05 | Memory bridge design: three-phase architecture with hook nudge + on-demand | -| 2026-03-05 | Revised strategic analysis: blog-first execution order, bidirectional sync as top-level section | -| 2026-03-04 | Interface-based GraphBuilder for multi-ecosystem ctx deps | -| 2026-03-02 | Billing threshold piggybacks on check-context-size, not heartbeat | -| 2026-03-02 | Replace auto-migration with stderr warning for legacy keys | -| 2026-03-02 | Consolidate all session state to .context/state/ | -| 2026-03-01 | PersistentPreRunE init guard with three-level exemption | -| 2026-03-01 | Global encryption key at ~/.ctx/.ctx.key | -| 2026-03-01 | Heartbeat token telemetry: conditional fields, not always-present | -| 2026-03-01 | Hook log rotation: size-based with one previous generation, matching eventlog pattern | -| 2026-03-01 | Promote 6 private skills to bundled plugin skills; keep 7 project-local | -| 2026-02-27 | Context window detection: JSONL-first fallback order | -| 2026-02-27 | 
Context injection architecture v2 (consolidated) | -| 2026-02-26 | .context/state/ directory for project-scoped runtime state | -| 2026-02-26 | Hook and notification design (consolidated) | -| 2026-02-26 | ctx init and CLAUDE.md handling (consolidated) | -| 2026-02-26 | Task and knowledge management (consolidated) | -| 2026-02-26 | Agent autonomy and separation of concerns (consolidated) | -| 2026-02-26 | Security and permissions (consolidated) | -| 2026-02-27 | Webhook and notification design (consolidated) | +| 2026-04-25 | Use t.Setenv for subprocess env in tests, not append(os.Environ(), ...) | +| 2026-04-25 | Tighten state.Dir / rc.ContextDir to (string, error) with sentinel errors | - -## [2026-04-13-153617] Walk boundary uses git as a hint, not a requirement - -**Status**: Accepted - -**Context**: ctx init failed when a non-ctx-initialized repo lived inside a -ctx-initialized parent workspace. walkForContextDir walked up and found the -parent's .context, then the boundary check rejected it. We considered -project-marker heuristics (go.mod, package.json) and making git mandatory. - -**Decision**: Walk boundary uses git as a hint, not a requirement - -**Rationale**: Project markers are unreliable (e.g. package.json for customer -shipments, Haskell projects have no common marker). Making git mandatory breaks -ctx's 'git recommended but not required' stance. Git-as-hint resolves the bug -without new dependencies: walk finds candidate, validate against git root, -discard if outside; fall back to CWD when no git is found. - -**Consequence**: walkForContextDir now consults findGitRoot to anchor ancestor -.context candidates. Monorepos, submodules, and nested workspaces resolve -correctly. No-git projects still work via CWD fallback. 
- ---- - -## [2026-04-11-200000] Journal stays local; LEARNINGS.md is the shareable layer - -**Status**: Accepted - -**Context**: With the hub now carrying shared project context between machines -and eventually between teammates, the question came up whether enriched -journal entries should ride along — either the raw `.context/journal/` files -or an "export enriched entries as shareable learning items" pipeline layered -on top of `/ctx-journal-enrich`. The journal is already gitignored per the -2026-03-05 `.context/memory/` decision and for the same reason: it's a -first-person log of raw prompts, half-formed thoughts, dead ends, personal -names, and things the user talks through with themselves. It sits in the -same trust tier as shell history or a private notebook. - -The trade-off is real: shared journals would make it trivial for teammates -(or future-me on another machine) to see the full reasoning trail behind a -decision. But "full reasoning trail" is precisely the thing that makes a -journal a journal and not a changelog — it includes the parts the author -hasn't decided to stand behind yet, plus incidental private content. - -**Decision**: The journal is **Tier-0 personal** and never leaves the -originating machine. No hub sync, no export-by-default, no -enriched-entries-as-shareable-items pipeline. The enrichment pipeline -(`/ctx-journal-enrich`) stays as-is: journal → human-in-the-loop review → -explicit promotion to LEARNINGS.md / DECISIONS.md / CONVENTIONS.md via the -existing `/ctx-learning-add`, `/ctx-decision-add`, `/ctx-convention-add` -commands. Those distilled artifacts are **Tier-1 shareable** and are what -the hub syncs when a team opts into shared context. - -The promotion boundary is therefore the enrichment step, not a new export -pipeline. The user is the gate. 
- -**Rationale**: Any "shareable enriched journal entry" pipeline would have to -re-implement the trust boundary that `/ctx-learning-add` already enforces: -the human decides what's worth sharing, strips incidental private content, -and rewrites it as a standalone artifact. A second pipeline that tries to -do this automatically would either (a) leak private content by accident, or -(b) require the same human review and thus collapse back into -`/ctx-learning-add`. The principled answer is that there is no second -pipeline — LEARNINGS.md *is* the shareable form of the journal. - -This also preserves the psychological safety of the journal: the author -can write freely because they know nothing they write is one sync away -from a teammate's screen. Lose that property and the journal stops being a -journal and starts being a changelog draft. - -**Consequence**: - -- Journal files stay gitignored and stay out of `ctx hub` sync paths. Any - future code that walks context files for replication must exclude - `.context/journal/` explicitly and be covered by a test. -- `/ctx-journal-enrich` remains the promotion boundary. Its output targets - are LEARNINGS.md / DECISIONS.md / CONVENTIONS.md, never a separate - "shareable journal" bucket. -- Hub docs (`docs/home/hub.md`, `docs/recipes/hub-personal.md`, - `docs/recipes/hub-team.md`, `docs/security/hub.md`) should state the - Tier-0 / Tier-1 split explicitly so users building team workflows don't - assume "shared context" means "shared everything." -- The sync code path in `internal/hub/sync_helper.go` and any future - replication of context files must enforce this exclusion at the - code level — a gitignore entry is a user-convenience signal, not a - hub-trust boundary. -- A potential future "personal multi-machine journal sync" (same human, - different laptops) is explicitly **out of scope** of this decision. 
If - it ever ships, it rides a different transport (encrypted-at-rest, - single-user, not the team hub) and needs its own decision record. - -**Alternatives considered**: - -- **Sync raw journal files via hub**: rejected. Inverts the gitignore - decision, leaks private content by construction, destroys the - journal's "safe to write freely" property. -- **Auto-export enriched entries as a new shareable artifact type**: - rejected. Duplicates `/ctx-learning-add` without the human gate, or - collapses back into it. No real difference from the status quo except - the opportunity for accidental leakage. -- **Opt-in per-entry "publish to hub" flag in the journal**: rejected as - premature. If the user wants an entry on the hub, the existing flow is - one command away — write it as a learning or decision. A second path - adds surface area without adding capability. - -**Related**: Reinforces the 2026-03-05 `.context/memory/` gitignore -decision (same trust-tier reasoning for a different private artifact). - -## [2026-04-11-180000] `Entry.Author` is server-authoritative, not client-authoritative - -**Status**: Accepted - -**Context**: The `Entry.Author` field on hub entries is copied verbatim from -the client's publish request (`handler.go:82`). It's optional, freeform, and -unauthenticated — a client with a valid token for project `alpha` can publish -entries claiming `Author: "bob@acme.com"` regardless of who actually -authenticated. This is the same spoofing pattern as `Origin` (audit finding -H-04) and was flagged as audit finding H-22 with four options: keep, drop, -override, or promote. The decision was never formally closed. - -The premise that resolved it: **identity is eventually part of the token**. -Under the sysadmin-registry MVP, the server already knows `{user_id, project}` -from the authenticated token. Under the PKI stretch, the signed claim carries -identity cryptographically. 
In both models, the client has nothing to say about -authorship that the server doesn't already know with higher confidence. - -**Decision**: `Entry.Author` is **server-authoritative**. The server stamps it -from the authenticated identity source on every publish. The client's -`pe.Author` input is ignored (or rejected — implementation choice, not -semantic difference). The field stays in the wire format but its semantics -change from "whatever the client said" to "whatever the server's auth layer -resolved." - -Stamping source by phase: - -- **Today (pre-registry)**: `Author = ClientInfo.ProjectName`, same source as - the `Origin` server-enforcement fix (H-04). Lossy but consistent. -- **Registry MVP**: `Author = users.json` row's `user_id` (e.g., - `alice@acme.com`). Precise per-human attribution. -- **PKI stretch**: `Author = signed claim's sub field`. Cryptographic identity. - -**Rationale**: Dropping the field is wrong because the registry MVP will -already give us a per-user identity to stamp — removing Author just to re-add -it later is churn. "Override" and "promote" are cosmetically different forms -of the same decision (server fills from auth context); "promote" is what -happens naturally once the registry MVP types the field as `UserID`. -Client-sourced Author is indefensible because it replicates the Origin -spoofing vector in a second field. - -**Consequence**: - -- The Author field stays on the wire and in `Entry{}`. -- Client-side code that populates `pe.Author` from local config becomes a - no-op. Audit `ctx connect publish` and `ctx add --share` for any such - code paths before the server-enforcement fix lands. -- `handler.go publish()` fills Author from the authenticated context (the - same `ClientInfo` that H-04 pulls for Origin). Single unified - auth-to-handler pipe. 
-- `docs/security/hub.md` "Compromised client token" section gets rewritten: - attribution becomes **wrong** on compromise (attacker's token maps to - attacker's identity), not **forgeable** (attacker cannot stamp someone - else's name). -- The sysadmin-registry spec (`specs/hub-identity-registry.md`, tasked) - MUST include a `user_id` field per row — it's the stamping source. -- Three open tasks collapse into one: H-22 resolves to "implement - server-authoritative Author" instead of "decide Author fate." TASKS.md - updated. - -**Alternatives considered**: - -- **Keep client-authoritative**: rejected. Same spoofing vector as Origin; - trivially defeats any downstream attribution check. -- **Drop the field**: rejected. The registry MVP will need per-human - attribution anyway. Dropping today is churn that gets undone - immediately. -- **Override at client-side before publish**: rejected. Puts the security - boundary on the wrong side of the trust zone. Must be server-side. - -**Follow-up — client-advisory metadata**: the client still has useful -information to share that isn't an identity claim: a human-friendly -display name, the machine that made the publish, the tool version, a -CI system label, a team/role handle. This lives on a **new sibling -field `Meta`** (a `ClientMetadata` sub-struct), not on `Author`. The -separation of types is what protects the security property: `Author` -is reserved for server-authoritative identity, `Meta` is -client-advisory and explicitly labeled as such in any rendered -surface. `Meta` fields are size-capped individually (256 bytes) and -in aggregate (2 KB), validated for plain-string content (no -newlines, no control characters), and never claimed as attribution -in any API response. The renderer MUST label `Meta`-sourced values -with prose like "client label" or "client-reported" so readers -cannot mistake them for authoritative identity. See TASKS.md for -the implementation task. 
- ---- - -## [2026-04-09-001332] Architecture skill pipeline is a triad not a quartet - -**Status**: Accepted - -**Context**: Had a proposed ctx-architecture-extend for extension point mapping, -making four skills - -**Decision**: Architecture skill pipeline is a triad not a quartet - -**Rationale**: Extension points already covered per-module in DETAILED_DESIGN -and by registration site discovery in enrich. Fourth skill fragments pipeline -without distinct value - -**Consequence**: Pipeline is map enrich hunt. Three skills three questions: how -does it work, how well does it connect, where will it break - ---- - -## [2026-04-08-013731] Remove #done tag convention, simplify task archival - -**Status**: Accepted - -**Context**: Tasks had #done:YYYY-MM-DD timestamps that agents added -inconsistently and nobody read. compact --archive filtered by age using these -timestamps. - -**Decision**: Remove #done tag convention, simplify task archival - -**Rationale**: [x] checkbox is semantically sufficient. git blame provides the -completion timestamp. Removing #done eliminates redundant ceremony and -simplifies compact --archive to archive all completed tasks regardless of age. - -**Consequence**: compact --archive no longer filters by archive_after_days for -tasks. The .ctxrc field is inert but retained for backwards compatibility. -Historical #done tags in archives are preserved. - ---- - -## [2026-04-06-204212] Use hook relay for session provenance instead of JSONL parsing or env vars - -**Status**: Accepted - -**Context**: Needed to give agents awareness of their session ID, branch, and -commit hash for task/decision/learning provenance. Considered three approaches: -(1) parsing most-recent JSONL at runtime, (2) CTX_SESSION_ID env var, (3) hook -relay via UserPromptSubmit. - -**Decision**: Use hook relay for session provenance instead of JSONL parsing or -env vars - -**Rationale**: JSONL parsing breaks with parallel sessions (wrong file picked). 
-Env vars aren't exported by Claude Code. Hook relay is zero-state: the hook -receives session_id from Claude Code on every prompt, emits it, agent absorbs -through repetition. No counters, no cleanup, no resume edge cases. - -**Consequence**: Provenance depends on the hook being registered (enabledPlugins -in settings.local.json). Projects without plugin registration get no provenance. -Filed as separate bug. - ---- - -## [2026-04-04-025755] TestNoMagicStrings and TestNoMagicValues no longer exempt const/var definitions outside config/ - -**Status**: Accepted - -**Context**: The isConstDef/isVarDef blanket exemption masked 156+ string and 7 -numeric constants in the wrong package - -**Decision**: TestNoMagicStrings and TestNoMagicValues no longer exempt -const/var definitions outside config/ - -**Rationale**: Const definitions outside config/ are magic values in the wrong -place — naming them does not fix the structural problem - -**Consequence**: All new code with string/numeric constants outside config/ -fails these tests immediately - ---- - -## [2026-04-04-025746] String-typed enums belong in config/, not domain packages - -**Status**: Accepted - -**Context**: Debated whether type IssueType string with const values belongs in -domain or config. The string value is the same regardless of type annotation. - -**Decision**: String-typed enums belong in config/, not domain packages - -**Rationale**: Types without behavior belong in config. Promote to entity/ only -when methods/interfaces appear. - -**Consequence**: All type Foo string + const blocks outside config/ are now -caught by TestNoMagicStrings. 
- ---- - -## [2026-04-03-180000] Output functions belong in write/ (consolidated) - -**Status**: Accepted - -**Consolidated from**: 2 entries (2026-03-21 to 2026-03-22) - -**Decision**: Output functions belong in write/, logic and types in core/, -orchestration in cmd/ - -**Rationale**: The write/ taxonomy is flat by domain — each CLI feature gets -its own write/ package. core/ owns domain logic and types. cmd/ owns Cobra -orchestration. Functions that call cmd.Print/Println/Printf belong in write/. -core/ never imports cobra for output purposes. - -**Consequence**: All new CLI output must go through a write/ package. No -cmd.Print* calls in internal/cli/ outside of internal/write/. - ---- - -## [2026-04-03-180000] YAML text externalization pipeline (consolidated) - -**Status**: Accepted - -**Consolidated from**: 5 entries (2026-03-06 to 2026-04-03) - -**Decision**: All user-facing text externalized to embedded YAML domain files, -justified by agent legibility and drift prevention — not i18n - -**Rationale**: The real justification is agent legibility (named DescKey -constants as traversable graphs) and drift prevention (TestDescKeyYAMLLinkage -catches orphans mechanically). i18n is a free downstream consequence. The -exhaustive test verifies all constants resolve to non-empty YAML values — new -keys are automatically covered. - -**Consequence**: commands.yaml split into 4 domain files (commands, flags, text, -examples) loaded via dedicated loaders. text.yaml split into 6 domain files -loaded via loadYAMLDir. The 3-file ceremony (DescKey + YAML + write/err -function) is the cost of agent-legible, drift-proof output. - ---- - -## [2026-04-03-180000] Package taxonomy and code placement (consolidated) - -**Status**: Accepted - -**Consolidated from**: 3 entries (2026-03-06 to 2026-03-13) - -**Decision**: Three-zone taxonomy: cmd/ for Cobra wiring (cmd.go + run.go), -core/ for logic and types, assets/ for templates and user-facing text. 
config/ -for structural constants only. - -**Rationale**: Taxonomical symmetry makes navigation instant and agent-friendly. -Domain types that multiple packages consume belong in domain packages -(internal/entry), not CLI subpackages. Templates and user-facing text live in -assets/ for i18n readiness; structural constants (paths, limits, regexes) stay -in config/. - -**Consequence**: Every CLI package has the same predictable shape. Shared entry -types live in internal/entry. Template files (tpl_*.go) moved from config/ to -assets/. 474 files changed in initial restructuring. - ---- - -## [2026-04-03-180000] Eager init over lazy loading (consolidated) - -**Status**: Accepted - -**Consolidated from**: 2 entries (2026-03-16 to 2026-03-18) - -**Decision**: Explicit Init() called eagerly at startup for static embedded data -and resource lookups, instead of per-accessor sync.Once or package-level init() - -**Rationale**: Static embedded data is required at startup — sync.Once per -accessor is cargo cult. Package-level init() hides startup dependencies and -makes ordering unclear. Explicit Init() called from main.go / NewServer makes -the dependency visible and testable. - -**Consequence**: Maps unexported, accessors are plain lookups. Tests call Init() -in TestMain. res.Init() called from NewServer before ToList(). No package-level -side effects, zero sync.Once in the lookup pipeline. - ---- - -## [2026-04-03-180000] Pure logic separation of concerns (consolidated) - -**Status**: Accepted - -**Consolidated from**: 3 entries (2026-03-15 to 2026-03-23) - -**Decision**: Pure-logic functions return data structs; callers own I/O, file -writes, and reporting. Function pointers in param structs replaced with text -keys. - -**Rationale**: Pure logic with no I/O lets both MCP (JSON-RPC) and CLI (cobra) -callers control output independently. Methods that don't access receiver state -hide their true dependencies — make them free functions. 
If all callers of a -callback vary only by a string key, the callback is data in disguise. - -**Consequence**: CompactContext returns CompactResult; callers iterate -FileUpdates. Server response helpers in server/out, prompt builders in -server/prompt. All cross-cutting param structs in entity are -function-pointer-free. - ---- - -## [2026-04-03-133244] config/ explosion is correct — fix is documentation, not restructuring - -**Status**: Accepted - -**Context**: Architecture analysis flagged 60+ config sub-packages as a -bottleneck. Evaluation showed the alternative (8-10 domain packages) trades -granular imports for fat dependency units. Current structure gives zero internal -dependencies, surgical dependency tracking, and minimal recompile scope. - -**Decision**: config/ explosion is correct — fix is documentation, not -restructuring - -**Rationale**: Go's compilation unit is the package. Granular packages mean -precise dependency tracking. The developer experience cost (IDE noise, package -discovery) is real but solvable with a README decision tree, not restructuring. -Restructuring would be massive mechanical churn for cosmetic benefit. - -**Consequence**: config/README.md written with organizational guide and decision -tree. No restructuring planned. embed/text/ file count will shrink naturally -when tpl/ migrates to text/template. - ---- - -## [2026-04-01-233247] IRC to Discord as primary community channel - -**Status**: Accepted - -**Context**: Discord server exists at https://ctx.ist/discord; IRC/libera.chat -references were stale - -**Decision**: IRC to Discord as primary community channel - -**Rationale**: Discord is faster for async community support; IRC was historical - -**Consequence**: Updated zensical.toml, README, community docs, journal -template. 
Added community footer to ctx help and ctx init output via YAML assets -pipeline - ---- - -## [2026-04-01-233246] AST audit tests live in internal/audit/, one file per check - -**Status**: Accepted - -**Context**: Needed a home for AST-based codebase invariant tests separate from -the existing compliance_test.go monolith - -**Decision**: AST audit tests live in internal/audit/, one file per check - -**Rationale**: One test per file prevents the 1200+ line monster pattern. Shared -helpers in helpers_test.go with sync.Once caching. Package is all _test.go -except doc.go — produces no binary, not importable - -**Consequence**: New checks are added as individual *_test.go files; the pattern -(loadPackages, walk AST, collect violations, t.Error) is established and -repeatable - ---- - -## [2026-04-01-074417] Split assets/hooks/ into assets/integrations/ + assets/hooks/messages/ - -**Status**: Accepted - -**Context**: The directory mixed Copilot integration templates with hook message -templates - -**Decision**: Split assets/hooks/ into assets/integrations/ + -assets/hooks/messages/ - -**Rationale**: Integration assets (Copilot instructions, AGENTS.md, CLI -scripts/skills) are not hooks. Hook messages ARE the hook system templates. - -**Consequence**: integrations/ for tool integration assets, hooks/messages/ for -hook system templates. Embed directives and all config constants updated. - ---- - -## [2026-04-01-074416] Rename ctx hook → ctx setup to disambiguate from the hook system - -**Status**: Accepted - -**Context**: PR #45 contributor assumed hook meant the setup command, causing -naming collisions with the PreToolUse/PostToolUse hook system - -**Decision**: Rename ctx hook → ctx setup to disambiguate from the hook system - -**Rationale**: hook has a specific meaning in ctx; setup accurately describes -generating AI tool integration configs - -**Consequence**: CLI breaking change. All docs, specs, TypeScript extension, and -YAML assets updated. 
Released specs left as historical. - ---- - -## [2026-03-31-224245] Split log into log/event and log/warn to break import cycles - -**Status**: Accepted - -**Context**: io and notify could not import log.Warn because log imported both -of them for event logging, creating circular dependencies - -**Decision**: Split log into log/event and log/warn to break import cycles - -**Rationale**: Separating concerns (stderr sink vs JSONL event log) into -subpackages eliminated the cycle. Warn sink is foundation-level with only config -imports, event logging is higher-level - -**Consequence**: All stderr warnings now route through logWarn.Warn(). New code -importing log/warn has no cycle risk. Event types moved to internal/entity - ---- - -## [2026-03-31-182003] Context-load-gate injects only CONSTITUTION and AGENT_PLAYBOOK_GATE, not full ReadOrder - -**Status**: Accepted - -**Context**: Force-loading ~14k tokens of context files (8 files) every session -diluted attention without proportional value. CLAUDE.md already instructs agents -to read full context files on-demand. Behavioral prose in force-loaded content -was routinely skipped. - -**Decision**: Context-load-gate injects only CONSTITUTION and -AGENT_PLAYBOOK_GATE, not full ReadOrder - -**Rationale**: Hard rules (CONSTITUTION) must be present before any action. -Distilled directives (gate file) provide actionable session-start guidance in -~2k tokens. Full playbook, conventions, architecture, decisions, learnings are -pulled on-demand when task context requires them. - -**Consequence**: New AGENT_PLAYBOOK_GATE.md file must stay in sync with -AGENT_PLAYBOOK.md. HTML comment cross-reference added to playbook header for -contributor discoverability. - ---- - -## [2026-03-31-005113] Spec signal words and nudge threshold are user-configurable via .ctxrc - -**Status**: Accepted - -**Context**: Initially hardcoded signal words and 150-char threshold in run.go. 
-User pointed out these are localizable vocabulary, following the -session_prefixes / classify_rules pattern - -**Decision**: Spec signal words and nudge threshold are user-configurable via -.ctxrc - -**Rationale**: Signal words are language-dependent and project-dependent — a -Spanish-speaking user or a non-Go project would have different signal terms - -**Consequence**: Added spec_signal_words and spec_nudge_min_len to CtxRC struct, -rc accessors with defaults in config/entry, JSON schema updated - ---- - -## [2026-03-30-075927] Flags-not-subcommands for journal source: list and show are view modes on a noun, not independent entities - -**Status**: Accepted - -**Context**: During the journal-recall merge, recall had separate list and show -subcommands. Merging them into journal created a design choice: source list + -source show (three levels) vs source --show (two levels). - -**Decision**: Flags-not-subcommands for journal source: list and show are view -modes on a noun, not independent entities - -**Rationale**: Keeps CLI nesting to two levels max. Default behavior (bare -source) lists sessions; --show switches to inspect mode. When two operations -differ only in how they view the same data, make them flags on one command. - -**Consequence**: journal source dispatches via --show flag rather than -positional subcommand. Future view-mode toggles should follow this pattern. - ---- - -## [2026-03-30-003756] Journal consumed recall — recall CLI package deleted - -**Status**: Accepted - -**Context**: ctx recall was never registered in bootstrap; ctx journal had all -the same subcommands - -**Decision**: Journal consumed recall — recall CLI package deleted - -**Rationale**: One dead command group creates confusion in docs and skills. -Journal is the canonical command group. - -**Consequence**: internal/cli/recall/ deleted, 19 doc files updated, -docs/cli/recall.md renamed to journal.md, zensical.toml updated. 
MCP tool -ctx_recall rename tasked separately (API contract) - ---- - -## [2026-03-30-003745] Classify rules are user-configurable via .ctxrc - -**Status**: Accepted - -**Context**: Memory entry classification used hardcoded keyword rules that could -not be customized - -**Decision**: Classify rules are user-configurable via .ctxrc - -**Rationale**: Users may work in domains where the default keywords do not match -(non-English, specialized terminology). Same pattern as session_prefixes. - -**Consequence**: classify_rules in .ctxrc overrides defaults; schema updated; -rc.ClassifyRules() accessor with fallback to config/memory.DefaultClassifyRules - ---- - -## [2026-03-25-233646] Architecture analysis and enrichment are separate skills — constraint is the feature - -**Status**: Accepted - -**Context**: Observed that agents take shortcuts when code intelligence tools -are available during architecture analysis. A 5.2x depth reduction was measured -(5866 vs 1124 lines) when GitNexus was available during reading. Mentioning -unavailable tools by name in a skill plants the idea for the agent to use them. - -**Decision**: Architecture analysis and enrichment are separate skills — -constraint is the feature - -**Rationale**: Discovery requires forced reading without shortcuts. Validation -and quantification are a separate pass. Two-pass compiler analogy: semantic -parsing (human-style reading) then static analysis (graph enrichment). Never -mention tools you want the agent to avoid — absence is the only reliable -constraint. - -**Consequence**: ctx-architecture deliberately excludes code intelligence tools -from allowed-tools and never mentions them. ctx-architecture-enrich is a -separate skill that runs after, using the deep artifacts as baseline. Gemini is -allowed in both for upstream/external lookups only. 
- ---- - -## [2026-03-25-173337] Companion tools documented as optional MCP enhancements with runtime check - -**Status**: Accepted - -**Context**: Gemini Search and GitNexus improve skills but no docs mentioned -them and no code checked their availability - -**Decision**: Companion tools documented as optional MCP enhancements with -runtime check - -**Rationale**: Users should know what tools enhance their workflow without being -forced to install them. Suppressible via .ctxrc for users who don't want them. - -**Consequence**: /ctx-remember smoke-tests MCPs at session start. -companion_check: false suppresses. - ---- - -## [2026-03-25-173336] Prompt templates removed — skills are the single agent instruction mechanism - -**Status**: Accepted - -**Context**: Prompt templates (.context/prompts/) overlapped with skills but had -no discoverability — even the project creator didn't know they existed - -**Decision**: Prompt templates removed — skills are the single agent -instruction mechanism - -**Rationale**: Adding metadata to prompts to fix discoverability would recreate -the skill system. One concept is better than two. - -**Consequence**: code-review, explain, refactor promoted to proper skills. ctx -prompt CLI removed. loop.md retained as ctx loop config file at -.context/loop.md. - ---- - -## [2026-03-24-001001] Write-once baseline with explicit end-consolidation for consolidation lifecycle - -**Status**: Accepted - -**Context**: Designing the consolidation nudge hook; multi-pass consolidation -spans dozens of sessions and you cannot programmatically distinguish feature -from consolidation sessions - -**Decision**: Write-once baseline with explicit end-consolidation for -consolidation lifecycle - -**Rationale**: First ctx-consolidate stamps baseline (write-once), user runs -end-consolidation when done. 
Failure mode is silence (no stale nudges), not -wrong behavior - -**Consequence**: Requires mark-consolidation, end-consolidation, and -snooze-consolidation plumbing commands. Spec: specs/consolidation-nudge-hook.md - ---- - -## [2026-03-23-165612] Pre/pre HTML tags promoted to shared constants in config/marker - -**Status**: Accepted - -**Context**: Two packages (normalize and format) used hardcoded pre strings -independently - -**Decision**: Pre/pre HTML tags promoted to shared constants in config/marker - -**Rationale**: Cross-package magic strings belong in config constants per -CONVENTIONS.md - -**Consequence**: marker.TagPre and marker.TagPreClose are the canonical -references; package-local constants deleted - ---- - -## [2026-03-22-084316] Output functions belong in write/, never in core/ or cmd/ - -**Status**: Accepted - -**Context**: System write migration revealed that cmd.Print* calls scattered -across core/ and cmd/ packages prevented localization and violated separation of -concerns - -**Decision**: Output functions belong in write/, never in core/ or cmd/ - -**Rationale**: The write/ taxonomy is flat by domain — each CLI feature gets -its own write/ package. core/ owns logic and types, cmd/ owns orchestration, -write/ owns all output. - -**Consequence**: All new CLI output must go through a write/ package. No -cmd.Print* calls in internal/cli/ outside of internal/write/. - ---- - -## [2026-03-20-232506] Shared formatting utilities belong in internal/format - -**Status**: Accepted - -**Context**: Pluralize, Duration, DurationAgo, and TruncateFirstLine were -duplicated across memory/core, change/core, and other CLI packages - -**Decision**: Shared formatting utilities belong in internal/format - -**Rationale**: internal/format already existed with TimeAgo and Number -formatters. 
Centralizing prevents duplication and matches the convention that -domain-agnostic utilities live in shared packages, not CLI subpackages - -**Consequence**: CLI packages import internal/format instead of defining local -helpers. Local copies deleted. - ---- - -## [2026-03-20-160103] Go-YAML linkage check added to lint-drift as check 5 - -**Status**: Accepted - -**Context**: Prior refactoring sessions left broken and orphan linkages between -Go DescKey constants and YAML entries that caused silent runtime failures - -**Decision**: Go-YAML linkage check added to lint-drift as check 5 - -**Rationale**: Shell-based grep+comm approach fits the existing lint-drift -pattern, runs at CI time, and is simpler than programmatic Go AST parsing - -**Consequence**: CI-time check catches orphans in both directions plus -cross-namespace duplicates, preventing recurrence - ---- - -## [2026-03-18-193623] Singular command names for all CLI entities - -**Status**: Accepted - -**Context**: ctx add used learning (singular) but ctx learnings was plural. -Inconsistency across 6 commands. - -**Decision**: Singular command names for all CLI entities - -**Rationale**: Less headache for i18n; one rule (singular = entity); developers -think in OOP. Use field values come from DescKey constants for -single-source-of-truth renaming. - -**Consequence**: All commands singular: task, decision, learning, change, -permission, dep. YAML keys, desc constants, directory names, and 50+ files -updated. - ---- - -## [2026-03-17-105627] Pre-compute-then-print for write package output blocks - -**Status**: Accepted - -**Context**: Audit of internal/write/ found 337 Println calls across 160 -functions. Asked whether text/template or single format strings would clean up -multi-Println functions like InfoLoopGenerated. 
- -**Decision**: Pre-compute-then-print for write package output blocks - -**Rationale**: text/template trades compile-time safety for runtime errors and -only 38 of 160 functions benefit from consolidation. fmt.Sprintf with -pre-computed conditional args handles all cases without new dependencies. -Loop-based functions stay imperative. - -**Consequence**: Functions with 4+ Printlns pre-compute conditionals into -strings, then emit one cmd.Println with a multiline block template. Per-line -Tpl* constants replaced with TplXxxBlock. Trivial (1-3 line) and loop-based -functions excluded. - ---- - -## [2026-03-16-104142] Resource name constants in config/mcp/resource, mapping in server/resource - -**Status**: Accepted - -**Context**: MCP resource handler had string literals scattered through -handle_resource.go and rebuilt the resource list on every call - -**Decision**: Resource name constants in config/mcp/resource, mapping in -server/resource - -**Rationale**: Constants follow the same pattern as config/mcp/tool. Mapping -stays in server/resource because it bridges config constants with assets text -(too many cross-cutting deps for a config package). Resource list and URI lookup -are pre-built once at server init. - -**Consequence**: URI-to-file lookup is O(1) via pre-built map; resource list -built once in NewServer, not per request; no string literals in handler code - ---- - -## [2026-03-16-022635] Rename --consequences flag to --consequence for singular consistency - -**Status**: Accepted - -**Context**: All other CLI flags (context, rationale, lesson, application) are -singular nouns. consequences was the only plural. - -**Decision**: Rename --consequences flag to --consequence for singular -consistency - -**Rationale**: Singular form matches the pattern. Consistency wins over natural -language preference. - -**Consequence**: 75+ files updated. Breaking change for --consequences users. 
- ---- - -## [2026-03-14-180905] Error package taxonomy: 22 domain files replace monolithic errors.go - -**Status**: Accepted - -**Context**: internal/err/errors.go was 1995 lines with 188 functions in one -file - -**Decision**: Error package taxonomy: 22 domain files replace monolithic -errors.go - -**Rationale**: Convention requires files named by responsibility, not junk -drawers; domain grouping makes it possible to find error constructors by domain - -**Consequence**: 22 files (backup, config, crypto, date, fs, git, hook, init, -journal, memory, notify, pad, parser, prompt, recall, reminder, session, site, -skill, state, task, validation); errors.go deleted - ---- - -## [2026-03-14-131152] Session prefixes are parser vocabulary, not i18n text - -**Status**: Accepted - -**Context**: Markdown session parser had hardcoded Session:/Oturum: pair in -text.yaml as session_prefix/session_prefix_alt — didn't scale beyond two -languages - -**Decision**: Session prefixes are parser vocabulary, not i18n text - -**Rationale**: Session header prefixes are recognition patterns for parsing, not -user-facing interface strings. Separating content recognition from interface -language lets users parse multilingual session files without code changes. -Single-language default (Session:) avoids implicit favoritism. - -**Consequence**: Prefixes moved to .ctxrc session_prefixes list. text.yaml -entries and embed.go constants removed. Parser reads from rc.SessionPrefixes() -with fallback to config/parser.DefaultSessionPrefixes. Users extend via .ctxrc. - ---- - -## [2026-03-14-110748] System path deny-list as safety net, not security boundary - -**Status**: Accepted - -**Context**: Replacing nolint:gosec directives with centralized I/O wrappers in -internal/io - -**Decision**: System path deny-list as safety net, not security boundary - -**Rationale**: ctx paths are internally constructed from config constants. 
The -deny-list catches agent hallucinations (writing to /etc), not adversarial input. -Public security docs would imply a threat model that does not exist. - -**Consequence**: internal/io/doc.go documents limitations honestly for -contributors. No user-facing security docs. The deny-list is a modicum of -protection, not a promise. - ---- - -## [2026-03-14-093748] Config-driven freshness check with per-file review URLs - -**Status**: Accepted - -**Context**: Building a hook to warn when technology-dependent constants go -stale. Initially hardcoded the file list and Anthropic docs URL in the binary, -but this only worked inside the ctx repo and assumed all projects care about -Anthropic docs. - -**Decision**: Config-driven freshness check with per-file review URLs - -**Rationale**: Making the file list and review URLs configurable via .ctxrc -freshness_files means any project can opt in. Per-file review_url avoids -special-casing by project name — ctx sets Anthropic docs, other projects set -their own vendor links or omit it entirely. - -**Consequence**: The hook is a no-op by default (opt-in). ctx's own .ctxrc -carries the tracked files. All nudge text goes through assets/text.yaml for -localization. No project detection logic needed. - ---- - -## [2026-03-13-223111] Delete ctx-context-monitor skill — hook output is self-sufficient - -**Status**: Accepted - -**Context**: The skill documented how to relay context window warnings, but the -hook message already includes IMPORTANT: Relay this context window warning to -the user VERBATIM which agents follow without the skill. - -**Decision**: Delete ctx-context-monitor skill — hook output is -self-sufficient - -**Rationale**: No mechanism exists for hooks to trigger skills. The skill was -never loaded during sessions. Adding enforcement elsewhere would either be too -far back in context (playbook) or dilute the already-crisp hook message. - -**Consequence**: One fewer skill to maintain. 
No behavioral change — agents -continue relaying warnings as before. - ---- - -## [2026-03-13-151955] build target depends on sync-why to prevent embedded doc drift - -**Status**: Accepted - -**Context**: assets/why/ files had silently drifted from their docs/ sources - -**Decision**: build target depends on sync-why to prevent embedded doc drift - -**Rationale**: Derived assets that are not in the build dependency chain will -drift — the only reliable enforcement is making the build fail without sync - -**Consequence**: Every make build now copies docs into assets before compiling - ---- - -## [2026-03-12-133007] Recommend companion RAGs as peer MCP servers not bridge through ctx - -**Status**: Accepted - -**Context**: Explored whether ctx should proxy RAG queries or integrate a RAG -directly - -**Decision**: Recommend companion RAGs as peer MCP servers not bridge through -ctx - -**Rationale**: MCP is the composition layer — agents already compose multiple -servers. ctx is context, RAGs are intelligence. 
No bridging, no plugin system, -no schema abstraction - -**Consequence**: Spec created at ideas/spec-companion-intelligence.md; future -work is documentation and UX only - ---- - -## [2026-03-12-133007] Rename ctx-map skill to ctx-architecture - -**Status**: Accepted - -**Context**: The name 'map' didn't convey the iterative, architectural nature of -the ritual - -**Decision**: Rename ctx-map skill to ctx-architecture - -**Rationale**: 'architecture' better describes surveying and evolving project -structure across sessions - -**Consequence**: All cross-references updated across skills, docs, .context -files, and settings - ---- - -## [2026-03-07-221155] Use composite directory path constants for multi-segment paths - -**Status**: Accepted - -**Context**: Needed a constant for hooks/messages path used in message.go and -message_cmd.go - -**Decision**: Use composite directory path constants for multi-segment paths - -**Rationale**: Matches existing pattern of DirClaudeHooks = '.claude/hooks' — -keeps filepath.Join calls cleaner and avoids scattering path segments - -**Consequence**: New multi-segment directory paths should be single constants -(e.g. 
DirHooksMessages, DirMemoryArchive) rather than joined from individual -segment constants - ---- - -## [2026-03-06-200306] Drop fatih/color dependency — Unicode symbols are sufficient for terminal output, color was redundant - -**Status**: Accepted - -**Context**: fatih/color was used in 32 files for green checkmarks, yellow -warnings, cyan headings, dim text - -**Decision**: Drop fatih/color dependency — Unicode symbols are sufficient for -terminal output, color was redundant - -**Rationale**: Every colored output already had a semantic symbol (✓, ⚠, -○) that conveyed the same meaning; color added visual noise in non-terminal -contexts (logs, pipes) - -**Consequence**: Removed --no-color flag (only existed for color.NoColor); one -fewer external dependency; FlagNoColor retained in config for CLI compatibility - ---- - -## [2026-03-06-141507] PR #27 (MCP server) meets v0.1 spec requirements — merge-ready pending 3 compliance fixes - -**Status**: Accepted - -**Context**: Reviewed PR against specs/mcp-server.md; all 7 action items -addressed, CI fails on 3 mechanical compliance issues - -**Decision**: PR #27 (MCP server) meets v0.1 spec requirements — merge-ready -pending 3 compliance fixes - -**Rationale**: All spec requirements met; CI failures are trivial and low-risk; -keeping PR open risks merge conflicts during active refactoring - -**Consequence**: Merge and fix compliance issues in follow-up commit on main - ---- - -## [2026-03-06-184816] Skills stay CLI-based; MCP Prompts are the protocol equivalent - -**Status**: Accepted - -**Context**: Question arose whether skills should switch from ctx CLI (Bash) to -MCP tool calls once the MCP server ships - -**Decision**: Skills stay CLI-based; MCP Prompts are the protocol equivalent - -**Rationale**: CLI is always available (PATH prerequisite); MCP requires -optional configuration. Hooks will always be CLI (shell commands). Two access -patterns in the same tool is gratuitous complexity. 
- -**Consequence**: Skills call CLI. MCP Prompts call MCP Tools. Hooks call CLI. -Clean layer separation; no replacement, only parallel access paths. - ---- - -## [2026-03-06-184812] Peer MCP model for external tool integration - -**Status**: Accepted - -**Context**: Evaluated three integration models (orchestrator, peer, hub) for -how ctx relates to GitNexus and context-mode - -**Decision**: Peer MCP model for external tool integration - -**Rationale**: Peer model (side-by-side MCP servers, each queried independently -by the agent) respects ctx's markdown-on-filesystem invariant and avoids -coupling. ctx provides behavioral scaffolding; external tools provide their -specialties. - -**Consequence**: ctx MCP Prompts can reference external tools by convention -without tight coupling. No plugin registry needed. - ---- - -## [2026-03-06-050132] Create internal/parse for shared text-to-typed-value conversions - -**Status**: Accepted - -**Context**: parseDate with 2006-01-02 duplicated in 5+ files; needed a home -that is not internal/utils or internal/strings (collides with stdlib) - -**Decision**: Create internal/parse for shared text-to-typed-value conversions - -**Rationale**: internal/parse scopes to convert text to typed values without -becoming a junk drawer. Name invites sibling functions (duration, identifier -parsing) naturally. - -**Consequence**: parse.Date() is the first function; config.DateFormat holds the -layout constant. Other time.Parse callers can migrate incrementally. 
- ---- - -## [2026-03-06-050131] Centralize errors in internal/err, not per-package err.go files - -**Status**: Accepted - -**Context**: Duplicate error constructors across 5+ CLI packages; agents copying -the pattern when they see a local err.go - -**Decision**: Centralize errors in internal/err, not per-package err.go files - -**Rationale**: Single location makes duplicates visible, enables future sentinel -errors, and prevents broken-window accumulation - -**Consequence**: All CLI err.go files migrated and deleted. New errors go to -internal/err/errors.go exclusively. - ---- - -## [2026-03-05-205424] Gitignore .context/memory/ for this project - -**Status**: Accepted - -**Context**: Memory mirror contains copies of MEMORY.md which holds strategic -analysis and session notes - -**Decision**: Gitignore .context/memory/ for this project - -**Rationale**: Strategic content should not be in git history. Docs updated to -say 'often git-tracked' for the general recommendation — this project is the -exception. - -**Consequence**: Mirror and archives are local-only for this project. Other -projects can still track them. Sync and drift detection work the same way -regardless. - ---- - -## [2026-03-05-042154] Memory bridge design: three-phase architecture with hook nudge + on-demand - -**Status**: Accepted - -**Context**: Brainstormed how to bridge Claude Code MEMORY.md with ctx -structured context files - -**Decision**: Memory bridge design: three-phase architecture with hook nudge + -on-demand - -**Rationale**: Hook nudge + on-demand gives user choice and freedom. Wrap-up is -the publish trigger, never commit (footgun). Heuristic classification for v1, no -LLM. Marker-based merge for bidirectional conflict. Mirror is git-tracked + -timestamped archives. Foundation spec delivers sync/status/diff/hook; import and -publish are future phases. - -**Consequence**: Foundation spec in specs/memory-bridge.md, import/publish specs -deferred to ideas/. 
Tasked out as S-0.1.1 through S-0.1.10 in ideas/TASKS.md. - ---- - -## [2026-03-05-023937] Revised strategic analysis: blog-first execution order, bidirectional sync as top-level section - -**Status**: Accepted - -**Context**: Editorial review of ideas/claude-memory-strategic-analysis.md -surfaced six structural weaknesses in competitive positioning - -**Decision**: Revised strategic analysis: blog-first execution order, -bidirectional sync as top-level section - -**Rationale**: 200-line cap is fragile differentiator (demoted); org-scoped -memory is the real threat (elevated to HIGH); model agnosticism is premature -(parked with trigger condition); bidirectional sync is the most underweighted -insight (promoted); narrative shapes categories before implementation does (blog -first) - -**Consequence**: Execution order is now S-3 (blog) -> S-0 -> S-1 -> S-2. -Strategic doc restructured from 9 to 10 sections. Blog post shipped as first -deliverable. - ---- - -## [2026-03-04-105238] Interface-based GraphBuilder for multi-ecosystem ctx deps - -**Status**: Accepted - -**Context**: P-1.3 questioned whether non-Go dependency support would introduce -bloat and whether a semantic approach was better - -**Decision**: Interface-based GraphBuilder for multi-ecosystem ctx deps - -**Rationale**: The output pipeline (map[string][]string to Mermaid/table/JSON) -was already language-agnostic. Each ecosystem builder is ~40 lines — this is -finishing what was started, not bloat. Static manifest parsing (no external -tools for Node/Python) keeps dependencies minimal. - -**Consequence**: ctx deps now auto-detects Go, Node.js, Python, Rust. --type -flag overrides detection. ctx-architecture skill works across ecosystems without -changes. - ---- - -## [2026-03-02-165038] Billing threshold piggybacks on check-context-size, not heartbeat - -**Status**: Accepted - -**Context**: User wanted a configurable token-count nudge for billing awareness -(Claude Pro 1M context, extra cost after 200k). 
Heartbeat produces zero stdout -and can't relay to user. - -**Decision**: Billing threshold piggybacks on check-context-size, not heartbeat - -**Rationale**: check-context-size already reads tokens, has VERBATIM relay -working, and runs every prompt. Adding a third independent trigger there is -minimal code and follows established patterns. - -**Consequence**: New .ctxrc field billing_token_warn (default 0 = disabled). -One-shot per session via billing-warned-{sessionID} state file. -Template-overridable via check-context-size/billing.txt. - ---- - -## [2026-03-02-123611] Replace auto-migration with stderr warning for legacy keys - -**Status**: Accepted - -**Context**: Auto-migration code existed for promoting keys from -~/.local/ctx/keys/ and .context/.ctx.key to ~/.ctx/.ctx.key. Userbase is small -and this is alpha — no need to bloat the codebase. - -**Decision**: Replace auto-migration with stderr warning for legacy keys - -**Rationale**: Warn-only is simpler, avoids silent file operations, and puts the -user in control. Migration instructions in docs are sufficient for the small -userbase. - -**Consequence**: MigrateKeyFile() now only warns on stderr. promoteToGlobal() -helper deleted. Tests verify keys are not moved. - ---- - -## [2026-03-02-005213] Consolidate all session state to .context/state/ - -**Status**: Accepted - -**Context**: Session-scoped state (cooldown tombstones, pause markers, daily -throttle markers) was split between /tmp (via secureTempDir()) and -.context/state/ for project-scoped state - -**Decision**: Consolidate all session state to .context/state/ - -**Rationale**: Single location simplifies mental model, eliminates duplicated -secureTempDir() in two packages, removes the cleanup-tmp SessionEnd hook -entirely. .context/state/ is already gitignored and project-scoped. - -**Consequence**: All 18 callers updated. Tests switch from XDG_RUNTIME_DIR -mocking to CTX_DIR + rc.Reset(). 
Hook lifecycle drops from 4 events to 3 -(SessionEnd removed). - ---- - -## [2026-03-01-222733] PersistentPreRunE init guard with three-level exemption - -**Status**: Accepted - -**Context**: ctx commands handled missing .context/ inconsistently — some -caught errors, some got confusing file-not-found messages, some produced empty -output - -**Decision**: PersistentPreRunE init guard with three-level exemption - -**Rationale**: Single PersistentPreRunE on root command gives one clear error. -Three-level exemption (hidden commands, annotated commands, grouping commands) -covers all edge cases without per-command boilerplate - -**Consequence**: Boundary violation now returns an error instead of os.Exit(1), -making it testable. The subprocess-based boundary test was simplified to a -direct error assertion - ---- - -## [2026-03-01-161457] Global encryption key at ~/.ctx/.ctx.key - -**Status**: Superseded by [2026-03-02] global key simplification - -**Context**: Key stored next to ciphertext (.context/.ctx.key) was a security -antipattern and broke in worktrees. The slug-based per-project key system at -~/.local/ctx/keys/ was over-engineered for the common case (one user, one -machine, one key). - -**Decision**: Single global key at ~/.ctx/.ctx.key. Project-local override via -.ctxrc key_path or .context/.ctx.key. - -**Rationale**: One key per machine covers 99% of users. Per-project slug -filenames and three-tier resolution added complexity without clear benefit. -~/.ctx/ is the natural home (matches ~/.claude/ convention). Tilde expansion in -.ctxrc key_path fixes a standalone bug. - -**Consequence**: Auto-migration promotes legacy keys (project-local, -~/.local/ctx/keys/) to ~/.ctx/.ctx.key. Deleted KeyDir(), ProjectKeySlug(), -ProjectKeyPath(). ResolveKeyPath simplified to two params. 15+ doc files -updated. 
- ---- - -## [2026-03-01-112544] Heartbeat token telemetry: conditional fields, not always-present - -**Status**: Accepted - -**Context**: Adding tokens, context_window, usage_pct to heartbeat payloads. -First prompt of a session has no JSONL usage data yet. - -**Decision**: Heartbeat token telemetry: conditional fields, not always-present - -**Rationale**: Token fields are only included in the template ref when tokens > -0. This avoids misleading pct=0% on the first heartbeat and keeps payloads clean -for receivers that filter on field presence. - -**Consequence**: Webhook consumers must handle heartbeats both with and without -token fields. The message string also varies (with/without tokens=N pct=N% -suffix). - ---- - -## [2026-03-01-092613] Hook log rotation: size-based with one previous generation, matching eventlog pattern - -**Status**: Accepted - -**Context**: .context/logs/ files grow unbounded (~200KB after one month); -needed a cap - -**Decision**: Hook log rotation: size-based with one previous generation, -matching eventlog pattern - -**Rationale**: Architectural symmetry with eventlog, O(1) size check vs O(n) -line counting, diagnostic logs don't need deep history (webhooks cover serious -setups) - -**Consequence**: Each log file caps at ~2MB (current + .1). config.LogMaxBytes = -1MB, same as EventLogMaxBytes - ---- - -## [2026-03-01-090124] Promote 6 private skills to bundled plugin skills; keep 7 project-local - -**Status**: Accepted - -**Context**: Reviewed all 13 _ctx-* private skills to determine which are -universally useful for any ctx user vs specific to the ctx codebase or personal -infra. - -**Decision**: Promote 6 private skills to bundled plugin skills; keep 7 -project-local - -**Rationale**: Promote if the skill benefits any ctx-powered project without -project-specific hardcoding. Keep private if it references this repo's Go -internals, personal infra, or language-specific tooling. 
Promote list: _ctx-spec -(generic scaffolding), _ctx-brainstorm (design facilitation), _ctx-verify (claim -verification), _ctx-skill-create (skill authoring), _ctx-link-check (doc link -audit), _ctx-permission-sanitize (Claude Code permissions audit). Keep list: -_ctx-audit (Go/ctx checks), _ctx-qa (Go Makefile), _ctx-backup (SMB infra), -_ctx-release/_ctx-release-notes (ctx release workflow), _ctx-update-docs (ctx -package mapping), _ctx-absorb (borderline, revisit later). - -**Consequence**: Six skills move from .claude/skills/ to -internal/assets/claude/skills/ and become available to all ctx users via ctx -init. Cross-references between skills need updating (e.g., /_ctx-brainstorm -becomes /ctx-brainstorm). The seven remaining private skills stay project-local. - ---- - -## [2026-02-27-230718] Context window detection: JSONL-first fallback order - -**Status**: Accepted - -**Context**: check-context-size defaults to 200k but user runs 1M-context model, -causing false 110% warnings. JSONL contains the model name which maps to actual -window size. - -**Decision**: Context window detection: JSONL-first fallback order - -**Rationale**: effective_window = detect_from_jsonl(model) ?? -ctxrc.context_window ?? 200_000. JSONL is ground truth (reflects actual model in -use); ctxrc is fallback for first-hook-of-session or unknown models; 200k is -safe last resort. Having ctxrc override JSONL would artificially restrict the -check when a user forgets to update their config after switching models. - -**Consequence**: Most users get correct window automatically. ctxrc -context_window becomes a fallback, not an override. Task exists for -implementation. - ---- - -## [2026-02-27-002830] Context injection architecture v2 (consolidated) - -**Status**: Accepted - -**Consolidated from**: 3 decisions (2026-02-26) - -- **Diagram extraction**: ARCHITECTURE.md contained ~600 lines of ASCII/Mermaid - diagrams (~12K tokens). 
Extracted to 5 architecture-dia-*.md files outside - FileReadOrder. Agents get verbal summaries at session start; diagrams - available on demand. Total injection dropped 53% (20K→9.5K tokens). -- **Auto-injection replaces directives**: Soft instructions have ~75-85% - compliance ceiling because "don't apply judgment" is itself evaluated by - judgment. The v2 context-load-gate injects content directly via - `additionalContext` — agents never choose whether to comply. Injection - strategy: CONSTITUTION, CONVENTIONS, ARCHITECTURE, AGENT_PLAYBOOK verbatim; - DECISIONS, LEARNINGS index-only; TASKS mention-only. Total ~7,700 tokens. See: - `specs/context-load-gate-v2.md`. -- **Imperative framing**: Advisory framing allowed agents to assess relevance - and skip files. Imperative framing with unconditional compliance checkpoint - removes the escape hatch. Verbatim relay is fallback safety net, not primary - instruction. - ---- - -## [2026-02-26-200001] .context/state/ directory for project-scoped runtime state - -**Status**: Accepted - -New gitignored directory under `context_dir` resolution for ephemeral -project-scoped state. Follows `.context/logs/` precedent — added to -`config.GitignoreEntries` and root `.gitignore`. - -First use: injection oversize flag written by context-load-gate when injected -tokens exceed the configurable `injection_token_warn` threshold (`.ctxrc`, -default 15000). The check-context-size VERBATIM hook reads the flag and nudges -the user to run `/ctx-consolidate`. - -See: `specs/injection-oversize-nudge.md`. - ---- - -## [2026-02-26-100001] Hook and notification design (consolidated) - -**Status**: Accepted - -**Consolidated from**: 4 decisions (2026-02-12 to 2026-02-24) - -- Tone down proactive content suggestion claims in docs rather than add more - hooks. Already have 9 UserPromptSubmit hooks; adding another risks fatigue. - Conversational prompting already works. 
-- Hook commands must use structured JSON output - (hookSpecificOutput.additionalContext) instead of plain text, because Claude - Code treats plain text as ignorable ambient context. -- Drop prompt-coach hook entirely: zero useful tips fired, output channel - invisible to user, orphan temp file accumulation. The prompting guide already - covers best practices. -- De-emphasize /ctx-journal-normalize from the default journal pipeline. The - normalize skill is expensive and nondeterministic; programmatic normalization - handles most cases. Skill remains available for targeted per-file use. - ---- - -## [2026-02-26-100002] ctx init and CLAUDE.md handling (consolidated) - -**Status**: Accepted - -**Consolidated from**: 3 decisions (2026-01-20) - -- `ctx init` handles CLAUDE.md intelligently: creates if missing, backs up and - offers merge if existing, uses marker comment for idempotency. The `--merge` - flag enables non-interactive append. -- `ctx init` always generates `.claude/hooks/` alongside `.context/` with no - flag needed. Other AI tools ignore `.claude/`; Claude Code users get seamless - zero-config experience. -- Core tool stays generic and tool-agnostic, with optional Claude Code - enhancements via `.claude/hooks/`. Other AI tools can be supported similarly - (`ctx hook cursor`, etc.). - ---- - -## [2026-02-26-100004] Task and knowledge management (consolidated) - -**Status**: Accepted - -**Consolidated from**: 4 decisions (2026-01-27 to 2026-02-18) - -- Tasks must include explicit deliverables, not just implementation steps. - Parent tasks define WHAT the user gets; subtasks decompose HOW to build it. - Without explicit deliverables, AI optimizes for checking boxes. -- Use reverse-chronological order (newest first) for DECISIONS.md and - LEARNINGS.md. Ensures most recent items are read first regardless of token - budget. -- Add quick reference index to DECISIONS.md: compact table at top allows - scanning; agents can grep for full timestamp to jump to entry. 
Auto-updated on - `ctx add decision`. -- Knowledge scaling via archive path for decisions and learnings: follow the - task archive pattern, move old entries to `.context/archive/`, extend `ctx - compact --archive` to cover all three file types. - ---- - -## [2026-02-26-100005] Agent autonomy and separation of concerns (consolidated) +## [2026-04-25-014704] Use t.Setenv for subprocess env in tests, not append(os.Environ(), ...) **Status**: Accepted -**Consolidated from**: 3 decisions (2026-01-21 to 2026-01-28) +**Context**: TestBinaryIntegration spawns subprocesses; the prior helper did append(os.Environ(), CTX_DIR=...) to override the developer-shell value. Wrong abstraction. -- Removed AGENTS.md from project root. Consolidated on CLAUDE.md (auto-loaded) + - .context/AGENT_PLAYBOOK.md as the canonical agent instruction path. Projects - using ctx should not create AGENTS.md. -- ~~Separate orchestrator directive from agent tasks~~ (superseded 2026-03-25: - IMPLEMENTATION_PLAN.md removed — TASKS.md is the single source of truth for - work items, AGENT_PLAYBOOK.md covers agent behavior). -- No custom UI -- IDE is the interface. UI is a liability; IDEs already excel at - file browsing, search, markdown editing, and git integration. Focus CLI - efforts on good markdown output. +**Decision**: Use t.Setenv for subprocess env in tests, not append(os.Environ(), ...) ---- - -## [2026-02-26-100006] Security and permissions (consolidated) - -**Status**: Accepted - -**Consolidated from**: 4 decisions (2026-01-21 to 2026-02-24) - -- Keep CONSTITUTION.md minimal: only truly inviolable rules (security, - correctness, process invariants). Style preferences go in CONVENTIONS.md. - Overly strict constitution gets ignored. -- Centralize constants with semantic prefixes in `internal/config/config.go`: - `Dir*` for directories, `File*` for paths, `Filename*` for names, - `UpdateType*` for entry types. Single source of truth, compile-time typo - checks. 
-- Hooks use `ctx` from PATH, not hardcoded absolute paths. Standard Unix - practice; portable across machines/users. `ctx init` checks PATH availability - before proceeding. -- Drop absolute-path-to-ctx regex from block-dangerous-commands shell script. - The block-non-path-ctx Go subcommand already covers this with better patterns; - duplicating creates two sources of truth. - ---- +**Rationale**: t.Setenv mutates the live process env, exec.Cmd with nil Env inherits it, and cleanup is automatic at test end. One line replaces the helper. -## [2026-02-27-002831] Webhook and notification design (consolidated) - -**Status**: Accepted - -**Consolidated from**: 3 decisions (2026-02-22 to 2026-02-26) - -- **Session attribution**: All webhook payloads must include session_id. Reading - it from stdin costs nothing and enables multi-agent diagnostics. All run - functions take stdin parameter; tests use createTempStdin. -- **Opt-in events**: Notify events are opt-in, not opt-out. EventAllowed returns - false for nil/empty event lists. The correct default for notifications is - silence. `ctx notify test` bypasses the filter as a special case. -- **Shared encryption key**: Webhook URLs encrypted with the shared .ctx.key - (AES-256-GCM), not a dedicated key. One key, one gitignore entry, one rotation - cycle. Notify is a peer of scratchpad — both store user secrets encrypted at - rest. +**Consequence**: Helper deleted, six call sites simplified, no env-dedup logic to maintain. Pattern reusable for other subprocess tests. 
--- -## [2026-02-11] Remove .context/sessions/ storage layer and ctx session command +## [2026-04-25-014704] Tighten state.Dir / rc.ContextDir to (string, error) with sentinel errors **Status**: Accepted -**Context**: The session/recall/journal system had three overlapping storage -layers: `~/.claude/projects/` (raw JSONL transcripts, owned by Claude Code), -`.context/sessions/` (JSONL copies + context snapshots), and `.context/journal/` -(enriched markdown from `ctx recall import`). The recall pipeline reads directly -from `~/.claude/projects/`, making `.context/sessions/` a dead-end write sink -that nothing reads from. The auto-save hook copied transcripts to a directory -nobody consumed. The `ctx session save` command created context snapshots that -git already provides through version history. This was ~15 Go source files, a -shell hook, ~20 config constants, and 30+ doc references supporting -infrastructure with no consumers. +**Context**: Old single-return form returned ('', nil) when CTX_DIR was undeclared. Callers that filtered only on err != nil joined empty stateDir with relative names and wrote state files into CWD instead of .context/state/. -**Decision**: Remove `.context/sessions/` entirely. Two stores remain: raw -transcripts (global, tool-owned in `~/.claude/projects/`) and enriched journal -(project-local in `.context/journal/`). - -**Rationale**: Dead-end write sinks waste code surface, maintenance effort, and -user attention. The recall pipeline already proved that reading directly from -`~/.claude/projects/` is sufficient. Context snapshots are redundant with git -history. Removing the middle layer simplifies the architecture from three stores -to two, eliminates an entire CLI command tree (`ctx session`), and removes a -shell hook that fired on every session end. 
- -**Consequence**: Deleted `internal/cli/session/` (15 files), removed auto-save -hook, removed `--auto-save` from watch, removed pre-compact auto-save from -compact, removed `/ctx-save` skill, updated ~45 documentation files. Four -earlier decisions superseded (SessionEnd hook, Auto-Save Before Compact, Session -Filename Format, Two-Tier Persistence Model). Users who want session history use -`ctx journal source`/`ctx journal import` instead. - ---- +**Decision**: Tighten state.Dir / rc.ContextDir to (string, error) with sentinel errors +**Rationale**: Returning a sentinel ErrDirNotDeclared makes the empty-path case unrepresentable in a 'looks fine' branch. Forces every caller through the same explicit gate. -*Module-specific, already-shipped, and historical decisions: -[decisions-reference.md](decisions-reference.md)* +**Consequence**: All callers needed migration; tests had to declare CTX_DIR explicitly. In return, the filepath.Join('', rel) trap is closed by construction. diff --git a/.context/GLOSSARY.md b/.context/GLOSSARY.md index 1e5d3a09c..c8575b829 100644 --- a/.context/GLOSSARY.md +++ b/.context/GLOSSARY.md @@ -15,48 +15,4 @@ DO NOT UPDATE FOR: ## Domain Terms -| Term | Definition | -|----------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| Context | The set of `.context/*.md` files that give AI agents persistent project knowledge across sessions. Not a generic word; when capitalized, refers specifically to the ctx system. | -| Context packet | The token-budgeted markdown blob assembled by `ctx agent`. Contains prioritized excerpts from context files, sized to fit the agent's context window. 
| -| Context file | Any `.md` file in `.context/` that ctx manages (CONSTITUTION, TASKS, DECISIONS, etc.). Each has a defined purpose and priority. | -| Constitution | The set of inviolable rules in `CONSTITUTION.md`. Distinct from conventions: constitution rules cannot be bent; violating one means the task is wrong. | -| Convention | A project pattern or standard in `CONVENTIONS.md`. Conventions are strong recommendations that can be bent with justification; contrast with constitution rules. | -| Drift | When context files diverge from the actual codebase state. Types: dead path references, stale task counts, missing required files, potential secrets. Detected by `ctx drift`. | -| Dead path | A backtick-enclosed file path in ARCHITECTURE.md or CONVENTIONS.md that references a file no longer on disk. A drift warning type. | -| Staleness | When context files have not been updated to reflect recent code changes. Specific indicator: >10 completed tasks in TASKS.md signals the file needs compaction. | -| Read order | The priority sequence in which context files are loaded and presented to agents. Defined by `config.FileReadOrder`. Higher priority files are loaded first and survive token budget cuts. | -| Token budget | Maximum estimated token count for assembled context. Default 8000. Configurable via `CTX_TOKEN_BUDGET`, `.ctxrc`, or `--budget` flag. Uses 4-chars-per-token heuristic. | -| Curated tier | The `.context/*.md` files: manually maintained, token-budgeted, loaded by `ctx agent`. Contrast with full-dump tier. | -| Full-dump tier | The `.context/journal/` directory: imported session transcripts. Not auto-loaded; used for archaeology when curated context is insufficient. Browse with `ctx journal source`. | -| Compaction | The process of archiving completed tasks and cleaning up context files. Run via `ctx compact`. Moves completed tasks to archive; preserves phase structure. 
| -| Entry header | The timestamped heading format used in DECISIONS.md and LEARNINGS.md: `## [YYYY-MM-DD-HHMMSS] Title`. Parsed by `config.RegExEntryHeader`. | -| Index table | The auto-generated markdown table at the top of DECISIONS.md and LEARNINGS.md (between `` and `` markers). Updated by `ctx add` and `ctx decision/learnings reindex`. | -| Readback | A structured summary where the agent plays back what it knows (last session, active tasks, recent decisions) so the user can confirm correct context was loaded. From aviation: pilots repeat ATC instructions back to confirm they heard correctly. In ctx, triggered by "do you remember?" or `/ctx-remember`. | -| Ralph Loop | An iterative autonomous AI development workflow that uses `.context/loop.md` as a directive. Separate from ctx but complementary: Ralph drives the loop, ctx provides the memory. | -| Skill | A Claude Code Agent Skill: a markdown file in `.claude/skills/` that teaches the agent a specialized workflow. Invoked via `/skill-name`. | -| Live skill | The project-local copy of a skill in `.claude/skills/`. Can be edited by the user or agent. Contrast with template skill. | -| Template skill | The embedded copy of a skill in `internal/assets/claude/skills/`. Deployed on `ctx init`. Source of truth for the default version. | -| Hook | A Claude Code lifecycle script in `.claude/hooks/`. Fires on events: PreToolUse, UserPromptSubmit, SessionEnd. Generated by `ctx init`. | -| Consolidation | A code-quality sweep checking for convention drift: magic strings, predicate naming, file size, dead exports, etc. Run via `/consolidate` skill. Distinct from compaction (which is context-level). | -| 3:1 ratio | Heuristic for consolidation timing: consolidate after every 3 feature/bugfix sessions. Prevents convention drift from compounding. | -| E/A/R classification | Expert/Activation/Redundant taxonomy for evaluating skill quality. Good skill = >70% Expert knowledge, <10% Redundant with what the model already knows. 
| -| DescKey | A string constant in `config/embed/text/` that maps to a user-facing message in embedded YAML. All output text is looked up by DescKey, enabling future i18n. Adding a new message requires a DescKey constant, a YAML entry, and a write/err function. | -| Governance warning | An advisory message the MCP server appends to tool responses based on session state (e.g., "context not loaded", "drift not checked", "time to persist"). Never blocks tool execution; only nudges. | -| Entry block | A parsed unit from DECISIONS.md or LEARNINGS.md: the timestamped header plus all content until the next header. Used by index generation and compaction. Parsed by `internal/index.ParseEntryBlocks()`. | -| Journal pipeline | The 5-stage processing chain for imported session transcripts: exported -> enriched (YAML frontmatter) -> normalized (soft-wrap, clean JSON) -> fences_verified -> locked. Tracked in `.context/journal/.state.json`. | -| Slug format | The encoding used to derive a filesystem path from a project's absolute path. Used by `internal/memory` to locate Claude Code's MEMORY.md. Convention: replace `/` with `-`, prefix with `-`. | -| Boundary validation | The security check ensuring all resolved file paths stay under the project root. Prevents path traversal attacks. Enforced by `internal/validate` and `internal/io/Safe*` functions. | -| cmd/root + core/ | The package taxonomy for CLI commands (Decision 2026-03-06). `cmd/root/cmd.go` defines the Cobra command; `cmd/root/run.go` implements the handler; `core/` holds reusable logic. Grouping commands use `parent.Cmd()` instead. | -| Safe* functions | The guarded I/O wrappers in `internal/io` (SafeReadFile, SafeWriteFile, etc.) that apply path validation, symlink rejection, and deny-list filtering. Direct `os.ReadFile`/`os.WriteFile` calls are banned by audit. | -| Throttle marker | A date-stamped file in `.context/state/` that prevents hook checks from running more than once per day. 
The marker's mtime is compared to today's date; if same day, the check is skipped. | - ## Abbreviations - -| Abbreviation | Expansion | -|--------------|---------------------------------------------------------------------------------------------------------| -| ctx | Context (the CLI tool and the system it manages) | -| rc | Runtime configuration (from Unix `.xxxrc` convention); refers to `.ctxrc` and the `internal/rc` package | -| assets | Embedded assets; the `internal/assets` package containing go:embed templates and plugin files | -| CWD | Current working directory; used in session matching to correlate sessions with projects | -| JSONL | JSON Lines; the format Claude Code uses for session transcripts (one JSON object per line) | diff --git a/.context/HANDOVER-2026-04-22.md b/.context/HANDOVER-2026-04-22.md new file mode 100644 index 000000000..09040b555 --- /dev/null +++ b/.context/HANDOVER-2026-04-22.md @@ -0,0 +1,213 @@ +# Session Handover — 2026-04-22 + +This file captures state from session `d6889b7c` (branch +`feat/explicit-context-dir`) so the next session can pick up +without re-investigating. Delete this file after it's been read. + +## Branch State + +- Branch: `feat/explicit-context-dir` (0 commits ahead of main — + everything is in the working tree). +- **328 files changed**, 4402 insertions, 4652 deletions. +- `go build ./...` clean, `make lint` 0 issues, `go test ./...` + exit 0 as of end of session. +- No commits were made. **First order of business next session: + decide the commit strategy.** Logical splits that make sense: + + 1. Agent-docs rewrite (AGENT_PLAYBOOK, CLAUDE.md, gate, internal + assets — watermelon-rind removal + triage error policy). + 2. Explicit-context-dir hardening in `internal/**` (resolver + plumbing, ContextChild removal, KeyPath signature change, + ctxrcPath rename, DirLine/AppendDir propagation). + 3. FullPreamble 5-value return + 16 hook/callsite migration. + 4. RequireContextDir command-entry sweep (25 RunE gates). 
+ 5. `ctx backup` full deprecation (per `specs/deprecate-ctx-backup.md`). + 6. Pre-existing build-fix: missing `DescKeyWriteSnapshotUpdated` / + `Saved` constants added (un-breaks `internal/write/restore`). + + Splitting (2) from (3) may be impractical — the callsite diff + is tangled. One "explicit-context-dir hardening" commit + one + "ctx backup removal" commit is the minimum defensible split. + One giant commit also works and may be honest given the churn. + +## What Changed, By Theme + +### 1. Agent docs rewrite + +Rewrote `ctx`'s agent-facing prose to drop prohibitions about +behaviors agents wouldn't otherwise invent (watermelon-rind +pattern) and to split the old blanket "relay ctx errors verbatim +and stop" into an **invocation-error vs everything-else triage**. + +Touched: `CLAUDE.md`, `.context/AGENT_PLAYBOOK.md`, +`.context/AGENT_PLAYBOOK_GATE.md`, `internal/assets/claude/CLAUDE.md`, +`internal/assets/context/AGENT_PLAYBOOK.md`, same gate copy. + +### 2. Explicit-context-dir hardening + +Sharpened the "explicit declaration or nothing" model: + +- **Deleted `resolve.ContextChild`**. Inlined all 10 callers with + explicit `rc.ContextDir()` + `errors.Is(err, ErrDirNotDeclared)` + + `filepath.Join` (no string concat). +- **`rc.KeyPath()`** now returns `(string, error)`. Propagates + resolver failures instead of silently handing `""` to + `crypto.ResolveKeyPath` (which used to either filepath-join a + CWD-relative path or fall through to a global key — both bugs + this branch was built to kill). +- **`rc.ctxRcPath`** → `(string, error)` (plus renamed from + `ctxRcPath` to `ctxrcPath` to clear the stutter audit). +- **`resolve.DirLine` / `resolve.AppendDir`** now return + `(string, error)`. Noisy-TUI warn log stays (intentional, + documented) — return channel added so non-rendering callers + can propagate. 
+- **`notify.LoadWebhook`**, **`message.OverridePath`**, + **`message.HasOverride`**, **`hub.LoadBodies`**, + **`merge.LoadKey`**, **`store.KeyPath`**, + **`context/validate.Exists`**, **`sync.CheckNewDirectories`** + all propagate `ErrDirNotDeclared` instead of swallowing into + `(zero, nil)` pairs. +- **`knowledge.CheckHealth(sessionID, ctxDir)`**, + **`health.ReadMapTracking(ctxDir)`**, + **`hubsync.Connected(ctxDir)`**, + **`oversizeContent(ctxDir)`** now take `ctxDir` as a parameter + from the calling hook (dead internal resolver call eliminated). +- **`coreArchive.BackupProject` CWD-based resolution**: would + have been a latent bug but the whole archive package went away + with `ctx backup`. + +### 3. FullPreamble extended + +`internal/cli/system/core/check/full_preamble.go` now returns +`(input, sessionID, ctxDir, stateDir, ok)`. All 16 callers +updated. 4 hooks (`check_persistence`, `check_memory_drift`, +`check_ceremony`, `check_journal`) had their redundant +`errors.Is(ctxErr, ErrDirNotDeclared)` blocks deleted and use the +preamble's `ctxDir` directly. The 3 non-FullPreamble hooks +(`context_load_gate`, `check_context_size`, `heartbeat`) had +their `ErrDirNotDeclared` branches replaced with a defensive +"unreachable but log loudly" fallback after the `state.Initialized` +gate. + +**Pending follow-up** (already in TASKS.md): refactor +`FullPreamble` to return a `Preamble` struct instead of a +5-value tuple. Ugly positional return is tolerated for now. + +### 4. RequireContextDir command-entry sweep + +Added `rc.RequireContextDir()` gates at the top of 25 command +RunE entry points so users get the rich multi-line error instead +of a terse sentinel propagated through library layers. 
+Commands touched: +`add`, `agent`, `backup` (conditional on scope — then the whole +command died; see theme 5), `change`, `compact`, `drift`, +`journal schema check`, `load`, `memory unpublish` (already +gated via callee — only `SilenceUsage` added), `message {edit, +list, reset, show}`, `notify {setup, test}`, `pad {add, edit, +export, merge, mv, normalize, rm, root, show, tag}`, `sync`, +`watch`. + +Decision heuristic for which commands got gated: anything that +writes/reads files under `.context/` or calls a library that +does. Exempt: `init`, `activate`, `deactivate`, `bootstrap`, +`version`, `help` — they handle not-declared themselves. + +### 5. `ctx backup` full deprecation + +Executed `specs/deprecate-ctx-backup.md` end-to-end: + +- **Deleted**: `internal/cli/backup/`, + `internal/cli/system/cmd/check_backup_age/`, + `internal/cli/system/core/archive/`, `internal/write/backup/`, + `internal/exec/gio/`, + `internal/assets/hooks/messages/check-backup-age/`, + `.claude/skills/_ctx-backup/`, `docs/cli/backup.md`. +- **Trimmed**: `internal/config/archive/` to task-archive + constants only; `internal/err/backup/` to four generic + constructors (`Create`, `CreateArchiveDir`, `WriteArchive`, + `ContextDirNotFound`) that `init`, task archival, and + bootstrap still use — package name kept as a historical + label rather than churning the non-backup callers. +- **Surgical edits**: `bootstrap/group.go`, `cli/system/system.go`, + `config/env/env.go`, `config/hook/hook.go`, `entity/system.go`, + `cli/system/doc.go`, and every `internal/assets/commands/**.yaml`. +- **Tests updated**: `registry_test.go` count `32 → 31`; + `watch_test.go` expects `"no context directory"` instead of + the old `"ctx init"` suggestion. 
+- **All shipped docs updated**: `docs/cli/index.md`, + `docs/cli/system.md`, `docs/home/common-workflows.md`, + `docs/home/contributing.md`, `docs/recipes/customizing-hook-messages.md`, + `docs/recipes/hook-sequence-diagrams.md`, + `docs/recipes/hook-output-patterns.md`, + `internal/cli/system/README.md`, `zensical.toml`, + `CONTRIBUTING-SKILLS.md`. +- **New**: `docs/operations/runbooks/backup-strategy.md` — the + migration path (rsync, cron, Time Machine, Borg/restic). + +Historical artifacts intentionally left alone (per explicit +call): `.context/journal-site/**`, `.context/DECISIONS.md`, +`.context/decisions-reference.md`, `specs/released/**`, +`specs/deprecate-ctx-backup.md` itself, other spec drafts in +`specs/**`, `ideas/**`. Editing those would rewrite history. + +Reminder [12] — "had to stop sync-to-asgard because broadcom +mirror repo is huge; solve it to resume backups" — now has a +different resolution path: there is no `ctx backup` anymore. +The runbook suggests external tools (rsync, Borg, etc.) and +hub for knowledge. Worth reviewing whether the reminder should +be dismissed or reframed. + +### 6. Pre-existing build break (fixed while owning the branch) + +`internal/write/restore/permission.go` referenced +`DescKeyWriteSnapshotUpdated` and `DescKeyWriteSnapshotSaved` +that never existed in any `internal/config/embed/text/*.go` +file. Caused a hard build failure mid-session. Added both to +`internal/config/embed/text/restore.go`. Corresponding YAML +entries are **not** added (write.yaml has no `write.snapshot-*` +keys yet) — the restore flow may print empty strings until +those entries are authored. **Worth verifying** next session: +either add the YAML entries or confirm this feature was +stubbed-but-incomplete before my session. 
+ +## Persisted Memories (global, not project-local) + +Three feedback memories saved in +`~/.claude/projects/-Users-volkan-Desktop-WORKSPACE-ctx/memory/`: + +- `feedback_no_watermelon_rinds.md` — don't warn agents off + behaviors they wouldn't invent. +- `feedback_no_preserve_old_behavior.md` — "preserves existing + behavior" is not a justification for leaving silent-skip code. +- `feedback_no_phase_deferral.md` — phasing sweeps into "Round 1 + now, Round 2 later" is deferral; do the sweep in one pass. + +## What Was NOT Touched + +- **Reminders [4] through [11]** are standing todos from earlier + sessions, out of scope for this one. [12] is partially + resolved by the backup deprecation (see theme 5). +- **Ideas and spec drafts** that reference `ctx backup` — left + intentionally as historical artifacts. +- **Journal import / enrichment backlog** (129 unimported, + 435 unenriched at session start) — not addressed. +- **Knowledge-file growth warnings** (DECISIONS.md 79 entries, + LEARNINGS.md 103, CONVENTIONS.md 272 lines) — consolidation + not attempted this session. + +## Quick Gotchas For Next Session + +- `go build ./...` still works, but the diff is enormous. + `go test -count=1 ./...` to rule out cache issues before + claiming green. +- The `state/` directory at the repo root was deleted manually + earlier in this session and did NOT reappear under normal + use — the earlier "state leaks outside `.context/state/`" + concern looks resolved. If `state/` shows up in `git status` + during next session, investigate before deleting. +- When committing, **do not** let a pre-commit hook drop the + new memories or this handover. +- `/ctx-wrap-up` was not run this session despite the + heuristic. If you run it, it will suggest capturing much of + this file's content into LEARNINGS.md / DECISIONS.md — + de-dupe carefully. 
diff --git a/.context/LEARNINGS.md b/.context/LEARNINGS.md index d8df325bb..e967c8248 100644 --- a/.context/LEARNINGS.md +++ b/.context/LEARNINGS.md @@ -17,1568 +17,36 @@ DO NOT UPDATE FOR: | Date | Learning | |----|--------| -| 2026-04-13 | GPG signing from non-TTY contexts requires pinentry-mac (or equivalent) | -| 2026-04-13 | Load average measures a queue, not CPU utilization | -| 2026-04-13 | rc.ContextDir() is the single source of truth — fix the resolver, not callers | -| 2026-04-09 | Pad index shifting is a real UX bug in batch operations | -| 2026-04-08 | fmt.Fprintf to strings.Builder silently discards errors | -| 2026-04-08 | AST audit tests must cover unexported functions too | -| 2026-04-06 | Agents ignore system-reminder content without explicit relay instructions | -| 2026-04-04 | Format-verb strings are localizable text, not exempt from magic string checks | -| 2026-04-04 | Agents add allowlist entries to make tests pass — guard every exemption | -| 2026-04-03 | Subagent scope creep and cleanup (consolidated) | -| 2026-04-03 | Bulk rename and replace_all hazards (consolidated) | -| 2026-04-03 | Import cycles and package splits (consolidated) | -| 2026-04-03 | Lint suppression and gosec patterns (consolidated) | -| 2026-04-03 | Skill lifecycle and promotion (consolidated) | -| 2026-04-03 | Cross-cutting change ripple (consolidated) | -| 2026-04-03 | Dead code detection (consolidated) | -| 2026-04-03 | desc.Text() is the single highest-connectivity symbol in the codebase | -| 2026-04-01 | Raw I/O migration unlocks downstream checks for free | -| 2026-04-01 | go/packages respects build tags — darwin-only violations invisible on Linux | -| 2026-04-01 | Copilot CLI skills need a sync mechanism to prevent drift from ctx skills | -| 2026-04-01 | Contributor PRs based on older code reintroduce removed features | -| 2026-03-31 | Magic string cleanup compounds: each pass reveals the next layer | -| 2026-03-31 | Force-loaded behavioral prose gets ignored — 
action-gating hooks don't | -| 2026-03-31 | Legacy key directory cleanup was specified but not automated | -| 2026-03-31 | Convention audits must check cmd/ purity, not just types and docstrings | -| 2026-03-31 | JSON Schema default fields cause linter errors with some validators | -| 2026-03-30 | Architecture diagrams drift silently during feature additions | -| 2026-03-30 | Python-generated doc.go files need gofmt — formatter strips bare // padding lines | -| 2026-03-30 | lint-docstrings.sh greedy sed hid all return-type violations | -| 2026-03-25 | Machine-generated CLAUDE.md content consumes per-turn budget without proportional value | -| 2026-03-25 | Template improvements don't propagate to existing projects | -| 2026-03-24 | lint-drift false positives from conflating constant namespaces | -| 2026-03-24 | git describe --tags follows ancestry, not global tag list | -| 2026-03-23 | Typography detection script needs exclusion lists for intentional uses | -| 2026-03-23 | Splitting core/ into subpackages reveals hidden structure | -| 2026-03-23 | Higher-order callbacks in param structs are a code smell | -| 2026-03-20 | Commit messages containing script paths trigger PreToolUse hooks | -| 2026-03-18 | Lazy sync.Once per-accessor is a code smell for static embedded data | -| 2026-03-17 | Write package output census: 69 trivial/simple, 38 consolidation candidates, 18 complex | -| 2026-03-16 | Docstring tasks require reading CONVENTIONS.md Documentation section first | -| 2026-03-16 | Convention enforcement needs mechanical verification, not behavioral repetition | -| 2026-03-16 | One-liner method wrappers hide dependencies without adding value | -| 2026-03-16 | Agents reliably introduce gofmt issues during bulk renames | -| 2026-03-15 | Contributor PRs need post-merge follow-up commits for convention alignment | -| 2026-03-15 | Grep for callers must cover entire working tree before deleting functions | -| 2026-03-14 | Stderr error messages are user-facing text that 
belongs in assets | -| 2026-03-14 | Hardcoded _alt suffixes create implicit language favoritism | -| 2026-03-13 | sync-why mechanism existed but was not wired to build | -| 2026-03-12 | Project-root files vs context files are distinct categories | -| 2026-03-12 | Constants belong in their domain package not in god objects | -| 2026-03-07 | Always search for existing constants before adding new ones | -| 2026-03-07 | SafeReadFile requires split base+filename paths | -| 2026-03-06 | Stale directory inodes cause invisible files over SSH | -| 2026-03-06 | Stats sort uses string comparison on RFC3339 timestamps with mixed timezones | -| 2026-03-06 | Claude Code supports PreCompact and SessionStart hooks that ctx does not use | -| 2026-03-06 | Package-local err.go files invite broken windows from future agents | -| 2026-03-05 | State directory accumulates silently without auto-prune | -| 2026-03-05 | Global tombstones suppress hooks across all sessions | -| 2026-03-05 | Claude Code has two separate memory systems behind feature flags | -| 2026-03-05 | Blog post editorial feedback is higher-leverage than drafting | -| 2026-03-04 | CONSTITUTION hook compliance is non-negotiable — don't work around it | -| 2026-03-02 | Hook message registry test enforces exhaustive coverage of embedded templates | -| 2026-03-02 | Existing Projects is ambiguous framing for migration notes | -| 2026-03-02 | Claude Code JSONL model ID does not distinguish 200k from 1M context | -| 2026-03-01 | Gosec G306 flags test file WriteFile with 0644 permissions | -| 2026-03-01 | Converting PersistentPreRun to PersistentPreRunE changes exit behavior | -| 2026-03-01 | Test HOME isolation is required for user-level path functions | -| 2026-03-01 | Task descriptions can be stale in reverse — implementation done but task not marked complete | -| 2026-03-01 | Model-to-window mapping requires ordered prefix matching | -| 2026-03-01 | TASKS.md template checkbox syntax inside HTML comments is parsed by 
RegExTaskMultiline | -| 2026-03-01 | Hook logs had no rotation; event log already did | -| 2026-02-28 | ctx pad import, ctx pad export, and ctx system resources make three hack scripts redundant | -| 2026-02-28 | Getting-started docs assumed Claude Code as the only agent | -| 2026-02-28 | Plugin reload script must rebuild cache, not just delete it | -| 2026-02-27 | site/ directory must be committed with docs changes | -| 2026-02-27 | Doctor token_budget vs context_window confusion | -| 2026-02-27 | Drift detector false positives on illustrative code examples | -| 2026-02-27 | Context injection and compliance strategy (consolidated) | -| 2026-02-26 | Webhook silence after ctxrc profile swap is the most common notify debugging red herring | -| 2026-02-26 | Documentation drift and auditing (consolidated) | -| 2026-02-26 | Agent context loading and task routing (consolidated) | -| 2026-02-26 | Go testing patterns (consolidated) | -| 2026-02-26 | PATH and binary handling (consolidated) | -| 2026-02-26 | Task management and exit criteria (consolidated) | -| 2026-02-26 | Agent behavioral patterns (consolidated) | -| 2026-02-26 | Hook compliance and output routing (consolidated) | -| 2026-02-26 | ctx add and decision recording (consolidated) | -| 2026-02-24 | CLI tools don't benefit from in-memory caching of context files | -| 2026-02-22 | Hook behavior and patterns (consolidated) | -| 2026-02-22 | UserPromptSubmit hook output channels (consolidated) | -| 2026-02-22 | Linting and static analysis (consolidated) | -| 2026-02-22 | Permission and settings drift (consolidated) | -| 2026-02-22 | Gitignore and filesystem hygiene (consolidated) | -| 2026-01-28 | IDE is already the UI | +| 2026-04-25 | Confident code comments can pull an LLM away from first-principles knowledge | +| 2026-04-25 | filepath.Join('', rel) returns rel as CWD-relative, not error | +| 2026-04-25 | Parallel go test ./... 
packages can race on ~/.claude/settings.json | ---- - -## [2026-04-13-153618] GPG signing from non-TTY contexts requires pinentry-mac (or equivalent) - -**Context**: git commit failed from Claude Code's shell with 'gpg: signing -failed: No such file or directory' — the default pinentry-curses cannot open a -TTY in agent-invoked shells. Manual commits from a real terminal worked fine. - -**Lesson**: GPG's default curses pinentry requires an interactive TTY. In -non-TTY contexts (Claude Code, CI, scripts, cron), signing fails silently-ish. -The fix is to configure a GUI pinentry that uses the OS keychain: brew install -pinentry-mac; echo 'pinentry-program $(brew --prefix)/bin/pinentry-mac' >> -~/.gnupg/gpg-agent.conf; gpgconf --kill gpg-agent. Once the passphrase is saved -in Keychain, signing works from any context. - -**Application**: If agents or CI need to sign commits, configure pinentry-mac -(macOS) or pinentry-gtk/pinentry-qt (Linux) with the OS keychain, not -pinentry-curses. This is a one-time setup per machine. - ---- - -## [2026-04-13-153618] Load average measures a queue, not CPU utilization - -**Context**: The 'Load Xx CPU count' resource alert fired at 1.74x while htop -showed per-core utilization well under 50% and idle cores. Load average counts -runnable + uninterruptible-sleep processes, smoothed over 1/5/15 minutes. - -**Lesson**: Load average and CPU% measure different things. High load with low -CPU% typically means many short-lived processes or I/O-bound work (e.g., go test -spawning hundreds of parallel test binaries). The 1-minute average is too -reactive for dev machines that periodically run test suites — 5-minute smooths -transient spikes without hiding sustained pressure. - -**Application**: For alerting thresholds based on system load, prefer 5-minute -over 1-minute averages. 1-minute is useful for interactive debugging; 5-minute -is better for automated alerts that should not fire on normal build/test -activity. 
- ---- - -## [2026-04-13-153618] rc.ContextDir() is the single source of truth — fix the resolver, not callers - -**Context**: When ctx init failed with a boundary error, my first instinct was -to have init bypass rc.ContextDir() and use filepath.Join(cwd, dir.Context) -directly. Volkan shut that down: rc.ContextDir() encodes invariants (team -shares, symlinks, network mounts, .ctxrc overrides) that individual commands -cannot reason about. - -**Lesson**: Resolution chains with multiple fallbacks are contracts. If one -command bypasses the chain, it silently diverges from every other command's -notion of 'the context directory.' When a resolver produces a wrong answer for a -specific case, fix the resolver — don't let callers opt out. - -**Application**: Any time you see rc.ContextDir(), rc.RC(), or similar central -resolvers producing a bad result, the fix belongs in the resolver itself (or in -its input data like .ctxrc). Caller-side bypasses create drift. - ---- - -## [2026-04-09-001323] Pad index shifting is a real UX bug in batch operations - -**Context**: ctx pad rm 10; rm 11; rm 12 deleted wrong entries because indices -shifted after each deletion - -**Lesson**: Any ID-based system where users chain operations needs stable IDs. -Look-then-act is safe for single ops; look-then-batch-act breaks with shifting -indices - -**Application**: Both pad and remind now use stable IDs with batch delete and -range support. Apply same pattern to any future numbered-list subsystem - ---- - -## [2026-04-08-074612] fmt.Fprintf to strings.Builder silently discards errors - -**Context**: golangci-lint errcheck allows fmt.Fprintf to strings.Builder -because Write never fails, but project convention says zero silent discard - -**Lesson**: Linter coverage gaps exist where language guarantees mask -conventions. AST tests fill the gap - -**Application**: Created TestNoUncheckedFmtWrite to enforce fmt.Fprintf error -handling. Use if _, err := fmt.Fprintf(...) 
with log.Warn on the error path - ---- - -## [2026-04-08-074604] AST audit tests must cover unexported functions too - -**Context**: TestDocCommentStructure only checked exported functions, so -agent-written helpers in format.go had no godoc enforcement - -**Lesson**: Convention enforcement tests must default to scanning all documented -functions. Use explicit opt-outs (test files) not opt-ins (exported only) - -**Application**: When adding AST audit tests, scan all functions. We fixed -TestDocCommentStructure to drop the IsExported gate and fixed 84 violations - ---- - -## [2026-04-06-204226] Agents ignore system-reminder content without explicit relay instructions - -**Context**: Provenance line (Session: abc | Branch: main @ hash) was emitted by -hook but agents in other projects silently ignored it. The line appeared in the -system-reminder but the agent treated it as internal metadata. - -**Lesson**: Claude Code surfaces hook stdout as system-reminder tags. Agents -only relay content that has explicit display instructions. IMPORTANT: means pay -attention internally. Display this line verbatim means show to user. Without the -instruction, even correct output is invisible to the user. - -**Application**: Any hook output intended for the user must include an explicit -relay instruction like Display this line verbatim at the start of your response. -Do not rely on IMPORTANT: alone — it signals internal priority, not -user-facing output. 
- ---- - -## [2026-04-04-025813] Format-verb strings are localizable text, not exempt from magic string checks - -**Context**: Strings like '%d entries checked' were passing TestNoMagicStrings -because the format-verb exemption was too broad - -**Lesson**: Any string containing English words alongside format directives is -user-facing text that belongs in YAML assets - -**Application**: Removed format-verb, URL-scheme, HTML-entity, and err/ -exemptions from TestNoMagicStrings - ---- - -## [2026-04-04-025805] Agents add allowlist entries to make tests pass — guard every exemption - -**Context**: Found that every exemption map/allowlist in audit tests is a -tempting shortcut for agents - -**Lesson**: Added DO NOT widen guard comments to all 10 exemption data -structures across 7 test files - -**Application**: Every new audit test with an exemption must include the guard -comment. Review PRs for drive-by allowlist additions. - ---- - -## [2026-04-03-180000] Subagent scope creep and cleanup (consolidated) - -**Consolidated from**: 4 entries (2026-03-06 to 2026-03-23) - -- Subagents reliably rename functions, restructure files, change import aliases, - and modify function signatures beyond their stated scope — even narrowly - scoped tasks like fixing em-dashes in comments -- Subagents create new files during refactors but consistently fail to delete - the originals — always audit for stale files, duplicate definitions, and - orphaned imports afterward -- After any agent-driven refactor: run `git diff --stat` and `git diff - --name-only HEAD`, revert anything outside the intended scope, and check for - stale package declarations before building - ---- - -## [2026-04-03-180000] Bulk rename and replace_all hazards (consolidated) - -**Consolidated from**: 3 entries (2026-03-15 to 2026-03-20) - -- `replace_all` on short tokens (e.g. 
`core.`, function names) matches inside - longer identifiers and function definitions — `remindcore.` becomes - `remindtidy.`, `func HumanAgo` becomes `func format.DurationAgo` (invalid Go) -- `sed` insert-before-first-match does not understand Go import aliases — the - alias attaches to whatever line sed inserts, not the original target -- For function renames: delete the old definition separately rather than using - replace_all. For bulk import additions: check for aliased imports first and - handle them separately, or use goimports - ---- - -## [2026-04-03-180000] Import cycles and package splits (consolidated) - -**Consolidated from**: 5 entries (2026-03-06 to 2026-03-22) - -- Types in god-object files (e.g. hook/types.go with 15+ types from 8 domains) - create circular dependencies — move types to their owning domain package -- Tests in parent package X cannot import X/sub packages that import X back — - move tests to the sub-package they exercise -- Variable shadowing causes cascading failures after splits: `dir`, `file`, - `entry` are common Go variable names that collide with new sub-package names - — run `go test ./...` before committing splits -- When moving constants between packages, change imports and all references in a - single atomic write so the linter never sees an inconsistent state -- Import cycle rule: the package providing implementation logic must own the - shared types; the facade package aliases them (e.g. 
`entry.Params` aliases - `add/core.EntryParams`) - ---- - -## [2026-04-03-180000] Lint suppression and gosec patterns (consolidated) - -**Consolidated from**: 4 entries (2026-03-04 to 2026-03-19) - -- Rename constants to avoid gosec G101 false positives (Tokens->Usage, - Passed->OK) instead of adding nolint/nosec/path exclusions — exclusions - break on file reorganization -- `nolint:goconst` for trivial values normalizes magic strings — use config - constants instead of suppressing the linter -- `nolint:errcheck` in tests teaches agents to spread the pattern to production - code — use `t.Fatal(err)` for setup, `defer func() { _ = f.Close() }()` for - cleanup -- golangci-lint v2 ignores inline nolint directives for some linters — use - config-level `exclusions.rules` for gosec patterns, fix the code instead of - suppressing errcheck - ---- - -## [2026-04-03-180000] Skill lifecycle and promotion (consolidated) - -**Consolidated from**: 4 entries (2026-03-01 to 2026-03-14) - -- Internal skill renames and promotions require synchronized updates across 6+ - layers: SKILL.md frontmatter, internal cross-references, external docs, - embed_test.go expected list, recipe/reference docs, and plugin cache rebuild + - session restart -- Skill behavior changes ripple through hook messages, fallback strings in Go - code, doc descriptions, and Makefile hints — grep for the skill name across - the entire repo -- Skills without a trigger mechanism (no user invocation, no hook loading) are - dead code — audit skills for reachability -- After promoting skills: grep -r for the old name across the whole tree, run - plugin-reload.sh, restart session to verify autocomplete, and clean stale - Skill() entries from settings.local.json - ---- - -## [2026-04-03-180000] Cross-cutting change ripple (consolidated) - -**Consolidated from**: 4 entries (2026-02-19 to 2026-03-01) - -- Path changes (e.g. 
key file location) ripple across 15+ doc files and 2 skills - — grep broadly (not just code) and budget for 15+ file touches -- Removing embedded asset directories requires synchronized cleanup across 5+ - layers: embed directive, accessor functions, callers, tests, config constants, - build targets, documentation — work outward from the embed -- Absorbing shell scripts into Go commands creates a discoverability gap — - update contributing.md, common-workflows.md, and CLI index as part of the - absorption checklist -- A feature without docs is invisible to users: always check feature page, - cli-reference.md, relevant recipes, and zensical.toml nav after implementing a - new CLI subcommand - ---- - -## [2026-04-03-180000] Dead code detection (consolidated) - -**Consolidated from**: 3 entries (2026-03-15 to 2026-03-30) - -- Dead packages can build and test green while being completely unreachable — - detection requires checking bootstrap registration, not just build success - (e.g. internal/cli/recall/ existed with tests but was never wired into the - command tree) -- Files created by `ctx init` that no agent, hook, or skill ever reads are dead - on arrival — verify there is at least one consumer before adding to init - scaffolding -- When touching legacy compat code, first ask whether the legacy path has real - users — if not, delete it entirely rather than improving it (MigrateKeyFile - had 5 callers and test coverage but zero users) - ---- - -## [2026-04-03-133244] desc.Text() is the single highest-connectivity symbol in the codebase - -**Context**: GitNexus enrichment during architecture analysis revealed -desc.Text() (internal/assets/read/desc/desc.go:75) has 30+ direct callers -spanning every architectural layer (MCP handler, format, index, tidy, trace, -memory, sysinfo, io) and participates in 53 execution flows. - -**Lesson**: TestDescKeyYAMLLinkage is the most critical guard in the codebase -— it protects the symbol with the widest blast radius. 
If YAML text loading -breaks, the entire CLI and MCP server output blank strings silently (no crash, -no warning). - -**Application**: Treat desc.Text() as a frozen API — add new functions rather -than modifying the existing signature. Any change to config/embed/text or -assets/read/desc should be followed by running the linkage audit. Monitor this -symbol during major refactors. - ---- - -## [2026-04-01-233250] Raw I/O migration unlocks downstream checks for free - -**Context**: TestNoRawPermissions had zero violations because the raw I/O -migration moved all octal literals into internal/io/ which already used -config/fs constants - -**Lesson**: Chokepoint migrations have cascading benefits — centralizing one -concern (file I/O) automatically resolves other drift (raw permissions) - -**Application**: Prioritize chokepoint migrations (io, exec, write, err) before -smaller checks that depend on them - ---- - -## [2026-04-01-233248] go/packages respects build tags — darwin-only violations invisible on Linux - -**Context**: TestNoExecOutsideExecPkg could not detect violations in _darwin.go -files when running on Linux - -**Lesson**: AST checks using go/packages only see files matching the current -GOOS. 
Cross-platform violations need either multi-GOOS CI or a go/parser -fallback - -**Application**: When writing audit checks for code with build tags, fix the -violations regardless (code correctness) but note that test coverage is -platform-dependent - ---- - -## [2026-04-01-074419] Copilot CLI skills need a sync mechanism to prevent drift from ctx skills - -**Context**: 5 Copilot CLI skills were condensed versions of ctx skills, -independently maintained with no drift detection - -**Lesson**: Any time the same content exists in two locations without a sync -mechanism, it will drift silently - -**Application**: make sync-copilot-skills added to build deps, make -check-copilot-skills added to audit target - ---- - -## [2026-04-01-074418] Contributor PRs based on older code reintroduce removed features - -**Context**: PR #45 brought back prompt templates, PROMPT.md, and -IMPLEMENTATION_PLAN.md that were explicitly removed in March - -**Lesson**: When resolving contributor merge conflicts, check decisions history -for intentional removals — do not assume the PR content is additive - -**Application**: Cross-reference DECISIONS.md before accepting PR content that -adds files or features - ---- - -## [2026-03-31-224247] Magic string cleanup compounds: each pass reveals the next layer - -**Context**: What started as fix 4 fmt.Fprintf(os.Stderr) calls expanded to -over-tokenized format strings, magic hex perms, unstandardized TOML parsing -tokens, missing docstrings on new constants — each fix exposed adjacent -violations - -**Lesson**: Mechanical cleanup is fractal. The first sweep finds the obvious -violations, but fixing them puts adjacent code under scrutiny. Budget for 2-3x -the initial estimate - -**Application**: When scoping cleanup tasks, do not commit to done in one pass. 
-Commit after each layer and let the user decide when to stop - ---- - -## [2026-03-31-182054] Force-loaded behavioral prose gets ignored — action-gating hooks don't - -**Context**: AGENT_PLAYBOOK was force-injected at ~14k tokens every session. -Agent routinely skipped its Context Readback directive when the user's first -message was a concrete task. Meanwhile, hooks that gate actions (qa-reminder, -specs-nudge, block-dangerous-commands) were consistently followed because they -fire at the moment of violation. - -**Lesson**: Prose instructions compete with the user's immediate request and -lose. Hooks that intercept actions at execution time are enforceable. More -injected content means less attention per token — slim injection to only what -must be internalized before any action. - -**Application**: When adding agent directives, prefer action-gating hooks over -injected prose. If it must be injected, keep it small and directive-only. -Reserve force-injection for hard rules (CONSTITUTION) and distilled actionable -checklists (gate file). - ---- - -## [2026-03-31-112534] Legacy key directory cleanup was specified but not automated - -**Context**: ~/.local/ctx/keys/ accumulated 584 orphan keys from test runs -before the v0.8.0 migration to ~/.ctx/.ctx.key - -**Lesson**: Migration specs that call for manual cleanup of old paths should -include an automated step — either in the migration code itself or as a -post-release cleanup task. Tests that write to global paths must isolate HOME. - -**Application**: When writing migration specs, always include automated cleanup -of the old path. When writing tests that touch user-level directories, verify -HOME is isolated via t.Setenv. - ---- - -## [2026-03-31-005112] Convention audits must check cmd/ purity, not just types and docstrings - -**Context**: Placed needsSpec helper in cmd/root/run.go instead of -core/entry/predicate.go. 
Missed it because the audit checklist only covered -types and docstrings - -**Lesson**: cmd/ directories must contain only Cmd() and Run*() — all helper -functions, unexported logic, and types belong in core/. Added TestCmdDirPurity -compliance test to enforce this mechanically - -**Application**: The compliance test now catches this automatically. 28 -pre-existing violations grandfathered in the allowlist - ---- - -## [2026-03-31-005110] JSON Schema default fields cause linter errors with some validators - -**Context**: ctxrc.schema.json had default: values on 16 fields that triggered -incompatible type errors in the user's linter - -**Lesson**: Move default values into the description string instead of using the -default keyword — Go rc.*() accessors handle the actual defaults - -**Application**: When adding new .ctxrc fields, document defaults in the -description, never use default: in the schema - ---- - -## [2026-03-30-075941] Architecture diagrams drift silently during feature additions - -**Context**: During the journal-recall merge, architecture-dia-build.md listed -23 CLI packages but 31 existed. 8 packages added over months without updating -the diagram. - -**Lesson**: Exhaustive lists and counts in architecture docs go stale every time -a package is added. The drift is invisible because nobody re-counts. - -**Application**: After adding a new CLI package, grep architecture diagrams for -package counts and directory listings. Consider adding a drift-check comment -that validates the count programmatically. 
- ---- - -## [2026-03-30-003734] Python-generated doc.go files need gofmt — formatter strips bare // padding lines - -**Context**: Batch-generated doc.go files used blank // lines for padding, which -gofmt removes as unnecessary whitespace - -**Lesson**: Programmatic Go file generation must produce substantive content -lines, not blank comment padding — gofmt enforces this - -**Application**: Always run gofmt after any scripted Go file generation - ---- - -## [2026-03-30-003707] lint-docstrings.sh greedy sed hid all return-type violations - -**Context**: sed 's/.*) //' consumed return type parens, leaving { — functions -with return types were invisible to the script for months - -**Lesson**: Greedy regex in shell scripts can silently suppress entire -categories of lint violations — test with edge cases, not just happy paths - -**Application**: When writing sed-based lint checks, test with multi-paren -signatures (func Foo() (string, error)) - ---- - -## [2026-03-25-234039] Machine-generated CLAUDE.md content consumes per-turn budget without proportional value - -**Context**: GitNexus injected 121 lines (61% of CLAUDE.md) with auto-generated -skill pointers like 'Work in the Watch area (39 symbols)' — generic index data -loaded on every conversation turn - -**Lesson**: CLAUDE.md is prime real estate — every token competes with -project-specific instructions. Auto-generated content belongs in on-demand -skills, not in always-loaded files - -**Application**: Audit CLAUDE.md periodically for content that could be -delivered via skills instead. 
Prefer a one-line pointer over inline content for -companion tools - ---- - -## [2026-03-25-173338] Template improvements don't propagate to existing projects - -**Context**: 5 of 8 context files in the ctx project itself had stale/missing -comment headers — templates evolved but non-destructive init never re-synced -them - -**Lesson**: Any template change is invisible to existing users until they run -ctx init --force - -**Application**: Added drift detection (checkTemplateHeaders) to ctx drift. -Consider surfacing this during ctx status too. - ---- - -## [2026-03-24-001001] lint-drift false positives from conflating constant namespaces - -**Context**: lint-drift.sh checked all string constants in embed/cmd/*.go -against commands.yaml, but Use* constants are cobra syntax strings, not YAML -lookup keys - -**Lesson**: Shell grep on constant values cannot distinguish constant types; -only DescKey* constants are YAML keys. AST-based analysis is needed for -type-aware checks - -**Application**: Already captured in specs/ast-audit-tests.md; the lint-drift -fix is shipped in v0.8.0 - ---- - -## [2026-03-24-000959] git describe --tags follows ancestry, not global tag list - -**Context**: Release notes skill diffed against v0.3.0 instead of v0.6.0 because -the release branch diverged before v0.6.0 was tagged - -**Lesson**: git describe --tags --abbrev=0 follows reachability from HEAD; use -git tag --sort=-v:refname | head -1 for the latest tag globally - -**Application**: Any script or skill that needs the latest release should use -sorted tag list, not describe - ---- - -## [2026-03-23-165611] Typography detection script needs exclusion lists for intentional uses - -**Context**: detect-ai-typography.sh flagged config/token/delim.go (intentional -delimiter constants) and test files (test data containing em-dashes) - -**Lesson**: Detection scripts for convention enforcement need exclusion patterns -for files where the flagged patterns are intentional data, not prose - 
-**Application**: Add exclusion patterns proactively when creating detection -scripts; *_test.go and constant-definition files are common false positive -sources - ---- - -## [2026-03-23-003544] Splitting core/ into subpackages reveals hidden structure - -**Context**: init core/ was a flat bag of domain objects — splitting into -backup/, claude/, entry/, merge/, plan/, plugin/, project/, prompt/, tpl/, -validate/ exposed duplicated logic, misplaced types, and function-pointer -smuggling that were invisible in the flat layout - -**Lesson**: Flat core/ packages hide coupling — circular dependency resolution -during splits naturally groups related items, increases cohesion, and surfaces -objects that don't belong - -**Application**: When a core/ package grows, split it into subpackages even if -it creates temporary circular deps — resolving those deps is the design work -that reveals the right structure - ---- - -## [2026-03-23-003353] Higher-order callbacks in param structs are a code smell - -**Context**: MergeParams.UpdateFn and DeployParams.ListErr/ReadErr were function -pointers where all callers passed thin wrappers varying only by a text key - -**Lesson**: If all callers pass thin wrappers around the same pattern -(fmt.Errorf with different keys), the callback is just data in disguise - -**Application**: When a struct field is a function pointer, check if all callers -vary only by a string key — if so, replace the callback with the key and let -the consumer do the dispatch - ---- - -## [2026-03-20-160112] Commit messages containing script paths trigger PreToolUse hooks - -**Context**: Git commit message body contained a path to a shell script under -the hack directory which matched a hook pattern that blocks direct script -invocation - -**Lesson**: Hooks scan all Bash tool input including heredoc content used for -commit messages, not just the command itself - -**Application**: Rephrase commit messages and ctx add content to avoid paths -that match hook deny 
patterns, use generic references instead of literal file -paths - ---- - -## [2026-03-18-133457] Lazy sync.Once per-accessor is a code smell for static embedded data - -**Context**: assets package had 4 sync.Once guards, 4 exported maps, 4 Load*() -functions, and a wrapper desc package — all to lazily load YAML from embed.FS -that never mutates. Every accessor call went through sync.Once + global map + -wrapper indirection. - -**Lesson**: When data is static and loaded from embedded bytes, scatter-loading -with per-accessor sync.Once is over-engineering. A single Init() called eagerly -at startup is simpler, and one sync.Once on Init() itself provides the test -safety net. Exported maps that exist only for wrapper packages to reach are a -sign the abstraction boundary is wrong. - -**Application**: Prefer eager Init() in main.go for static embedded data. Keep -maps unexported. Accessors do plain map lookups. If a wrapper package exists -solely to break a cycle caused by exported state, delete the wrapper and -unexport the state. - ---- - -## [2026-03-17-105637] Write package output census: 69 trivial/simple, 38 consolidation candidates, 18 complex - -**Context**: Full audit of internal/write/ (26 files, 160 functions, 337 Println -calls) to evaluate whether block template consolidation is worth a systematic -refactor. - -**Lesson**: Only 30% of write functions benefit from output consolidation. The -sweet spot is multi-line (16) and conditional (22) functions. - -**Application**: Check function category before consolidating. Trivial/simple -stay as-is. Conditional functions need pre-computation before block templates. -Loop-based complex functions stay imperative. Don't bulk-refactor. 
- ---- - -## [2026-03-16-114227] Docstring tasks require reading CONVENTIONS.md Documentation section first - -**Context**: Agent was asked to review docstrings in server.go but skipped -convention loading, missed incomplete Parameter/Returns sections, and needed -three hints to recall the known issue - -**Lesson**: Any task involving docstrings, comments, or documentation formatting -is a convention-sensitive task — read CONVENTIONS.md (Documentation section) -and LEARNINGS.md (for known gaps) before reviewing or writing - -**Application**: On any docstring/comment task: (1) load CONVENTIONS.md -Documentation section, (2) check LEARNINGS.md for related entries, (3) audit all -functions in scope against the convention template, not just the ones in the -diff - ---- - -## [2026-03-16-104146] Convention enforcement needs mechanical verification, not behavioral repetition - -**Context**: Godoc Parameters/Returns sections were missed repeatedly across -sessions despite memory entries and feedback - -**Lesson**: System-level brevity instructions outcompete context-injected -conventions. Memory shifts probability (~40% to ~70%) but doesn't create -invariants. The competing pressures are architectural, not a recall problem. - -**Application**: Invest in linter rules or PreToolUse gates for -mechanically-checkable conventions. Reserve behavioral nudges for judgment calls -that can't be linted. See ideas/spec-convention-enforcement.md for the -three-tier strategy. - ---- - -## [2026-03-16-022650] One-liner method wrappers hide dependencies without adding value - -**Context**: checkBoundary() and loadContext() were methods on Handler that just -called validation.ValidateBoundary and context.Load with h.ContextDir - -**Lesson**: If a method only passes a struct field to a stdlib function, inline -it — the wrapper obscures the real dependency - -**Application**: Before extracting a helper method, check if it just forwards a -field to another function. 
If so, call the function directly. - ---- - -## [2026-03-16-022642] Agents reliably introduce gofmt issues during bulk renames - -**Context**: Subagents renamed consequences->consequence across 75+ files but -left formatting errors in 12 Go files - -**Lesson**: Always run gofmt -l after agent-driven refactors before trusting the -build - -**Application**: Add gofmt -w pass as a standard step after any agent-driven -bulk edit - ---- - -## [2026-03-15-101342] Contributor PRs need post-merge follow-up commits for convention alignment - -**Context**: PR #42 (MCP v0.2) addressed bulk of review feedback but left ~12 -inline strings, no embed_test coverage, and substring matching in -containsOverlap - -**Lesson**: Merging with known gaps is fine when the gaps are mechanical, but -the follow-up must be immediate — track in ideas/done/ with a review status -doc - -**Application**: For future contributor PRs: create ideas/pr{N}-review-status.md -during review, merge when architecture is sound, fix convention gaps in a -same-day follow-up commit - ---- - -## [2026-03-15-040642] Grep for callers must cover entire working tree before deleting functions - -**Context**: Deleted 7 err/prompt functions as dead code, but callers existed in -unstaged refactoring files — caused build failures - -**Lesson**: When the working tree has unstaged changes from a prior session, -grep hits only committed+staged code; must grep the full tree or build-test -before declaring functions dead - -**Application**: Always run make build after deleting functions, even if grep -shows zero callers - ---- - -## [2026-03-14-180903] Stderr error messages are user-facing text that belongs in assets - -**Context**: Added fmt.Fprintf(os.Stderr) error reporting to event log, -initially with inline strings - -**Lesson**: Any string that reaches the user, including stderr warnings, routes -through assets.TextDesc() for i18n readiness - -**Application**: When adding stderr output, create text.yaml entries and 
asset -keys first - ---- - -## [2026-03-14-131202] Hardcoded _alt suffixes create implicit language favoritism - -**Context**: Session parser had session_prefix_alt hardcoding Turkish as a -special case alongside English default - -**Lesson**: Naming a constant _alt and hardcoding one non-English language as a -built-in default discriminates by giving that language special status. The -pattern doesn't scale (alt_2? alt_3?) and signals that adding languages requires -code changes. - -**Application**: When a feature needs multi-value support, use configurable -lists from the start — not hardcoded pairs with _alt suffixes. Default to a -single canonical value; all extensions are user-configured equally. - ---- - -## [2026-03-13-151952] sync-why mechanism existed but was not wired to build - -**Context**: assets/why/ had drifted from docs/ — the sync targets existed in -the Makefile but build did not depend on sync-why - -**Lesson**: Freshness checks that are not in the critical path will be -forgotten. Wire them as build prerequisites, not optional audit steps - -**Application**: Any derived or copied asset should be a prerequisite of build, -not just audit - ---- - -## [2026-03-12-133008] Project-root files vs context files are distinct categories - -**Context**: Tried moving ImplementationPlan constant to config/ctx assuming it -was a context file. (Note: IMPLEMENTATION_PLAN.md was removed in 2026-03-25 as a -dead file — no agent consumer.) - -**Lesson**: Files created by ctx init in the project root (Makefile) are -scaffolding, not context files loaded via ReadOrder. 
They belong in config/file, -not config/ctx - -**Application**: Before moving a file constant, check whether it is in ReadOrder -(context) or created by init (project-root) - ---- - -## [2026-03-12-133007] Constants belong in their domain package not in god objects - -**Context**: file.go held agent scoring constants, budget percentages, cooldown -durations — none related to file config - -**Lesson**: When a constant is only used by one domain (e.g. agent scoring), it -should live in that domain's config package - -**Application**: Check callers before placing constants; if all callers are in -one domain, the constant belongs there - ---- - -## [2026-03-07-221151] Always search for existing constants before adding new ones - -**Context**: Added ExtJsonl constant to config/file.go but ExtJSONL already -existed with the same value, causing a duplicate - -**Lesson**: Grep for the value (e.g. '.jsonl') across config/ before creating a -new constant — naming variations (camelCase vs ALLCAPS) make duplicates easy -to miss - -**Application**: Before adding any new constant to internal/config, search by -value not just by name - ---- - -## [2026-03-07-221148] SafeReadFile requires split base+filename paths - -**Context**: During system/core cleanup, persistence.go passed a full path to -validation.SafeReadFile which expects (baseDir, filename) separately - -**Lesson**: Use filepath.Dir(path) and filepath.Base(path) to split full paths -when adapting os.ReadFile calls to SafeReadFile - -**Application**: When converting os.ReadFile to SafeReadFile, always check -whether the existing code has a full path or separate components - ---- - -## [2026-03-06-141506] Stale directory inodes cause invisible files over SSH - -**Context**: Files created by Claude Code hooks were visible inside the VM but -not from the SSH terminal - -**Lesson**: If a directory is recreated (e.g. 
by auto-prune), an SSH shell -holding the old directory inode will not see new files — ls returns no such -file even though cat with the full path works from other shells - -**Application**: After ctx system prune or any state directory recreation, SSH -sessions need cd-dot or re-login to pick up the new inode - ---- - -## [2026-03-06-141504] Stats sort uses string comparison on RFC3339 timestamps with mixed timezones - -**Context**: ctx system stats showed only old sessions, hiding the current one - -**Lesson**: RFC3339 string comparison breaks when entries mix UTC (Z) and offset -(-08:00) formats — 13:00-08:00 sorts before 18:00Z lexicographically despite -being later in absolute time - -**Application**: Always parse to time.Time before comparing RFC3339 timestamps; -never rely on lexicographic sort - ---- - -## [2026-03-06-184820] Claude Code supports PreCompact and SessionStart hooks that ctx does not use - -**Context**: context-mode proves both hooks work in production across 5 -platforms - -**Lesson**: ctx's hook architecture only uses UserPromptSubmit, PreToolUse, and -PostToolUse — two lifecycle events are untapped - -**Application**: PreCompact snapshot plus SessionStart re-injection would -eliminate post-compaction disorientation without any new persistence layer since -ctx agent already generates the content - ---- - -## [2026-03-06-050125] Package-local err.go files invite broken windows from future agents - -**Context**: Found err.go files in 5 CLI packages with heavily duplicated error -constructors (errFileWrite, errMkdir, errZensicalNotFound repeated across -packages) - -**Lesson**: Centralizing errors in internal/err eliminates duplication and -prevents agents from continuing the pattern of adding local err.go files when -they see one exists - -**Application**: New error constructors go to internal/err/errors.go. No err.go -files in CLI packages. 
- ---- - -## [2026-03-05-205422] State directory accumulates silently without auto-prune - -**Context**: Found 234 files in .context/state/ from weeks of sessions with no -cleanup mechanism - -**Lesson**: Session tombstones are write-only. Without auto-prune, the state -directory grows unbounded. Added autoPrune(7) to context-load-gate so cleanup -happens once per session at startup. - -**Application**: Auto-prune is now wired into session start via -context-load-gate. Manual prune still available via ctx system prune for -aggressive cleanup. - ---- - -## [2026-03-05-205419] Global tombstones suppress hooks across all sessions - -**Context**: Memory drift nudge used memory-drift-nudged with no session ID in -filename - -**Lesson**: Any tombstone file intended to be session-scoped must include the -session ID in its filename, otherwise it suppresses across all concurrent and -future sessions. Use the UUID pattern so prune can clean them up. - -**Application**: Audit all tombstone files for session-scoping; fixed -memory-drift, but backup-reminded, ceremony-reminded, check-knowledge, -journal-reminded, version-checked, ctx-wrapped-up still have this bug - ---- - -## [2026-03-05-042157] Claude Code has two separate memory systems behind feature flags - -**Context**: Filesystem and behavioral analysis of Claude Code v2.1.69 - -**Lesson**: Claude Code has two separate memory systems behind feature flags. -Auto memory writes MEMORY.md to disk (user-visible, toggleable via settings). -Session memory is a separate background extraction pipeline with compaction and -team sync (push/pull model). The two systems serve different purposes and are -independently feature-flagged. - -**Application**: ctx memory bridge targets auto memory (MEMORY.md on disk). -Session memory is API-side and not directly accessible. Full findings in -ideas/claude-code-project-directory-structure.md. 
- ---- - -## [2026-03-05-023941] Blog post editorial feedback is higher-leverage than drafting - -**Context**: Draft of Agent Memory Is Infrastructure was publication-quality on -first pass; user editorial feedback (structural emphasis, rhetorical sharpening, -amnesia/archaeology bridge) elevated it significantly more than initial -generation - -**Lesson**: For narrative content, the first draft captures the argument; the -editorial pass captures the voice. Both are necessary but the editorial pass has -disproportionate impact on quality. - -**Application**: For future blog posts, invest more in the editorial cycle -(structural feedback then targeted refinements) rather than trying to nail voice -on first generation. - ---- - -## [2026-03-04-105239] CONSTITUTION hook compliance is non-negotiable — don't work around it - -**Context**: After make build, ran ./ctx deps --help which was blocked by -block-non-path-ctx. Instead of asking user to install, tried cp ctx ~/bin/ — -escalating workarounds. - -**Lesson**: When a hook blocks an action, the correct response is to follow the -hook's instruction (ask the user to sudo make install), not to find creative -bypasses. - -**Application**: Always ask the user to install when testing a freshly built -binary. Never attempt alternative install paths to circumvent a hook. 
- ---- - -## [2026-03-02-165039] Hook message registry test enforces exhaustive coverage of embedded templates - -**Context**: Adding billing.txt to embedded assets without a registry entry -caused TestRegistryCoversAllEmbeddedFiles to fail immediately - -**Lesson**: Every new .txt file under internal/assets/hooks/messages/ must have -a corresponding entry in registry.go — the test acts as an exhaustive -bidirectional check - -**Application**: When adding new hook message variants, update the registry -entry before running tests - ---- - -## [2026-03-02-123613] Existing Projects is ambiguous framing for migration notes - -**Context**: A doc admonition said Existing Projects: if you have an older key -at X, it auto-migrates. Every project is existing once installed — the framing -does not tell you how far behind you need to be. - -**Lesson**: Version-anchored framing (Key Folder Change v0.7.0+) is clearer than -relative framing (Existing Projects, Legacy). State the version boundary and the -concrete action. - -**Application**: When writing migration notes, anchor to a version number and -give copy-pasteable commands, not vague auto-handled assurances. - ---- - -## [2026-03-02-005217] Claude Code JSONL model ID does not distinguish 200k from 1M context - -**Context**: Heartbeat hook was reporting 16% usage at 162k tokens because it -assumed claude-opus-4-6 always has 1M context window - -**Lesson**: The JSONL model field is identical for both variants (both report -claude-opus-4-6). The 1M context requires a beta header, not a different model -ID. The user's model selection is stored in ~/.claude/settings.json with a [1m] -suffix when 1M is active. - -**Application**: Auto-detect context window from ~/.claude/settings.json model -field containing [1m]. Default to 200k for all Claude models. The .ctxrc -context_window setting is a no-op for Claude Code users. 
- ---- - -## [2026-03-01-222739] Gosec G306 flags test file WriteFile with 0644 permissions - -**Context**: New tests used os.WriteFile(..., 0o644) for temp context files; -lint flagged all three occurrences - -**Lesson**: Gosec enforces 0600 max on WriteFile even in test code. Use 0o600 -for test temp files - -**Application**: Default to 0o600 for os.WriteFile in tests; only use wider -permissions when testing permission behavior specifically - ---- - -## [2026-03-01-222738] Converting PersistentPreRun to PersistentPreRunE changes exit behavior - -**Context**: Boundary violation test used subprocess pattern because original -code called os.Exit(1) - -**Lesson**: With PersistentPreRunE, errors propagate through Cobra Execute() -return — no os.Exit call. Subprocess-based tests that expected exit codes need -converting to direct error assertions - -**Application**: When converting PreRun to PreRunE in Cobra commands, audit all -tests that relied on os.Exit behavior - ---- - -## [2026-03-01-161459] Test HOME isolation is required for user-level path functions - -**Context**: After adding ~/.ctx/.ctx.key as global key location, test suites -wrote real files to the developer home directory - -**Lesson**: Any code that uses os.UserHomeDir() needs t.Setenv(HOME, tmpDir) in -tests — especially test helpers called by many tests (like setupEncrypted and -helper) - -**Application**: When adding features that write to user-level paths (~/.ctx/, -~/.config/), always add HOME isolation to test setup functions first - ---- - -## [2026-03-01-133014] Task descriptions can be stale in reverse — implementation done but task not marked complete - -**Context**: ctx recall sync task said 'command is not registered in Cobra' but -the code was fully wired and all tests passed. The task description was stale. - -**Lesson**: Tasks can become stale in the opposite direction from docs: -implementation gets completed but the task is not updated. 
Always verify with -ctx --help before assuming work remains. - -**Application**: Before starting implementation on a 'code exists but not wired' -task, run the command first to check if it already works. - ---- - -## [2026-03-01-124921] Model-to-window mapping requires ordered prefix matching + +## [2026-04-25-014704] Confident code comments can pull an LLM away from first-principles knowledge -**Context**: Implementing modelContextWindow() for the three-tier context window -fallback. Claude model IDs use nested prefixes (claude-sonnet-4-5 vs -claude-sonnet-4-20250514). +**Context**: cli_test.go had a comment claiming 'parent's t.Setenv doesn't propagate to exec'd children unless we build it into cmd.Env' which is wrong. I patched the helper's CTX_DIR dedup instead of questioning the helper itself, despite knowing t.Setenv semantics. -**Lesson**: A switch with ordered HasPrefix cases (most specific first) is -cleaner and safer than iterating separate prefix lists. The catch-all 'claude-*' -returns 200k for unrecognized Claude models. +**Lesson**: A comment that explains why a stdlib mechanism 'doesn't work' is doing extra rhetorical work to talk a reader out of the obvious approach. That's exactly when to verify from first principles instead of trusting the surrounding-code frame. -**Application**: When adding new model families to modelContextWindow() in -session_tokens.go, add the most specific prefix first to avoid shadowing shorter -prefixes. +**Application**: When an existing comment justifies a non-canonical approach contradicting stdlib knowledge: pause, verify against memory of the actual API before patching within the existing frame. 
--- -## [2026-03-01-095709] TASKS.md template checkbox syntax inside HTML comments is parsed by RegExTaskMultiline +## [2026-04-25-014704] filepath.Join('', rel) returns rel as CWD-relative, not error -**Context**: Template had example checkboxes (- [x], - [ ]) in HTML comments -that the line-based regex matched as real tasks, causing -TestArchiveCommand_NoCompletedTasks to fail +**Context**: Recurring orphan jsonl-path- appeared at project root. Older state.Dir() returned ('', nil) when CTX_DIR was undeclared, so filepath.Join('', 'jsonl-path-XXX') = 'jsonl-path-XXX', writing relative to CWD. -**Lesson**: RegExTaskMultiline is line-based and has no awareness of HTML -comment blocks — checkbox-like patterns inside comments get counted as real -tasks +**Lesson**: Functions returning a path-string must never return ('', nil). Sentinel errors force callers to gate, closing the silent CWD-relative write. -**Application**: Use backtick-quoted or indented references instead of actual -checkbox syntax in template comments. When adding examples to TASKS.md -templates, avoid patterns that match regExTaskPattern +**Application**: Audit any (string, error) path-returner that historically had a ('', nil) shortcut. Closed for state.Dir and rc.ContextDir; check remaining resolvers. --- -## [2026-03-01-092611] Hook logs had no rotation; event log already did +## [2026-04-25-014704] Parallel go test ./... packages can race on ~/.claude/settings.json -**Context**: Investigated .context/logs/ and .context/state/ file management - -**Lesson**: eventlog already rotates at 1MB with one previous generation. -logMessage() in state.go was pure append-only with no size check. 
- -**Application**: When adding new log sinks, follow the established rotation -pattern (size-based, single previous generation) - ---- - -## [2026-02-28-184758] ctx pad import, ctx pad export, and ctx system resources make three hack scripts redundant - -**Context**: Audited hack/ scripts against ctx CLI surface - -**Lesson**: As ctx CLI grew, several hack scripts became wrappers around -built-in commands (pad-import.sh -> ctx pad import, pad-export-blobs.sh -> ctx -pad export, resource-watch.sh -> watch -n5 ctx system resources) - -**Application**: Periodically audit hack/ for scripts that ctx has absorbed - ---- - -## [2026-02-28-184647] Getting-started docs assumed Claude Code as the only agent - -**Context**: The installation section opened with 'A full ctx installation has -two parts' — binary + Claude Code plugin — leaving non-Claude-Code users -without a clear path - -**Lesson**: Installation docs should lead with the universal requirement (the -binary) and present agent-specific integration as conditional - -**Application**: When writing docs for multi-tool projects, frame the common -denominator first, then branch by tool - ---- - -## [2026-02-28-150701] Plugin reload script must rebuild cache, not just delete it - -**Context**: hack/plugin-reload.sh was deleting -~/.claude/plugins/cache/activememory-ctx/ without repopulating it. Claude Code's -installed_plugins.json still referenced the cache path, so the plugin appeared -enabled but hooks.json was missing — all plugin hooks silently stopped firing. - -**Lesson**: Claude Code snapshots plugin hooks from the cache directory at -session startup. If the cache is deleted, plugin hooks vanish silently with no -error. The reload script must rebuild the cache from source assets -(internal/assets/claude/) after clearing it, and warn that a session restart is -required. - -**Application**: Always rebuild the plugin cache in hack/plugin-reload.sh. 
When -debugging hooks that don't fire, check ~/.claude/plugins/cache/ first — a -missing hooks.json is the most likely cause. - ---- - -## [2026-02-27-231228] site/ directory must be committed with docs changes - -**Context**: The site/ directory contains generated HTML served directly from -the repo (no CI build step). Multiple sessions have committed docs/ changes -without the corresponding site/ output, or ignored site/ as 'generated noise'. - -**Lesson**: site/ is intentionally tracked in git — there is no GitHub Pages -workflow or CI step to build it. When docs change, the regenerated site/ HTML -must be staged and committed alongside the source. - -**Application**: Always git add site/ when committing changes under docs/. Never -gitignore site/. - ---- - -## [2026-02-27-230741] Doctor token_budget vs context_window confusion - -**Context**: ctx doctor reported context size against token_budget (8k) instead -of context_window (200k), making 22k tokens look alarming. - -**Lesson**: token_budget (ctx agent output trim target) and context_window -(model capacity) serve different purposes. Health checks about context fitting -should use context_window, with warning threshold proportional (e.g., 20% of -window). - -**Application**: Doctor now uses rc.ContextWindow() with 20% threshold and shows -per-file token breakdown for actionable insight into which files are heavy. - ---- - -## [2026-02-27-230738] Drift detector false positives on illustrative code examples - -**Context**: ctx drift flagged 23 warnings for backtick-quoted paths in -CONVENTIONS.md and ARCHITECTURE.md that were prose examples (loader.go, -session/run.go, sync.Once), not real file references. - -**Lesson**: Path reference detection should verify the top-level directory -exists on disk before flagging. Bare filenames and paths under non-existent -directories are almost always examples in documentation. - -**Application**: The fix checks os.Stat(topDir) on the first path component. 
-Future drift checks on documentation-heavy files should use the same heuristic. - ---- - -## [2026-02-27-002830] Context injection and compliance strategy (consolidated) - -**Consolidated from**: 3 entries (2026-02-26) - -- Verbal summaries with linked diagram files cut ARCHITECTURE.md from ~12K to - ~3.8K tokens. Extract diagrams to linked files outside FileReadOrder; keep - prose summaries inline. The 4-chars-per-token estimator is accurate — - optimize content, not the estimator. -- Soft instructions have a ~75-85% compliance ceiling because "don't apply - judgment" is itself evaluated by judgment. When 100% compliance is required, - don't instruct — inject via `additionalContext`. Reserve soft instructions - for ~80% acceptable compliance. -- Once ~7K tokens are auto-injected (fait accompli), the agent's rationalization - inverts from "skip to save effort" to "marginal cost is trivial." Front-load - highest-value content as injection, then use sunk cost to motivate on-demand - reads for the remainder. - ---- - -## [2026-02-26-003854] Webhook silence after ctxrc profile swap is the most common notify debugging red herring - -**Context**: Spent time investigating why webhooks weren't firing — checked -binary version, hook configs, notify.Send internals. Actual cause was .ctxrc -swapped to prod profile (notify commented out) earlier in session. - -**Lesson**: When webhooks stop, check .ctxrc profile first (`ctx config -status`). Also: not all tool uses trigger webhook-sending hooks — Read only -triggers context-load-gate (one-shot) and ctx agent (no webhook). qa-reminder -requires Edit matcher. - -**Application**: Before debugging notify internals, run `ctx config status` and -verify the event would actually match a hook with notify.Send. 
- ---- - -## [2026-02-26-100000] Documentation drift and auditing (consolidated) - -**Consolidated from**: 6 entries (2026-01-29 to 2026-02-24) - -- CLI reference docs can outpace implementation: ctx remind had no CLI, ctx - recall sync had no Cobra wiring, key file naming diverged between docs and - code. Always verify with `ctx --help` before releasing docs. -- Structural doc sections (project layouts, command tables, skill counts) drift - silently. Add `` markers above any - section that mirrors codebase structure. -- Agent sweeps for style violations are unreliable (8 found vs 48+ actual). - Always follow agent results with targeted grep and manual classification. -- ARCHITECTURE.md missed 4 core packages and 4 CLI commands. The /ctx-drift - skill catches stale paths but not missing entries — run /ctx-architecture - after adding new packages or commands. -- Documentation audits must compare against known-good examples and - pattern-match for the COMPLETE standard, not just presence of any comment. -- Dead link checking belongs in /consolidate's check list (check 12), not as a - standalone concern. When a new audit concern emerges, check if it fits an - existing audit skill first. - ---- - -## [2026-02-26-100002] Agent context loading and task routing (consolidated) - -**Consolidated from**: 5 entries (2026-01-20 to 2026-01-25) - -- `ctx agent` is optimized for task execution (filters pending tasks, surfaces - constitution, token-budget aware). Manual file reading is better for - exploratory/memory questions (session history, timestamps, completed tasks). -- On "Do you remember?" questions, immediately read .context/ files and run `ctx - journal source --limit 5`. Never ask "would you like me to check?" — that is - the obvious intent. -- .context/ is NOT a Claude Code primitive. Only CLAUDE.md and - .claude/settings.json are auto-loaded. The .context/ directory requires a hook - or explicit CLAUDE.md instruction to be discovered. 
-- ~~Orchestrator (IMPLEMENTATION_PLAN.md) and agent (.context/TASKS.md) task - lists must be separate.~~ (Superseded 2026-03-25: IMPLEMENTATION_PLAN.md - removed. TASKS.md is the single task source.) -- Only CLAUDE.md is auto-loaded by Claude Code. Projects using ctx should rely - on the CLAUDE.md -> AGENT_PLAYBOOK.md chain, not AGENTS.md. - ---- - -## [2026-02-26-100005] Go testing patterns (consolidated) - -**Consolidated from**: 7 entries (2026-01-19 to 2026-02-26) - -- Compiler-driven refactoring misses test files: `go build ./...` catches - production callsite breaks but not test files. Always run `go test ./...` - after signature changes. -- All runCmd() returns must be consumed in tests: even setup calls need `_, _ = - runCmd(...)` to satisfy errcheck. -- Set `color.NoColor = true` in a package-level init function to disable ANSI - codes for CLI test string assertions. -- Recall CLI tests isolate via HOME env var: `t.Setenv("HOME", tmpDir)` with - `.claude/projects/` structure gives full isolation from real session data. -- `formatDuration` accepts an interface with a Minutes method, not time.Duration - directly. Use a stubDuration struct for testing. -- CI tests need `CTX_SKIP_PATH_CHECK=1` env var because init checks if ctx is in - PATH. -- CGO must be disabled for ARM64 Linux (`CGO_ENABLED=0`) — CGO causes - cross-compilation issues with `-m64` flag. - ---- - -## [2026-02-26-100006] PATH and binary handling (consolidated) - -**Consolidated from**: 3 entries (2026-01-21 to 2026-02-17) - -- Always use `ctx` from PATH, never `./dist/ctx-linux-arm64` or `go run - ./cmd/ctx`. Check `which ctx` if unsure. -- Hooks must use PATH, not hardcoded paths. `ctx init` checks if ctx is in PATH - before proceeding. Tests can skip with `CTX_SKIP_PATH_CHECK=1`. -- Agent must never place binaries in any bin directory (not via cp, mv, or go - install). Build with `make build`, then ask the user to run the privileged - install step. 
Hooks in block-dangerous-commands.sh enforce this. - ---- - -## [2026-02-26-100007] Task management and exit criteria (consolidated) - -**Consolidated from**: 4 entries (2026-01-21 to 2026-02-17) - -- Specs get lost without cross-references from TASKS.md. Three-layer defense: - (1) playbook instruction, (2) spec reference in Phase header, (3) bold - breadcrumb in first task. -- Subtask completion is implementation progress, not delivery. Parent tasks - should have explicit deliverables; don't close until deliverable is verified. -- Exit criteria must include verification: integration tests (binary executes - correctly), coverage targets, and smoke tests. "All tasks checked off" does - not equal "implementation works." -- Reports graduate to ideas/done/ only after all items are tracked or resolved. - Cross-reference every item against TASKS.md and the codebase before moving. - ---- - -## [2026-02-26-100008] Agent behavioral patterns (consolidated) - -**Consolidated from**: 5 entries (2026-01-25 to 2026-02-22) - -- Interaction pattern capture risks softening agent rigor. Do not build implicit - user-modeling from session history. Rely on explicit, human-reviewed context - (learnings, conventions, hooks) for behavioral shaping. -- Chain-of-thought prompting improves agent reasoning accuracy (17.7% to 78.7%). - Added "Reason Before Acting" to AGENT_PLAYBOOK.md and reasoning nudges to 7 - skills. -- Say "project conventions" not "idiomatic X" to ensure Claude looks at project - files first rather than triggering training priors (stdlib conventions). -- Autonomous "YOLO mode" is effective for feature velocity but accumulates - technical debt (magic strings, monolithic tests, hardcoded paths). Schedule - periodic consolidation sessions. -- Trust the binary output over source code analysis. A single ambiguous CLI - output is not proof of absence — re-run the exact command before claiming - something is missing. 
- ---- - -## [2026-02-26-100009] Hook compliance and output routing (consolidated) - -**Consolidated from**: 3 entries (2026-02-22 to 2026-02-25) - -- Plain-text hook output is silently ignored by the agent. Claude Code parses - hook stdout starting with `{` as JSON directives; plain text is disposable. - All hooks should return JSON via `printHookContext()`. -- Hook compliance degrades on narrow mid-session tasks (~15-25% partial skip - rate). Root cause: CLAUDE.md's "may or may not be relevant" system reminder - competes with hook authority. Fix: CLAUDE.md explicitly elevates hook - authority. The mandatory checkpoint relay block is the compliance canary. -- No reliable agent-side before-session-end event exists. SessionEnd fires after - the agent is gone. Mid-session nudges and explicit /ctx-wrap-up are the only - reliable persistence mechanisms. - ---- - -## [2026-02-26-100010] ctx add and decision recording (consolidated) - -**Consolidated from**: 4 entries (2026-01-27 to 2026-02-14) - -- `ctx add learning` requires `--context`, `--lesson`, `--application` flags. - `ctx add decision` requires `--context`, `--rationale`, `--consequence`. A - bare string only sets the title and the command will fail without required - flags. -- Structured entries with Context/Lesson/Application are more useful than - one-liners. Agents are guided via AGENT_PLAYBOOK.md. -- Always complete decision record sections — placeholder text like "[Add - context here]" is a code smell. Decisions without rationale lose their value - over time. -- Slash commands using `!` bash syntax require matching permissions in - settings.local.json. When adding new /ctx-* commands, ensure ctx init - pre-seeds the required `Bash(ctx :*)` permissions. - ---- - -## [2026-02-24-032945] CLI tools don't benefit from in-memory caching of context files - -**Context**: Discussed whether ctx should read and cache LEARNINGS.md, -DECISIONS.md etc. 
in memory - -**Lesson**: ctx is a short-lived CLI process, not a daemon. Context files are -tiny (few KB), sub-millisecond to read. Cache invalidation complexity exceeds -the read cost. Caching only makes sense if ctx becomes a long-lived process (MCP -server, watch daemon). - -**Application**: Don't add caching layers to ctx's file reads. If an MCP server -mode is ever added, revisit then. - ---- - -## [2026-02-22-120000] Hook behavior and patterns (consolidated) - -**Consolidated from**: 8 entries (2026-01-25 to 2026-02-17) - -- Hook scripts receive JSON via stdin (not env vars); parse with - `HOOK_INPUT=$(cat)` then jq -- Hook key names are case-sensitive: `PreToolUse` and `SessionEnd` (not - `PreToolUseHooks`) -- Use `$CLAUDE_PROJECT_DIR` in hook paths, never hardcode absolute paths -- Hook regex can overfit: `ctx` as binary vs directory name differ; anchor - patterns to command-start positions with `(^|;|&&|\|\|)\s*` -- grep patterns match inside quoted arguments — test with `ctx add learning - "...blocked words..."` to verify no false positives -- Hook scripts can silently lose execute permission; verify with `ls -la - .claude/hooks/*.sh` after edits -- Two-tier output is sufficient: unprefixed (agent context, may or may not - relay) and `IMPORTANT: Relay VERBATIM` (guaranteed relay); don't add new - severity prefixes -- Repeated injection causes agent repetition fatigue; use `--session $PPID - --cooldown 10m` and pair with a readback instruction - ---- - -## [2026-02-22-120001] UserPromptSubmit hook output channels (consolidated) - -**Consolidated from**: 2 entries (2026-02-12) - -- UserPromptSubmit hook stdout is prepended as AI context (not shown to user); - stderr with exit 0 is swallowed entirely -- User-visible output requires `{"systemMessage": "..."}` JSON on stdout - (warning banner) or exit 2 (blocks prompt) -- There is no non-blocking user-visible output channel for this hook type -- Design hooks for their actual audience: AI-facing = plain stdout, 
user-facing - = systemMessage JSON - ---- - -## [2026-02-22-120002] Linting and static analysis (consolidated) - -**Consolidated from**: 7 entries (2026-01-25 to 2026-02-20) - -- Full pre-commit gate: (1) `CGO_ENABLED=0 go build ./cmd/ctx`, (2) - `golangci-lint run`, (3) `CGO_ENABLED=0 go test` — all three, every time -- Own the codebase: fix pre-existing lint issues even if you didn't introduce - them -- gosec G301/G306: use 0o750 for dirs, 0o600 for files everywhere including - tests -- gosec G304 (file inclusion): safe to suppress with `//nolint:gosec` in test - files using `t.TempDir()` paths -- golangci-lint errcheck: use `cmd.Printf`/`cmd.Println` in Cobra commands - instead of `fmt.Fprintf` -- `defer os.Chdir(x)` fails errcheck; use `defer func() { _ = os.Chdir(x) }()` -- golangci-lint Go version mismatch in CI: use `install-mode: goinstall` to - build linter from source - ---- - -## [2026-02-22-120006] Permission and settings drift (consolidated) - -**Consolidated from**: 4 entries (2026-02-15) - -- Permission drift is distinct from code drift — settings.local.json is - gitignored, no review catches stale entries -- `Skill()` permissions don't support name prefix globs — list each skill - individually -- Wildcard trusted binaries (`Bash(ctx:*)`, `Bash(make:*)`), but keep git - commands granular (never `Bash(git:*)`) -- settings.local.json accumulates session debris; run periodic hygiene via - `/sanitize-permissions` and `/ctx-drift` - ---- - -## [2026-02-22-120008] Gitignore and filesystem hygiene (consolidated) - -**Consolidated from**: 3 entries (2026-02-11 to 2026-02-15) - -- Gitignored directories are invisible to `git status`; stale artifacts persist - indefinitely — periodically `ls` gitignored working directories -- Add editor artifacts (*.swp, *.swo, *~) to .gitignore alongside IDE - directories from day one -- Gitignore entries for sensitive paths are security controls, not documentation - — never remove during cleanup sweeps - ---- - -## 
[2026-01-28-051426] IDE is already the UI - -**Context**: Considering whether to build custom UI for .context/ files - -**Lesson**: Discovery, search, and editing of .context/ markdown files works -better in VS Code/IDE than any custom UI we'd build. Full-text search, -git integration, extensions - all free. - -**Application**: Don't reinvent the editor. Let users use their preferred IDE. - ---- +**Context**: make test runs packages in parallel processes. Fourteen test files invoked initialize.Cmd().Execute(), which read-modify-writes ~/.claude/settings.json without HOME isolation. +**Lesson**: Under load the races materialized as flaky 'FAIL coverage: [no statements]' in cli/watch/core. Run alone the package passed; under parallel make test it failed intermittently. -*Module-specific, niche, and historical learnings: -[learnings-reference.md](learnings-reference.md)* +**Application**: testctx.Declare now sets HOME alongside CTX_DIR. Centralized fix; future tests automatically isolate user-home writes. diff --git a/.context/TASKS.md b/.context/TASKS.md index 1a112ca92..77fcbbc98 100644 --- a/.context/TASKS.md +++ b/.context/TASKS.md @@ -25,2054 +25,12 @@ TASK STATUS LABELS: `#in-progress`: currently being worked on (add inline, don't move task) --> -### Misc +### Phase 1: [Name] `#priority:high` +- [ ] Task 1 +- [ ] Task 2 -- [ ] If context is not initialized, hooks should not run. Right now they run - and give a "context diretory outside project root" (that's a side effect). But - the issue is the project does not have a .context folder and we don't detect - it. **Progress 2026-04-13**: Boundary side effect resolved by git-anchored walk - (commit e24941d2). `state.Initialized()` guards added to `check_resource` and - `check_backup_age` — the two user-visible relay-nag hooks that were missing - them. **Remaining**: audit other missing-guard hooks (`mark_journal`, - `mark_wrapped_up`, `pause`, `resume`, `bootstrap`) per-hook. 
Safety hooks - (`block_dangerous_command`, `block_non_path_ctx`) intentionally run regardless. +### Phase 2: [Name] `#priority:medium` +- [ ] Task 1 +- [ ] Task 2 -- [x] Move `ctx bootstrap` back to `ctx system bootstrap` (hidden). Bootstrap is - agent-only plumbing — no human types it interactively. It was incorrectly - promoted to top-level in the namespace cleanup. Move the package back to - `internal/cli/system/cmd/bootstrap/`, restore - `UseSystemBootstrap`/`DescKeySystemBootstrap` constants, re-add `Hidden: - true`, update CLAUDE.md templates and skills back to `ctx system bootstrap`, - remove from `docs/cli/bootstrap.md` and `docs/cli/index.md` Diagnostics group, - remove from `zensical.toml` nav. Spec: specs/cli-namespace-cleanup.md - #priority:high #added:2026-04-11 #done:2026-04-12 - -- [x] Rename `ctx stats` to `ctx usage`. "Stats for what?" — the current name - lost its anchor when promoted from `ctx system stats`. `ctx usage` - communicates intent: "show me my token usage." `ctx session stats` was - considered but rejected as premature — a parent with one child is worse than - a flat command. Revisit when `ctx session` has 2+ children. Spec: - specs/cli-namespace-cleanup.md #priority:medium #added:2026-04-11 - #done:2026-04-12 - -- [x] Rename `ctx resource` to `ctx sysinfo`. Without the `system` prefix, - "resource" sounds like it manages project resources (files, assets, - infrastructure). It's actually a system-health snapshot: memory, swap, disk, - CPU load. `sysinfo` matches the internal package name (`internal/sysinfo`) and - is unambiguous. `health` was considered but rejected — too similar to `ctx - doctor` and `ctx doctor health` reads wrong. Same rename pattern. Spec: - specs/cli-namespace-cleanup.md #priority:medium #added:2026-04-11 - #done:2026-04-12 - -- [x] Remove `ctx dep`. Utility is marginal: agents rarely need a flat - dependency inventory to make decisions, and `go list -m all` / `npm ls` - already cover the use case. 
Doesn't clear the "would I miss it if it - vanished?" bar. Removed command, all support packages, docs, and recipes. - #priority:low #added:2026-04-11 #done:2026-04-12 - -- [x] Introduce `ctx hook` parent command — consolidate hook-related - user-facing commands under a single namespace: `ctx hook message - list/show/edit/reset` (currently `ctx message`), `ctx hook notify` (currently - `ctx notify`), `ctx hook pause` / `ctx hook resume` (currently top-level `ctx - pause` / `ctx resume`). "What are we pausing?" — hooks. The current - top-level `pause` loses that context. Clarifies the `ctx trigger` vs `ctx - hook` distinction: `trigger` = user-authored scripts, `hook` = plugin-shipped - machinery + its user-facing controls. Future children: `ctx hook status` - (which hooks fired recently), `ctx hook test` (dry-run), `ctx hook event` - (currently `ctx event`). Same rename pattern as previous namespace cleanups. - Spec: specs/cli-namespace-cleanup.md #priority:medium #added:2026-04-11 - #done:2026-04-12 - -### Agents - -- [ ] Add `ctx explore` command — scaffolds `.arch-explorer/` in a workspace - directory with manifest.json, PROMPT.md (from - `hack/agents/architecture-explorer.md`), run-log.md, and a README. Similar to - `ctx init` but for multi-repo architecture exploration. The prompt template - lives in `hack/agents/architecture-explorer.md` and ships embedded. - #priority:low #added:2026-04-13 - -### Runbooks - -- [ ] Create `hack/runbooks/release-checklist.md` — canonical pre-release - sequence: run codebase-audit, docs-semantic-audit, sanitize-permissions, `make - test`, bump version, generate release notes, tag, push. Today this lives in - the operator's head + scattered across docs/operations/. Cross-link with - `_ctx-release` skill. #priority:high #added:2026-04-11 - -- [ ] Create `hack/runbooks/breaking-migration.md` — template for users - upgrading across breaking CLI renames. 
What commands changed, how to - regenerate CLAUDE.md (`ctx init --force`), how to update personal scripts and - hook configs. One instance per breaking release, or a generic template with a - per-release appendix. #priority:medium #added:2026-04-11 - -- [ ] Create `hack/runbooks/hub-deployment.md` — linear runbook for setting up - a ctx Hub for a team: generate admin token, distribute, register clients, - verify sync, configure TLS (when H-01/H-02 land). Consolidates pieces - currently scattered across hub recipes. #priority:medium #added:2026-04-11 - -- [ ] Create `hack/runbooks/new-contributor.md` — onboarding sequence: clone - → `ctx init` → `make build && sudo make install` → verify hooks (`claude - mcp list`) → run first session → verify context persistence. Currently - scattered across README, contributing.md, and setup docs. #priority:medium - #added:2026-04-11 - -- [ ] Create `hack/runbooks/plugin-release.md` — plugin-specific release - procedure: update hooks.json, bump version, test against fresh Claude Code - install, publish to marketplace, verify `claude mcp list` shows updated - version. Not covered by the general release checklist. #priority:low - #added:2026-04-11 - -### Misc - -- [ ] Human: Do a documentation audit for AI-generated artifacts. #important - #not-urgent - -- [ ] Human: test `ctx init` on a fresh ubuntu install. - -- [ ] Improve hub failover client: distinguish auth errors - (Unauthenticated/PermissionDenied) from connection errors. Fail fast on auth - failures instead of cycling through all peers with the same invalid token. - #priority:low #added:2026-04-08-194612 - -- [ ] Add file locking to ctx connect sync state to prevent concurrent sync - races. Two sync processes (hook + manual) can both load the same LastSequence, - process the same entries, and write duplicate content to .context/shared/. 
- #priority:medium #added:2026-04-08-194557 - -- [ ] Fix fanout broadcast entry loss: non-blocking send drops entries to slow - listeners silently. Log when entries are dropped. Consider per-listener - backpressure or disconnect-on-lag. Buffer of 64 is too small for busy hubs. - #priority:medium #added:2026-04-08-194542 - -- [ ] Prevent duplicate client registration in hub store: RegisterClient should - reject if ProjectName already exists. Add token revocation support (delete - client by ID/project). Currently tokens are valid forever with no way to - disable compromised ones. #priority:medium #added:2026-04-08-194529 - -- [ ] Fix hub cluster: NewCluster result is discarded (not stored on Server), so - Raft runs but leadership status is never queryable. Store cluster reference on - Server, wire IsLeader/LeaderAddr into Status RPC and hub status command. - #priority:medium #added:2026-04-08-194511 - -- [ ] Use crypto/subtle.ConstantTimeCompare for hub token validation instead of - string equality. Current Store.ValidateToken uses == which is vulnerable to - timing attacks. Also replace O(n) linear scan with a map[string]*ClientInfo - for O(1) lookup. #priority:high #added:2026-04-08-194458 - -- [ ] Fix silent error suppression in hub: (1) ctx add --share silently ignores - publish failures — warn user on failure, (2) hubsync hook swallows all - errors — log to event system, (3) replication loop drops errors silently — - add structured logging for debug. #priority:high #added:2026-04-08-194443 - -- [ ] Add input validation to hub Publish handler: reject empty ID, validate - Type against allowed set (decision/learning/convention/task), enforce Content - length limit (1MB), require non-empty Origin. Prevents garbage data and DoS - via unbounded content. #priority:high #added:2026-04-08-194430 - -- [ ] Fix ctx connect listen: currently only does initial sync then blocks on - ctx.Done() without ever calling the Listen RPC. 
Must stream entries in - real-time via the server-streaming Listen RPC, writing to .context/shared/ as - entries arrive. #priority:high #added:2026-04-08-194415 - -- [x] Remove any superpowers library references and implement all needed - workflow mechanisms (brainstorm, plan, execute, review, subagent dispatch) - natively in ctx. No external plugin libraries should be used — ctx must be - self-contained. Clean up docs/superpowers/ directory and any remaining - references. #priority:high #added:2026-04-06-121002 #done:2026-04-06 - -- [ ] SMB mount path support: add `CTX_BACKUP_MOUNT_PATH` env var so - `ctx backup` can use fstab/systemd automounts instead of requiring GVFS. - Spec: specs/smb-mount-path-support.md #priority:medium - #added:2026-04-04-010000 - -### Architecture Docs - -- [ ] Publish architecture docs to docs/: copy ARCHITECTURE.md, - DETAILED_DESIGN domain files, and CHEAT-SHEETS.md to docs/reference/. - Sanitize intervention points into docs/contributing/. - Exclude DANGER-ZONES.md and ARCHITECTURE-PRINCIPAL.md (internal only). - Spec: specs/publish-architecture-docs.md #priority:medium - #added:2026-04-03-150000 - -- [ ] Update ctx-architecture skill to append discovered terms to GLOSSARY.md - during Phase 3. Additive only, max 10 terms per run, project-specific only, - alphabetical insertion, skip if GLOSSARY.md empty. Print added terms in - convergence report. Spec: specs/publish-architecture-docs.md #priority:low - #added:2026-04-03-153000 - -### Code Cleanup Findings - - -- [x] Extend flagbind helpers (IntFlag, DurationFlag, DurationFlagP, StringP, - BoolP) and migrate ~50 call sites to unblock TestNoFlagBindOutsideFlagbind - #added:2026-04-01-233250 - -- [ ] Implement journal compaction: Elastic-style tiered storage with tar.gz - backup. 
Spec: specs/journal-compact.md #added:2026-03-31-110005 - -- [x] Refactor 28 grandfathered cmd/ purity violations found by - TestCmdDirPurity: move unexported helpers, exported non-Cmd/Run functions, - and types from cmd/ directories to core/. See grandfathered map in - compliance_test.go for the full list. #priority:medium - #added:2026-03-31-005115 - - -- [x] PD.4.5: Update AGENT_PLAYBOOK.md — add generic "check available skills" - instruction #priority:medium #added:2026-03-25-203340 - -**PD.5 — Validate:** - - -### Phase -3: DevEx - -- [x] Plugin enablement gap: Ref: - `ideas/plugin-enablement-gap.md`. Local-installed plugins get - registered in `installed_plugins.json` but not auto-added to - `enabledPlugins`, so slash commands are invisible in non-ctx - projects. - -- [x] Add cobra Example fields to CLI commands via - examples.yaml #added:2026-03-20-163413 - -- [x] Add CLI YAML drift detection test: verify flag names in - examples.yaml match actual registered flags, and Use: patterns - in commands.yaml match Use constants. Structural linkage is - already tested; this covers content-level drift. Semantic - accuracy (does the description match behavior?) needs periodic - LLM audit — not automatable. #priority:medium #added:2026-04-05 - -- [-] Create ctx-docstrings skill: audit and fix docstrings - against CONVENTIONS.md Documentation section. Superseded by - TestDocCommentStructure compliance test (68 grandfathered). - #added:2026-03-20-163413 - #added:2026-03-16-114445 - -### Phase -2: Task completion nudge: - -- [x] Move 6 grandfathered cross-package MCP types to entity/ #session:cc97cb0d - #branch:main #commit:e8d5c60a #added:2026-04-08-074620 - -- [ ] Design UserPromptSubmit hook that runs `make audit` at - session start and surfaces failures as a consolidation-debt - warning before the agent acts on stale assumptions. - Project-level hook (not bundled in ctx), configurable via - .ctxrc or settings.json. Related: consolidation nudge hook - spec. 
#added:2026-03-23-223500 - -- [ ] Design UserPromptSubmit hook that runs go build and - surfaces compilation errors before the agent acts on stale - assumptions #added:2026-03-23-120136 - -- [ ] Architecture Mapping (Enrichment): - **Context**: Skill that incrementally builds and maintains - ARCHITECTURE.md and DETAILED_DESIGN.md. Coverage tracked in - map-tracking.json. Spec: `specs/ctx-architecture.md` - - [x] Create ctx-architecture-enrich skill: takes existing - /ctx-architecture principal-mode artifacts as baseline, runs - comprehensive enrichment pass via GitNexus MCP (blast radius - verification, registration site discovery, execution flow - tracing, domain clustering comparison, shallow module - deep-dive). Spec: `ideas/spec-architecture-enrich.md`. - Reference implementation: kubernetes-service enrichment pass - 2026-03-25. #added:2026-03-25-120000 - -- [ ]: ctx-architecture-failure-analysis - **Context**: Adversarial analysis skill that identifies where - a codebase will silently betray you. Requires - `ctx-architecture` artifacts as input (ARCHITECTURE.md, - DETAILED_DESIGN*.md, map-tracking.json). Does its own - targeted deep reads focusing on mutation points, shared - mutable state, error swallowing, concurrency, implicit - ordering, missing enforcement, and scaling cliffs. Uses - available tooling (GitNexus, Gemini Search) to - cross-reference patterns. - - Produces `DANGER-ZONES.md` — a ranked inventory of silent - failure points with: location, failure mode, blast radius, - detection gap, and suggested fix. Two tiers: "most likely to - cause production incidents" and "less likely but equally - dangerous." - - Distinct from a security threat model (which would be - `ctx-threat-model` — a separate skill for auth bypass, - injection, privilege escalation, supply chain). 
This skill - focuses on correctness: race conditions, ordering - assumptions, cache staleness, fan-out amplification, - non-atomic ownership, inverted logic, force-delete orphans, - global state mutation. - - - [x] Design SKILL.md for ctx-architecture-failure-analysis: - inputs (architecture artifacts), analysis phases, output - format (DANGER-ZONES.md), quality checklist - #added:2026-03-25-060000 - - [x] Define the adversarial analysis framework: categories - of silent failure (concurrency, ordering, cache, - amplification, ownership, error swallowing, global state) - with heuristics for each #added:2026-03-25-060000 - - [x] Implement skill with GitNexus integration: use impact - analysis for blast radius estimation, use context for - shared-state detection #added:2026-03-25-060000 - - [x] Add Gemini Search integration: cross-reference - discovered patterns against known failure modes in similar - systems. #added:2026-03-25-060000 - -- [-] ctx-architecture-extend - Skipped: extension point analysis is covered by /ctx-architecture - DETAILED_DESIGN (per-module) and /ctx-architecture-enrich - (registration sites). A fourth skill fragments the pipeline - without enough distinct value. Three is the right number: - map, enrich, hunt. - **Context**: Companion to `ctx-architecture` and - `ctx-failure-analysis`, completing a trilogy: how does it - work → where will it break → where does it grow. Reads - architecture artifacts → identifies registration patterns - (interfaces, factory functions, plugin systems, ordered - slices, scheme registrations) → traces recent additions via - git log to confirm which extension points are actually used - → produces `EXTENSION-POINTS.md` ranked by frequency, with - exact file locations, function signatures, and the typical - feature pattern (e.g., "most features require a variable + - a mutator + a machine-agent task"). 
- - Valuable for onboarding ("I need to add feature X, where do - I start?") and architecture review ("are we adding features - in the right places?"). - - - [-] Design SKILL.md for ctx-extension-map - Skipped: parent task skipped. - #added:2026-03-25-062000 - - [-] Define extension point detection heuristics - Skipped: parent task skipped. - #added:2026-03-25-062000 - - [-] Add git log frequency analysis - Skipped: parent task skipped. - #added:2026-03-25-062000 - - [-] Integrate with GitNexus for registration sites - Skipped: parent task skipped. - #added:2026-03-25-062000 - -### Phase CT: Companion Tool Integration - -Session-start checks, suppressibility, and registry for companion MCP tools. - -- [ ] ctx-remember preflight: verify ctx binary in PATH, - plugin installed and enabled, binary version matches plugin - version #priority:medium #added:2026-03-25-234514 - -- [ ] Design suppressible companion check system: .ctxrc - configures which companion tools to check (one search MCP, - one graph MCP), smoke tests only run for configured tools, - not auto-discovered. Keeps bootstrap fast and predictable. - #priority:medium #added:2026-03-25-234516 - -- [ ] Add per-tool suppression for ctx-remember checks: allow - suppressing individual preflight checks (ctx binary, plugin, - search MCP, graph MCP) via .ctxrc fields, not just - companion_check: false blanket toggle - #priority:low #added:2026-03-25-234518 - -### Phase CLI-FIX: CLI Infrastructure Fixes - -- [x] Bug: ctx add task appends to the last Phase section instead of a dedicated - location. Tasks added via CLI land inside whatever Phase happens to be last in - TASKS.md, breaking Phase structure. Fix: add mandatory --section flag to ctx - add - task. If the named section does not exist, create it. If --section is - omitted, error with message. Heading level fixed from ## to ### to match - TASKS.md structure. 
- #priority:high #added:2026-03-25-234813 #done:2026-04-06 - -### Phase BLOG: Blog Posts - -- [ ] Write blog post about architecture analysis + enrichment two-pass design - after dogfooding run on ctx itself. Cover: the 5.2x depth observation, - constraint-as-feature principle, watermelon-rind anti-pattern, and results - from the ctx self-analysis. #priority:medium #added:2026-03-25-233650 - -- [ ] Blog post: "Writing a CONSTITUTION for your AI agent" — showcase ctx's - CONSTITUTION.md as a pattern for hard invariants that agents cannot violate. - Cover: why advisory rules fail (agents game qualifiers), what belongs in a - constitution vs conventions, the spec-at-commit enforcement story from this - session, examples of good rules (absolute, binary, no interpretation needed). - Include a recipe for writing your own. - #priority:medium #added:2026-03-27-115500 - -- [ ] Recipe: "How to write a good CONSTITUTION.md" — practical guide with - categories (security, quality, process, structure), anti-patterns (vague - qualifiers, unenforced rules), enforcement mechanisms (hooks, commit gates), - and a starter template. #priority:medium #added:2026-03-27-115500 - -- [ ] Import grouping compliance test: parse all .go files, verify imports - follow stdlib — external — ctx three-group ordering. Add to - internal/compliance/. Catches violations that goimports misses (it merges - external and ctx into one group). #priority:medium #added:2026-03-27-120000 - -- [ ] drift check should notify if claude permissions have insecure stuff in it. - -- [ ] task: sync workspace to ARI_INBOX - -### Phase -1: Hack Script Absorption - -Absorb remaining `hack/` scripts into Go subcommands. Eliminates shell -dependencies, improves portability, and makes the skill layer call `ctx` -directly instead of `make` targets. - -### Phase 0.9: Suppress Nudges After Wrap-Up - -Spec: `specs/suppress-nudges-after-wrap-up.md`. Read the spec before starting -any P0.9 task. 
- -**Phase 3 — Skill integration:** - -- [-] P0.9.2: Split cli-reference.md — moved to Future - #added:2026-02-24-204208 - -- [-] P0.9.3: Investigate proactive content suggestions — moved to Future - #added:2026-02-24-185754 - -### Phase 0.8: RSS/Atom Feed Generation (`ctx site feed`) - -Spec: `specs/rss-feed.md`. Read the spec before starting any P0.8 task. - -### Phase 0.4: Hook Message Templates - -Spec: `specs/future-complete/hook-message-templates.md`. Read the spec before -starting any P0.4 task. - -**Phase 2 — Discoverability + documentation:** - -Spec: `specs/future-complete/hook-message-customization.md`. - -- [ ] Migrate hook message templates from .txt files to YAML - localization #added:2026-03-20-163801 - -### Phase 0.4.9: Injection Oversize Nudge - -Spec: `specs/injection-oversize-nudge.md`. Read the spec before starting -any P0.4.9 task. - -### Phase 0.4.10: Context Window Token Usage - -Spec: `specs/context-window-usage.md`. Read the spec before starting any -P0.4.10 task. - -### Phase 0.5 Cleanup - -* Human: internal/recall/parser requires a serious refactoring; for example - the parser object and its private and public methods need to go to its own - package and other helper functions need to go to a different adjacent package. -* Human: internal/notify/notify.go requires refactoring (all functions bagged in - one file; types need to go to types.go per convention etc etc) -* Human: split err package into sub packages. 
- - -- [ ] Refactor site/cmd/feed: extract helpers and types to core/, make Run - public #added:2026-03-21-074859 - -- [ ] Add Use* constants for all cobra subcommand Use - strings #added:2026-03-20-184639 - -- [ ] Systematic audit: extract all magic flag name strings across CLI commands - into config/flag constants #added:2026-03-20-175155 - -- [-] Move generic string helpers from cli/add/core/strings.go to - internal/format — file no longer exists, helpers already moved or deleted - #added:2026-03-20-175046 - -- [ ] Add missing flag name constants (priority, section, file) and priority - level constants (high, medium, low) to config/flag #added:2026-03-20-170842 - -### Phase 0: Ideas - -**User-Facing Documentation** (from `ideas/done/REPORT-7-documentation.md`): -Docs are feature-organized, not problem-organized. Key structural improvements: - -**Agent Team Strategies** (from `ideas/REPORT-8-agent-teams.md`): -8 team compositions proposed. Reference material, not tasks. Key takeaways: - -- [ ] Scan all config/**/* constants and catalog which ones should be ctxrc - entries for user configurability #priority:medium #added:2026-03-22-095552 - -- [ ] Update user-facing documentation for changed CLI flag - shorthands #added:2026-03-21-102755 - -- [ ] Add Unicode-aware slugification for non-ASCII - content #added:2026-03-21-070953 - -- [ ] Make TitleSlugMaxLen configurable via .ctxrc #added:2026-03-21-070944 - -- [ ] Spec and implement CRLF-to-LF newline normalization for journal and - context files #added:2026-03-20-224845 - -- [ ] Test ctx on Windows — validate build, init, agent, drift, journal - pipeline #added:2026-03-20-224835 - -- [ ] Evaluate Windows support for sysinfo.Collect and path - handling #added:2026-03-20-194930 - -- [ ] Make doctor thresholds configurable via .ctxrc #added:2026-03-20-194923 - -- [ ] Evaluate cross-platform path handling in change/core/scan.go — git - always - uses "/" but UniqueTopDirs should consider filepath.ToSlash for Windows - 
robustness #added:2026-03-20-182103 - -- [ ] Replace English-only Pluralize helper in change/core/detect.go with - i18n-safe approach #added:2026-03-20-180502 - -- [ ] Replace ASCII-only alnum check in agent/core/score.go with - unicode.IsLetter/IsDigit #added:2026-03-20-175943 - -### Phase S-0: Memory Bridge Groundwork - -Prerequisites that unblocked the memory bridge phases. - - -### Phase MB: Memory Bridge Foundation (`ctx memory`) - -Spec: `specs/memory-bridge.md`. Read the spec before starting any MB task. - -Bridge Claude Code's auto memory (MEMORY.md) into `.context/` with discovery, -mirroring, and drift detection. Foundation for future import/publish phases. - -### Phase MI: Memory Import Pipeline (`ctx memory import`) - -Spec: `specs/memory-import.md`. Read the spec before starting any MI task. - -Import entries from Claude Code's MEMORY.md into structured `.context/` files -using heuristic classification. Builds on Phase MB foundation (discover, -mirror, state). - -- [-] MI.future: `--interactive` mode for agent-assisted classification — - skipped: `--dry-run` covers review; agents can use `ctx add` directly for - overrides; interactive CLI prompts don't compose with agent workflows - -### Phase S-3: Blog Post — "Agent Memory is Infrastructure" - -Spec: `specs/blog-agent-memory-infrastructure.md`. - - -### Phase MP: Memory Publish (`ctx memory publish`) - -Spec: `specs/memory-publish.md`. Read the spec before starting any MP task. - -Push curated context from `.context/` into Claude Code's MEMORY.md so the agent -sees structured project context on session start without needing hooks. - -### Phase 9: Context Consolidation Skill `#priority:medium` - -**Context**: `/ctx-consolidate` skill that groups overlapping entries by keyword -similarity and merges them with user approval. Originals archived, not deleted. 
-Spec: `specs/context-consolidation.md` -Ref: https://github.com/ActiveMemory/ctx/issues/19 (Phase 3) - -- [ ] Implement consolidation nudge hook: count sessions since last - consolidation, nudge after 6. Spec: - `specs/consolidation-nudge-hook.md` #added:2026-03-23-223000 - -- [ ] Auto-record consolidation baseline commit: `/ctx-consolidate` and `ctx - system mark-consolidation` should stamp HEAD hash + date into - `.context/state/consolidation.json` only on first invocation (write-once until - reset). Subsequent consolidation sessions preserve the original baseline. The - baseline resets only when the consolidation nudge counter resets (i.e., when a - new feature cycle begins). This way multi-pass consolidation keeps the true - starting point. Related: - `specs/consolidation-nudge-hook.md` #added:2026-03-23-224000 - -### Phase EM: Extension Map Skill (`/ctx-extension-map`) - -question: is this done; or needs planning? - -### Phase WC: Write Consolidation - -Baseline commit: `4ec5999` (Auto-prune state directory on session start). -Goal: consolidate user-facing messages into `internal/write/` as the central -output package. All CLI commands should route printed output through -this package. - -- [ ] Migrate moc.go hardcoded strings to YAML or Go - templates #added:2026-03-20-214922 - -- [ ] Design terminal-aware truncation for CLI output #added:2026-03-20-184509 - -### Phase SP: Configurable Session Prefixes - -Spec: `specs/session-prefixes.md`. Read the spec before starting any SP task. - -Replace hardcoded `session_prefix` / `session_prefix_alt` pair with a -user-extensible `session_prefixes` list in `.ctxrc`. Parser vocabulary -is not i18n text — it belongs in runtime config. - -### Phase EH: Error Handling Audit - -Systematic audit of silently discarded errors across the codebase. -Many call sites use `_ =` or `_, _ =` to discard errors without -any feedback. Some are legitimate (best-effort cleanup), most are -lazy escapes that hide failures. 
- - -- [ ] EH.1: Catalogue all silent error discards — recursive walk of - `internal/` - for patterns: `_ = `, `_, _ = `, `//nolint:errcheck`, bare `return` after - error-producing calls. Group by category: - (a) file close in defer — often legitimate but should log on failure - (b) file write/read — data loss risk, must surface - (c) os.Remove/Rename — state corruption risk - (d) fmt.Fprint to stderr — truly best-effort, acceptable - Commands: `grep -rn '_ =' internal/`, `grep -rn - 'nolint:errcheck' internal/` - Output: spreadsheet in `.context/` with file, line, expression, category, - and recommended action (log-stderr, return-error, acceptable-as-is). - DoD: every `_ =` in the codebase is categorised and has a - recommended action - #priority:high #added:2026-03-14 - -- [ ] EH.2: Address category (b) — file write/read discards. These risk silent - data loss. Fix: return the error, or at minimum emit to stderr with - `fmt.Fprintf(os.Stderr, "ctx: ...: %v\n", err)` following the pattern - established in `internal/log/event.go`. - DoD: no write/read error is silently discarded - #priority:high #added:2026-03-14 - -- [ ] EH.3: Address category (a) — file close in defer. Most are `defer func() - { _ = f.Close() }()`. For read-only files, close errors are rare but - should still surface. For write/append files, close can fail if the - final flush fails — these are data loss. Fix: `if err := f.Close(); - err != nil { fmt.Fprintf(os.Stderr, "ctx: close %s: %v\n", path, err) }`. - DoD: all defer-close sites log failures to stderr - #priority:medium #added:2026-03-14 - -- [ ] EH.4: Address category (c) — os.Remove/Rename discards. These are state - operations (rotation, pruning, temp file cleanup). Silent failure leaves - stale state. Fix: stderr warning at minimum; for rotation/rename, consider - returning the error. 
- DoD: no Remove/Rename error is silently discarded - #priority:medium #added:2026-03-14 - -- [ ] EH.5: Validate — `grep -rn '_ =' internal/` returns only category (d) - entries (fmt.Fprint to stderr) and entries explicitly annotated as - acceptable. Run `make lint && make test` to confirm no regressions. - DoD: grep output is clean or fully annotated; CI green - #priority:high #added:2026-03-14 - -- [ ] Add AST-based lint test to detect exported functions with no external - callers #added:2026-03-21-070357 - -- [ ] Audit exported functions used only within their own package and make them - private #added:2026-03-21-070346 - -- [ ] Audit and remove side-effect output from error-returning - functions #added:2026-03-20-212212 - -### Phase ET: Error Package Taxonomy (`internal/err/`) - -`errors.go` is 1995 lines with 188 functions in a single file. Split into -domain-grouped files. No API changes — same package, same function signatures, -just file reorganization. - -Taxonomy (from prefix analysis): - -| File | Prefixes / Domain | ~Count | -|--------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------| -| `memory.go` | Memory*, Discover* | 17 | -| `parser.go` | Parser* | 7 | -| `crypto.go` | Crypto*, Encrypt*, Decrypt*, GenerateKey, SaveKey, LoadKey, NoKeyAt | 14 | -| `task.go` | Task*, NoTaskSpecified, NoTaskMatch, NoCompletedTasks | 8 | -| `journal.go` | LoadJournalState*, SaveJournalState*, ReadJournalDir, NoJournalDir, NoJournalEntries, ScanJournal, UnknownStage, StageNotSet | 10 | -| `session.go` | Session*, FindSessions, NoSessionsFound, All*, Ambiguous* | 8 | -| `pad.go` | Edit*, Blob*, ReadScratchpad, OutFlagRequiresBlob, NoConflict*, Resolve* | 10 | -| `recall.go` | Reindex*, Stats*, EventLog* | 6 | -| `fs.go` | Read*, Write*, Open*, Stat*, File*, Mkdir*, CreateDir, DirNotFound, NotDirectory, Boundary* | 30 | -| `backup.go` | 
Backup*, CreateBackup*, CreateArchive* | 6 | -| `prompt.go` | Prompt*, NoPromptTemplate, ListTemplates, ReadTemplate, NoTemplate | 7 | -| `hook.go` | Embedded*, Override*, UnknownHook, UnknownVariant, MarkerNotFound | 6 | -| `skill.go` | Skill* | 2 | -| `config.go` | UnknownProfile, ReadProfile, UnknownFormat, UnknownProjectType, InvalidTool, UnsupportedTool, NotInitialized, ContextNotInitialized, ContextDirNotFound, FlagRequires* | 12 | -| `errors.go` | Remaining general-purpose: WorkingDirectory, CtxNotInPath, ReadInput, InvalidDate*, Reminder*, Drift*, Git*, Webhook*, etc. | ~25 | - -- [ ] Add freshness_files to .ctxrc defaults seeded by ctx init — currently - the - freshness config is only in the gitignored .ctxrc, so new clones don't get it. - Consider a .ctxrc.defaults pattern or seeding via ctx init template. - #priority:medium #added:2026-03-14-105143 - -- [ ] SEC.1: Security-sensitive file change hook — PostToolUse on Edit/Write - matching security-critical paths (.claude/settings.local.json, - .claude/settings.json, CLAUDE.md, .claude/CLAUDE.md, - .context/CONSTITUTION.md). Three actions: (1) nudge user in-session, (2) relay - to webhook for out-of-band alerting (autonomous loops), (3) append to - dedicated security log (.context/state/security-events.jsonl) for forensics. - Separate from general event log. Spec needed. #priority:high #added:2026-03-13 - -- [ ] O.5: Session timeline view — add --sessions flag to ctx system events. - Per-session breakdown of eval/fired counts with hook list. See - ideas/spec-hook-observability.md Phase 5 #added:2026-03-12-145401 - -- [ ] O.4: Doctor hook health check — surface hook activity in ctx doctor - output - (active/evaluated-never-fired/never-evaluated). See - ideas/spec-hook-observability.md Phase 4 #added:2026-03-12-145401 - -- [ ] O.3: Skip reason logging — add eventlog.Skip() with standard reason - constants (paused, throttled, condition-not-met). Instrument 19 hook - early-exit paths. 
See ideas/spec-hook-observability.md Phase - 3 #added:2026-03-12-145401 - -- [ ] O.2: Event summary view — add --summary flag to ctx system events. - Aggregates eval/fired counts per hook, shows last-eval/last-fired timestamps, - lists never-evaluated hooks. See ideas/spec-hook-observability.md Phase - 2 #added:2026-03-12-145401 - -- [ ] O.1: Hook eval logging — wrap hook cobra commands to log 'eval' events - on - every invocation. Refactor Run() signatures from os.Stdin to io.Reader - (peek+replay pattern). Adds eventlog.Eval(), EventTypeEval constant. See - ideas/spec-hook-observability.md Phase 1 #added:2026-03-12-145401 - -- [ ] Companion intelligence recommendation: implement spec from - ideas/spec-companion-intelligence.md — ctx doctor companion detection, ctx - init recommendation tip, ctx agent awareness in - packets #added:2026-03-12-133008 - -- [ ] Add configurable assets layer: allow users to plug their own YAML files - for localization (language selection, custom text overrides). Currently all - user-facing text is hardcoded in commands.yaml; need a mechanism to load - user-provided YAML that overlays or replaces built-in text. This enables i18n - without forking. #priority:low #added:2026-03-07-233756 - -- [-] Cleanup internal/cli/system/core/persistence.go: move 10 (base for - ParseInt) to config constant — not actionable, 10 is stdlib decimal base - convention, not a magic number #priority:low #added:2026-03-07-220825 - -- [-] Cleanup internal/cli/system/core/session_tokens.go: move SessionStats from - state.go to types.go — file and type no longer exist, refactored away - #priority:low #added:2026-03-07-220825 - - - - -- [ ] SMB mount path support: add `CTX_BACKUP_MOUNT_PATH` env var so - `ctx backup` can use fstab/systemd automounts instead of requiring GVFS. - Spec: specs/smb-mount-path-support.md #priority:medium - #added:2026-04-04-010000 - -- [ ] Make AutoPruneStaleDays configurable via ctxrc. 
Currently hardcoded to 7 - days in config.AutoPruneStaleDays; add a ctxrc key (e.g., auto_prune_days) and - fallback to the default. #priority:low #added:2026-03-07-220512 - -- [ ] Refactor check_backup_age/run.go: move consts (lines 23-24) to config, - magic directories (line 59) to config, symbolic constants for strings (line - 72), messages to assets (lines 79, 90-91), extract non-Run functions to - system/core, fix docstrings #priority:medium #added:2026-03-07-180020 - -- [ ] Add ctxrc support for recall.list.limit to make the default --limit for - recall list configurable. Currently hardcoded as config.DefaultRecallListLimit - (20). #priority:low #added:2026-03-07-164342 - -- [ ] Extract journal/core into a standalone journal parser package — - functionally isolated enough for its own package rather than remaining as - core/ #added:2026-03-07-093815 - -- [ ] Move PluginInstalled/PluginEnabledGlobally/PluginEnabledLocally from - initialize to internal/claude — these are Claude Code plugin detection - functions, not init-specific #added:2026-03-07-091656 - -- [ ] Move guide/cmd/root/run.go text to assets, listCommands to separate file + - internal/write #added:2026-03-07-090322 - -- [ ] Move drift/core/sanitize.go strings to assets #added:2026-03-07-090322 - -- [ ] Move drift/core/out.go output functions to internal/write per - convention #added:2026-03-07-090322 - -- [ ] Move drift/core/fix.go fmt.Sprintf strings to assets — user-facing - output - text for i18n #added:2026-03-07-090322 - -- [ ] Move drift/cmd/root/run.go cmd.Print* output strings to internal/write per - convention #added:2026-03-07-084152 - -- [ ] Extract doctor/core/checks.go strings — 105 inline Name/Category/Message - values to assets (i18n) and config (Name/Category - constants) #added:2026-03-07-083428 - -- [ ] Split deps/core builders into per-ecosystem packages — go.go, node.go, - python.go, rust.go are specific enough for their own packages under deps/core/ - or deps/builders/ 
#added:2026-03-07-082827 - -- [ ] Audit git graceful degradation — verify all exec.Command(git) call sites - degrade gracefully when git is absent, per project guide - recommendation #added:2026-03-07-081625 - -- [ ] Fix 19 doc.go quality issues: system (13 missing subcmds), agent (phantom - refs), load/loop (header typo), claude (stale migration note), 13 minimal - descriptions (pause, resume, task, notify, decision, learnings, remind, - context, eventlog, index, rc, recall/parser, - task/core) #added:2026-03-07-075741 - -- [ ] Move cmd.Print* output strings in compact/cmd/root/run.go to - internal/write per convention #added:2026-03-07-074737 - -- [ ] Extract changes format.go rendering templates to assets — headings, - labels, and format strings are user-facing text for - i18n #added:2026-03-07-074719 - -- [ ] Lift HumanAgo and Pluralize to a common package — reusable time - formatting, used by changes and potentially - status/recall #added:2026-03-07-074649 - -- [ ] Extract isAlnum predicate for localization — currently ASCII-only in - agent - keyword extraction (score.go:141) #added:2026-03-07-073900 - -- [ ] Make stopwords configurable via .ctxrc — currently embedded in assets, - domain users need custom terms #added:2026-03-07-073900 - -- [ ] Make recency scoring thresholds and relevance match cap configurable via - .ctxrc — currently hardcoded in config (7/30/90 days, cap - 3) #added:2026-03-07-073900 - -- [ ] Make DefaultAgentCooldown configurable via .ctxrc — currently hardcoded - at - 10 minutes in config #added:2026-03-07-073106 - -- [ ] Make TaskBudgetPct and ConventionBudgetPct configurable via .ctxrc — - currently hardcoded at 0.40 and 0.20 in config #added:2026-03-07-072714 - -- [ ] Localization inventory: audit config constants, write package templates, - and assets YAML for i18n mapping — low priority, most users are - English-first - developers #added:2026-03-06-192419 - -- [ ] Consider indexing tasks and conventions in TASKS.md and 
CONVENTIONS.md - (currently only decisions and learnings have index - tables) #added:2026-03-06-190225 - -- [ ] Implement journal compaction: Elastic-style tiered storage with tar.gz - backup. Spec: specs/journal-compact.md #added:2026-03-31-110005 - -- [ ] Validate .ctxrc against ctxrc.schema.json at load time — schema is - embedded but never enforced, doctor does field-level checks without using - it #added:2026-03-06-174851 - - -- [ ] Add PostToolUse session event capture. Append lightweight event records - (tool name, files touched, timestamp) to .context/state/session-events.jsonl - on significant PostToolUse events (file edits, git operations, errors). Not - SQLite — just JSONL append. This feeds the PreCompact snapshot hook with - richer input so it can report what the agent was actively working on, not just - static file state. #added:2026-03-06-185126 - -- [ ] Add next-step hints to ctx agent and ctx status output. Append actionable - suggestions based on context health (e.g. stale tasks, high completion ratio, - drift findings). Pattern learned from GitNexus self-guiding agent - workflows. #added:2026-03-06-184829 - -- [ ] Implement PreCompact and SessionStart hooks for session continuity across - compaction. Wire ctx agent --budget 4000 to both events: PreCompact outputs - context packet before compaction so compactor preserves key info; SessionStart - re-injects context packet so fresh/post-compact sessions start oriented. Two - thin ctx system subcommands, two entries in hooks.json. See - ideas/gitnexus-contextmode-analysis.md for design - rationale. #added:2026-03-06-184825 - -- [ ] Audit fatih/color removal across ~35 files — removed from recall/run.go, - recall/lock.go, write/validate.go; ~30 files remain. Separate consolidation - pass. #added:2026-03-06-050140 - -- [ ] Audit remaining 2006-01-02 usages across codebase — 5+ files still use - the - literal instead of config.DateFormat. Incremental - migration. 
#added:2026-03-06-050140 - -- [ ] WC.2: Audit CLI packages for direct fmt.Print/Println usage — candidates - for migration #added:2026-03-06 - -### Phase WC2: Write Output Block Consolidation - -Spec: `specs/write-output-consolidation.md`. Read the spec before starting any -WC2 task. - -Consolidate multi-line imperative `cmd.Println` sequences in `internal/write/` -into pre-computed single-print block patterns. Separates conditional logic from -I/O and replaces 4-8 individual Tpl\* constants per function with one -block template. - -- [ ] WC2.1: Tier 1 — Consolidate multi-line functions with no conditionals: - `InfoInitNextSteps`, `InfoObsidianGenerated`, `InfoJournalSiteGenerated`, - `InfoDepsNoProject`, `ArchiveDryRun`, `ImportScanHeader`. Add `TplXxxBlock` - YAML entries, wire through embed.go + config.go, remove replaced individual - constants. #added:2026-03-17 -- [ ] WC2.2: Tier 2a — Consolidate conditional functions in info.go: - `InfoLoopGenerated` (pre-compute iterLine). Prove the pre-computation pattern - on the function that motivated this spec. #added:2026-03-17 -- [ ] WC2.3: Tier 2b — Consolidate conditional functions in - sync/recall/notify: - `SyncResult`, `CtxSyncHeader`, `CtxSyncAction`, `SessionMetadata`, - `TestResult`, `SyncDryRun`, `PruneSummary`. Each needs 1-3 pre-computed - strings before the single print call. #added:2026-03-17 -- [ ] WC2.4: Constant cleanup — verify all replaced individual `TplXxx*` - config - vars, `TextDescKey*` constants, and YAML entries are removed. Run `make lint` - and `go test ./internal/write/...` to confirm no - regressions. #added:2026-03-17 -- [ ] WC2.5: Update CONVENTIONS.md — add a "Write Package Output" subsection - documenting the pre-compute-then-print pattern for future functions with 4+ - Printlns and conditionals. 
#added:2026-03-17 - -## MCP-related - -### Phase MCP-V3: MCP v0.3 Expansion - -- [ ] Add drift check: MCP prompt coverage vs bundled skills — programmatic - check comparing config/mcp/prompt constants against assets.ListSkills() to - detect skills without MCP prompt equivalents. Pair with the tool coverage - drift check. @CoderMungan #priority:medium #added:2026-03-15-120519 - -- [ ] MCP v0.3: expand MCP prompts to cover more skills — current 5 prompts - (session-start, add-decision, add-learning, reflect, checkpoint) are a subset - of ~30 bundled skills. Evaluate which skills benefit from protocol-native MCP - prompt equivalents. Decision 2026-03-06 established 'Skills stay CLI-based; - MCP Prompts are the protocol equivalent.' @CoderMungan - #priority:medium #added:2026-03-15-120519 - -- [ ] Add drift check: MCP tool coverage vs CLI commands — programmatic check - that compares registered MCP tool names (config/mcp/tool) against ctx CLI - subcommands to detect newly added CLI commands without MCP equivalents. Could - be a drift detector check or a compliance test. @CoderMungan - #priority:medium #added:2026-03-15-120116 - -- [ ] MCP v0.3: expose additional CLI commands as MCP tools — candidates: - ctx_load (full context packet), ctx_agent (token-budgeted packet), ctx_reindex - (rebuild indices), ctx_sync (reconcile docs/code), ctx_doctor (health check). - Evaluate which provide value over the protocol vs requiring terminal - interaction. @CoderMungan #priority:medium #added:2026-03-15-120025 - -- [ ] Make MCP defaults configurable via .ctxrc — add mcp_recall_limit, - mcp_truncate_len, mcp_truncate_content_len, mcp_min_word_len, - mcp_min_word_overlap fields to .ctxrc schema; expose via rc.MCP*() with - fallback to config/mcp/cfg defaults; update tools.go to read from rc instead - of cfg constants. @CoderMungan #priority:medium #added:2026-03-15-114700 - -- [ ] MCP tools.go cleanup pass: magic strings, duplicated fragments, nested - templates. 
Lines: 461:481 + 186:196 duplicated code; 335 magic number; 382:385 - nested TextDescs → single template; 390+851 magic time literal; 443+499+800 - magic words; 557+892+902 magic numbers; 590+638 nested TextDesc templating; - 820 prefixed %s; 854 suffix %s #priority:high #added:2026-03-15-110429 - -### Phase MCP-SAN: MCP Server Input Sanitization - -[ ] Assignee: @CoderMungan -- https://github.com/ActiveMemory/ctx/issues/49 - -### Phase MCP-COV: MCP Test Coverage - -[ ] Assignee: @CoderMungan -- https://github.com/ActiveMemory/ctx/issues/50 - -## Later - -### Phase PR: State Pruning (`ctx system prune`) - -Clean stale per-session state files from `.context/state/`. Files with UUID -session ID suffixes accumulate ~6-8 per session with no cleanup. Strategy: -age-based — prune files older than N days (default 7). - -- [ ] Regenerate site/ for state-maintenance recipe - (docs/recipes/state-maintenance.md added but site not - rebuilt) #added:2026-03-05-205425 - -- [ ] Audit remaining global tombstones for session-scoping: - backup-reminded, ceremony-reminded, check-knowledge, - journal-reminded, version-checked, ctx-wrapped-up all have - the same cross-session suppression bug as - memory-drift-nudged #added:2026-03-05-205425 - -- [ ] F.2: ctx journal import (remote) — import Claude Code - session JSONLs from local or remote (~/.claude/projects/) - into local ~/.claude/projects/. Pure Go: local copy with - os.CopyFS-style walk, remote via os/exec ssh+scp (no rsync - dependency). --source flag accepts local path or user@host. - --dry-run shows what would be copied. Skips existing files - (content-addressed by UUID filenames). Enables journal export - from sessions that ran on other machines. - #added:2026-03-05-141912 - -- [ ] P0.5: Blog: "Building a Claude Code Marketplace Plugin" - — narrative from session history, journals, and git diff of - feat/plugin-conversion branch. 
Covers: motivation (shell - hooks to Go subcommands), plugin directory layout, - marketplace.json, eliminating make plugin, bugs found during - dogfooding (hooks creating partial .context/), and the fix. - Use /ctx-blog-changelog with branch diff as source material. - #added:2026-02-16-111948 -- [ ] P9.2: Test manually on this project's LEARNINGS.md (20+ entries). - #priority:medium #added:2026-02-19 -- [ ] P0.8.1: Install golangci-lint on the integration server #for-human - #priority:medium #added:2026-02-23 #added:2026-02-23-170213 -- [ ] PM.3: Review hook diagnostic logs after a long session. Check - `.context/logs/check-persistence.log` and - `.context/logs/check-context-size.log` to verify hooks fire correctly. - Tune nudge frequency if needed. #priority:medium #added:2026-02-09 -- [ ] PM.4: Run `/consolidate` to address codebase drift. Considerable drift has - accumulated (predicate naming, magic strings, hardcoded permissions, - godoc style). #priority:medium #added:2026-02-06 -- [ ] Improve test coverage for core packages at 0% #added:2026-03-20-164324 - -- [ ] PM.7: Aider/Cursor parser implementations: the recall architecture was - designed for extensibility (tool-agnostic Session type with - tool-specific parsers). Adding basic Aider and Cursor parsers would - validate the parser interface, broaden the user base, and fulfill - the "works with any AI tool" promise. Aider format is simpler than - Claude Code's. #priority:medium #source:report-6 #added:2026-02-17 - -## Future - -- [ ] P0.8.5: Enable webhook notifications in worktrees. Currently `ctx notify` - silently fails because `.context.key` is gitignored and absent in - worktrees. For autonomous runs with opaque worktree agents, notifications - are the one feature that would genuinely be useful. Possible approaches: - resolve the key via `git rev-parse --git-common-dir` to find the main - checkout, or copy the key into worktrees at creation time (ctx-worktree - skill). 
#priority:medium #added:2026-02-22 -- [ ] P0.9.2: Split cli-reference.md (1633 lines) into command group pages: - cli-overview, cli-init-status, cli-context, cli-recall, cli-tools, - cli-system — - each page covers a natural command group with its subcommands and flags - #added:2026-02-24-204208 -- [ ] P0.9.3: Investigate proactive content suggestions: - docs/recipes/publishing.md claims - agents suggest blog posts and journal rebuilds at natural moments, but no hook - or playbook mechanism exists to trigger this — either wire it up (e.g. - post-task-completion nudge) or tone down the docs to match reality - #added:2026-02-24-185754 -- [ ] PG.1: Add agent/tool compatibility matrix to prompting guide — - document which - patterns degrade gracefully when agents lack file access, CLI tools, or - ctx integration. Treat as a "works best with / degrades to" table. - #priority:medium #added:2026-02-25 -- [ ] PG.2: Add versioning/stability note to prompting guide — "these - principles are - stable; examples evolve" + doc date in frontmatter. Needed once the guide - becomes canonical and people start quoting it. - #priority:low #added:2026-02-25 -- [ ] P0.1: Brainstorm: Standardize drift-check comment format and - integrate with - `/ctx-drift` — formalize ad-hoc `` markers, teach - drift skill to parse/execute them, publish pattern in docs/recipes. Benefits - tooling/CLI but AI handles ad-hoc fine for now. - #priority:medium #added:2026-02-28 -- [ ] F.1: MCP server integration: expose context as tools/resources via Model - Context Protocol. Would enable deep integration with any - MCP-compatible client. #priority:low #source:report-6 -- [ ] Q.1: Docstring cross-reference audit — compliance test that - flags docstrings - mentioning domains that don't match their callers. Start with `write/**`, - extend to all `internal/`. 
Spec: `specs/docstring-cross-reference-audit.md` - #priority:medium #added:2026-03-17 - -- [ ] Migrate Sprintf-based templates (tpl_*.go) to Go text/template or embedded - template files — ObsidianReadme, LoopScript, and other multi-line format - strings that can't move to YAML #added:2026-03-18-163629 - -- [ ] Split internal/assets/embed_test.go — tests that call read/ packages - must - move to their respective read/ package to avoid import - cycles #added:2026-03-18-192914 - -- [ ] Improve recall/core format tests — replace hardcoded string assertions - (e.g. Contains Tokens) with semantic checks that verify structure and values, - not label text #added:2026-03-19-194645 - -### Phase BT: Build Tooling — `cmd/ctxctl` - -Replace shell-based build scripts (Makefile shell -expansions, `hack/build-all.sh`, -`hack/release.sh`, `hack/tag.sh`, `sync-*`/`check-*` targets) with a first-class -Go binary at `cmd/ctxctl`. Shares internal packages with `ctx` (version, assets, -embed FS). Installable: `go -install github.com/ActiveMemory/ctx/cmd/ctxctl@latest`. -Eliminates `jq` build dependency. Testable, cross-platform. - -- [ ] Bug: release script versions.md table insertion fails silently. The sed - pattern on line 133 uses `$` anchor but the actual Markdown table header has - column padding spaces before the trailing `|`. The row is never inserted. Fix: - relax the header match pattern or switch to a simpler approach (e.g., insert - after the separator line directly). Also verify the "latest stable" sed - handles trailing `).\n` correctly. #priority:high #added:2026-03-23-221500 - -- [ ] Replace hack/lint-drift.sh with AST-based Go tests in internal/audit/. - Spec: `specs/ast-audit-tests.md` #added:2026-03-23-210000 - -- [ ] Rewrite lint-style scripts in Go as ctxctl subcommands — - blocked: prerequisite ctxctl does not exist yet. Deferred. - #added:2026-03-29-082958 - -Dividing line: `ctx` is the user/agent tool, `ctxctl` is -the maintainer/contributor -tool. 
If a developer clones the repo and needs to build, test, release, -or validate -— that's `ctxctl`. If a user is working in a project and needs context — -that's `ctx`. - -Strong fits beyond build/release: -- `ctxctl plugin package` — package .claude-plugin for marketplace publishing -- `ctxctl plugin validate` — validate plugin.json, hooks.json, skill structure -- `ctxctl doctor` — contributor pre-flight (Go version, tools, GPG, hooks); - absorbs `hack/gpg-fix.sh` and `hack/gpg-test.sh` -- `ctxctl changelog` — deterministic release notes from git log - -Reasonable fits if project grows: -- `ctxctl test smoke` — replaces the shell pipeline in `make smoke` -- `ctxctl site build/serve` — wraps zensical + feed generation -- `ctxctl mcp register` — replaces `hack/gemini-search.sh` and future - MCP registrations - -Not a fit (keep in `ctx`): -- Anything user-facing in a project context (status, agent, drift, recall) -- Anything Claude Code hooks call — hooks must call `ctx`, not `ctxctl` - -- [ ] Design `ctxctl` CLI surface: `ctxctl sync`, `ctxctl build`, `ctxctl - release`, `ctxctl check`, `ctxctl tag` #added:2026-03-25-050000 -- [ ] Implement `ctxctl sync` — stamps VERSION into plugin.json + syncs why - docs; replaces `sync-version`, `sync-why` #added:2026-03-25-050000 -- [ ] Implement `ctxctl check` — drift checks: version sync, why docs, - lint-drift, lint-docs; replaces `check-*` targets #added:2026-03-25-050000 -- [ ] Implement `ctxctl build` — cross-platform builds with version stamping; - replaces `build-all.sh` #added:2026-03-25-050000 -- [ ] Implement `ctxctl release` — full release flow (sync, build, tag, - checksums); replaces `release.sh` + `tag.sh` #added:2026-03-25-050000 -- [ ] Simplify Makefile to thin wrappers: `make build` → `go run ./cmd/ctxctl - build` #added:2026-03-25-050000 -- [ ] Remove `jq` build dependency once ctxctl handles JSON - natively #added:2026-03-25-050000 - -- [ ] Implement MCP warm-up in /ctx-remember session ceremony — when a - 
graph/RAG - tool is configured in .ctxrc, run one orientation query at session start to - build procedural familiarity. Spec: - `ideas/spec-mcp-warm-up-ceremony.md` #added:2026-03-25-120000 - -- [ ] Update ctx doctor to check for graph tool availability — detect if a - graph/RAG MCP is configured in .ctxrc, verify connection status, recommend - installation if missing #added:2026-03-25-120000 - -- [ ] Explore pluggable graph tool interface — replace hardcoded GitNexus - references in skill text with configurable .ctxrc graph_tool key. Skills use - template placeholder instead of literal tool names. Define minimum interface - contract (query, context, impact). Spec: - `ideas/spec-mcp-warm-up-ceremony.md` #added:2026-03-25-120000 - -- [x] HUB-1: Define hub.proto — gRPC service definition with Register, - Publish, Sync, Listen, Status RPCs. Generate Go code. Spec: - specs/context-hub.md #priority:high #added:2026-04-06-113020 #done:2026-04-06 - -- [x] HUB-2: Implement internal/hub/store.go — JSONL append-only entry storage - with sequence assignment, type filtering, and since-sequence queries. Spec: - specs/hub_implementation.md #priority:high #added:2026-04-06-113021 - #done:2026-04-06 - -- [x] HUB-3: Implement internal/hub/auth.go — admin token generation on first - run, client token issuance via Register RPC, gRPC interceptor for Bearer token - validation. Spec: specs/context-hub.md #priority:high #added:2026-04-06-113022 - #done:2026-04-06 - -- [x] HUB-4: Implement internal/hub/server.go — gRPC server with Register, - Publish, Sync RPCs. Wire auth interceptor, JSONL store, TLS support. Spec: - specs/context-hub.md #priority:high #added:2026-04-06-113024 #done:2026-04-06 - -- [x] HUB-5: Implement ctx serve --shared CLI command — starts gRPC hub server - on specified port, generates admin token on first run, supports - --tls-cert/--tls-key flags. 
Spec: specs/context-hub.md #priority:high - #added:2026-04-06-113030 #done:2026-04-06 - -- [x] HUB-6: Implement internal/hub/client.go — gRPC client with Register, - Sync, Publish, Listen methods. Connection config encrypted storage via - internal/crypto (same pattern as notify). Spec: specs/context-hub.md - #priority:high #added:2026-04-06-113032 #done:2026-04-06 - -- [x] HUB-7: Implement ctx connect register — one-time registration with hub, - stores encrypted connection config in .context/.connect.enc. Spec: - specs/context-hub.md #priority:high #added:2026-04-06-113033 #done:2026-04-06 - -- [x] HUB-8: Implement ctx connect subscribe — set entry type filters - (decisions, learnings, conventions), persist in local connection config. Spec: - specs/context-hub.md #priority:medium #added:2026-04-06-113035 - #done:2026-04-07 - -- [x] HUB-9: Implement ctx connect sync — initial full pull of matching - entries from hub, write to .context/shared/ as markdown files with origin - tags, record last-seen sequence in .sync-state.json. Spec: - specs/context-hub.md #priority:medium #added:2026-04-06-113041 - #done:2026-04-07 - -- [x] HUB-10: Implement ctx connect publish and --share flag — push local - entries to hub. Add --share flag to ctx add so entries go to local file AND - hub simultaneously. Spec: specs/context-hub.md #priority:medium - #added:2026-04-06-113043 #done:2026-04-07 - -- [x] HUB-11: Implement Listen RPC with fan-out — server-streaming RPC that - pushes new entries to connected clients in real-time. ctx connect listen with - auto-reconnect on disconnect. Spec: specs/context-hub.md #priority:medium - #added:2026-04-06-113044 #done:2026-04-07 - -- [x] HUB-12: Implement ctx connect status — show server address, connection - state, last sync time, subscription config, entry counts by type. Includes - hub-side Status RPC. 
Spec: specs/context-hub.md #priority:medium - #added:2026-04-06-113046 #done:2026-04-07 - -- [x] HUB-13: Implement ctx agent --include-shared — add Tier 8 budget for - shared knowledge in agent packet assembly. Shared entries from - .context/shared/ included when --include-shared flag is passed. Spec: - specs/context-hub.md #priority:medium #added:2026-04-06-113053 - #done:2026-04-07 - -- [x] HUB-14: Implement --daemon flag for ctx serve --shared — background - process with PID file, --stop to kill, graceful shutdown. Required for - federation. Spec: specs/hub-federation.md #priority:medium - #added:2026-04-06-113054 - -- [x] HUB-15: Integrate hashicorp/raft for leader election — Raft-lite: use - Raft ONLY for master election, not data consensus. --peers flag for cluster - membership. Single-node mode auto-elects. Spec: specs/hub-federation.md - #priority:medium #added:2026-04-06-113056 - -- [x] HUB-16: Implement master-to-follower replication — master pushes entries - to followers via gRPC stream. Followers catch up via sequence-based sync on - reconnect. Spec: specs/hub-federation.md #priority:medium - #added:2026-04-06-113058 - -- [x] HUB-17: Implement client failover — clients maintain ordered peer list, - auto-reconnect to new master on connection failure. Follower redirects client - to current master address. Spec: specs/hub-federation.md #priority:medium - #added:2026-04-06-113104 - -- [x] HUB-18: Implement ctx hub status/peer/stepdown — cluster status display - (role, peers, sync state, entries, uptime), runtime peer add/remove, graceful - leadership transfer. Spec: specs/hub-federation.md #priority:low - #added:2026-04-06-113106 - -- [x] HUB-19: Update compliance test — add internal/hub/ to allowed-net-import - list alongside internal/notify/. Core packages remain network-free. 
Spec: - specs/hub_implementation.md #priority:high #added:2026-04-06-113107 - -- [x] HUB-20: End-to-end integration test — spin up hub, register 2 clients, - publish from one, verify sync on other. Test --share flag, Listen stream, and - reconnect behavior. Spec: specs/context-hub.md #priority:medium - #added:2026-04-06-113109 - -- [x] HUB-2a: Implement hub client registry and meta persistence — - clients.json for registered client tokens/project names, meta.json for - sequence counter and hub metadata. Separate from entries.jsonl. Spec: - specs/context-hub.md #priority:high #added:2026-04-06-114131 - -- [x] HUB-9a: Implement shared file renderer — convert Entry objects to - markdown with origin tags and date headers, create/append to - .context/shared/*.md files. Reused by both ctx connect sync and ctx connect - listen. Spec: specs/context-hub.md #priority:medium #added:2026-04-06-114131 - -- [x] HUB-21: Unit tests for internal/hub/ — store (append, query, rotation), - auth (token generation, validation, interceptor), client (connect, reconnect), - renderer (markdown output). Each package tested independently. Spec: - specs/hub_implementation.md #priority:medium #added:2026-04-06-114131 - -- [x] HUB-22: Documentation — create docs/cli/connect.md and docs/cli/serve.md - for new commands, update docs/cli/agent.md for --include-shared flag and - --shared-budget option. Spec: specs/context-hub.md #priority:low - #added:2026-04-06-114131 - -### Phase: ctx Hub follow-ups (PR #60) - -**Context**: PR #60 `feat: ctx Hub for cross-project knowledge -sharing` (parlakisik) merged despite open review feedback from @bilersan and -a pending review request. Author is heads-down on his Ph.D.; these tasks -capture the cleanup and documentation debt we accepted by merging. 
-PR: https://github.com/ActiveMemory/ctx/pull/60 -Review with findings: -https://github.com/ActiveMemory/ctx/pull/60#pullrequestreview-PRR_kwDOQ9VoNc7ze3nA - -#### Build / platform - -- [x] Fix Windows build: `internal/exec/daemon/daemon.go` uses - `syscall.SysProcAttr{Setsid: true}` (Unix-only). Split into - `daemon.go` (platform-agnostic), `detach_unix.go` (`//go:build !windows`, - `Setsid`), `detach_windows.go` (`//go:build windows`, - `CREATE_NEW_PROCESS_GROUP | HideWindow`). Verified with - `GOOS=windows go build ./...`. #priority:high #added:2026-04-11 #pr:60 - #done:2026-04-11 -- [ ] Add Windows job to CI so this class of regression is caught at PR time, - not by reviewers running local builds. #priority:high #added:2026-04-11 #pr:60 -- [ ] Triage the 16 package-level test failures @bilersan reported on Windows - — classify as platform-specific vs genuine bugs. #added:2026-04-11 #pr:60 - -#### Convention drift - -- [x] Fix 38 `types.go` convention violations introduced by `internal/hub` - and related packages. Resolved upstream in commit `9efe1a94 fix: reconcile - hub code with main's audit tests after rebase` — `make audit` now reports - "All checks passed!" on Linux, and `make lint` is 0 issues. - #priority:high #added:2026-04-11 #pr:60 #done:2026-04-11 -- [ ] Audit `internal/hub`, `internal/cli/connect`, `internal/cli/hub`, - `internal/cli/serve` against CONVENTIONS.md (godoc format, import aliases, - error wrapping, package layout). #added:2026-04-11 #pr:60 -- [ ] Run `/ctx-code-review` over the hub subsystem for edge cases missed in - the merge: token rotation, connection-config migration, Raft leader - handoff failure modes, sync cursor corruption recovery. 
#added:2026-04-11 - #pr:60 - -#### User-facing docs (cornerstone — scope first) - -- [x] Enumerate all doc surfaces touched by the hub: `docs/cli/connect.md`, - `docs/cli/hub.md`, `docs/cli/serve.md`, `docs/cli/init-status.md`, - `docs/cli/index.md` already existed from the PR but the three new CLI - pages were NOT wired into `zensical.toml` nav — fixed. Added three new - recipes, two operations docs, and one security doc; wired all into nav. - #priority:high #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Write a **Getting Started: Shared Hub** recipe: single-node hub on - localhost, register first project, publish a decision, sync from a second - project, `ctx agent --include-shared`. Written to - `docs/recipes/hub-getting-started.md` and wired into nav. - #priority:high #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Write a **Multi-machine hub** recipe: `ctx serve --shared --daemon` - on a LAN host, firewall/port guidance, bearer token provisioning, - `.connect.enc` distribution, `ctx connect register` from clients. - Written to `docs/recipes/hub-multi-machine.md`, wired into nav. - #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Write a **High-availability cluster** recipe: Raft peers with - `--peers`, `ctx hub peer add/remove`, `ctx hub stepdown`, failure-mode - walkthrough (leader loss, split brain, recovery). Written to - `docs/recipes/hub-cluster.md`, wired into nav. - #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Write a **Security model** doc: bearer token lifecycle, AES-256-GCM - `.connect.enc` at-rest, constant-time comparison, 1 MB content cap, type - allowlist. Threat model and operational hardening checklist. Written to - `docs/security/hub.md`, wired into nav. #priority:high - #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Write an **Operations** doc: starting/stopping the daemon, log - locations, `ctx serve --stop`, `ctx hub status`, JSONL store layout, - backup/restore of the append-only log, systemd unit, log rotation. 
- Written to `docs/operations/hub.md`, wired into nav. - #added:2026-04-11 #pr:60 #done:2026-04-11 -- [ ] Document the auto-sync-on-session-start hook: what it does, how to - opt out, interaction with existing UserPromptSubmit hooks, performance - impact on large hubs. Partially covered in connect.md (`check-hub-sync` - mention); a dedicated section is still owed. #added:2026-04-11 #pr:60 -- [x] Document `ctx add --share` and `ctx agent --include-shared` — already - covered in `docs/cli/connect.md` (`--share`) and `docs/cli/init-status.md` - (`--include-shared` flag + Tier 8 explanation); playbook update deferred - until a dedicated "shared knowledge in agent packets" section is written. - #priority:high #added:2026-04-11 #pr:60 #done:2026-04-11 -- [ ] Add an **architecture** section to `ARCHITECTURE.md` / - `DETAILED_DESIGN.md` covering: JSONL append-only store, JSON-over-gRPC - codec (no protoc), fan-out broadcaster, Raft-lite (election only, data - via gRPC sync), sequence-based replication. #added:2026-04-11 #pr:60 -- [x] Add a **failure analysis** page for the hub: what happens on network - partition, disk full, corrupted JSONL, token rotation during active - streams, clock skew between peers. Written to - `docs/operations/hub-failure-modes.md`, wired into nav. Covers - reminder [7]. #added:2026-04-11 #pr:60 #done:2026-04-11 -- [ ] Record a DECISION explaining why we merged PR #60 with known Windows - breakage and convention drift — trade-off, author context, mitigation - plan (this task group). #added:2026-04-11 #pr:60 -- [ ] Update CONVENTIONS.md if any new patterns from the hub are worth - canonicalizing (gRPC handler layout, JSONL store access, bearer-token - middleware). 
#added:2026-04-11 #pr:60 - -#### Framing and mental model (2026-04-11 follow-up) - -- [x] Write `docs/recipes/hub-overview.md` — mental model in one - paragraph, what flows / what does not flow, two explicit user stories - (personal cross-project brain vs small trusted team), "when not to - use it" section. Wired as the first entry in the ctx Hub - nav section. #priority:high #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Rewrite the opening of `docs/recipes/hub-getting-started.md` - to plant stakes ("what you'll get out of this recipe", "what this - recipe does not cover") and point at the overview before any commands. - #priority:high #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Add a "read the overview first" signpost to the top of - `hub-multi-machine.md` and `hub-cluster.md`, naming - each recipe as Story 2 (trusted team) shape. #added:2026-04-11 #pr:60 - #done:2026-04-11 -- [x] Give `docs/cli/connect.md` a real "what is this" intro — unit of - identity is a project not a user, only four entry types flow, link - to the recipes. #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Give `docs/cli/hub.md` a real "who needs this page" intro — - operator commands only, link to `ctx connect` for clients and to the - overview for the mental model. #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Give `docs/operations/hub.md` an operator-cheat-sheet - intro (four entry types, project identity, append-only model) and - link to the overview. #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Fix factual error in `docs/security/hub.md`: - `clients.json` stores client tokens **plaintext**, not hashed. - Replaced the "hashed" claim with a prominent warning admonition, - listed filesystem-level mitigations, and referenced the follow-up - task for hashing / keyring storage. Updated - `docs/operations/hub-failure-modes.md` compromise scenarios - to match (including a new "Compromised hub host" entry). 
Also - documented that `Origin` is self-asserted on publish, so attribution - cannot be trusted after token compromise. #priority:high - #added:2026-04-11 #pr:60 #done:2026-04-11 - -#### Design follow-ups surfaced by the brainstorm (2026-04-11) - -- [ ] Decide the product story: "personal cross-project brain", - "small trusted team", or both — then align the overview, recipes, - and CONTRIBUTING guidance to match. #priority:high #added:2026-04-11 - #pr:60 -- [ ] Server-enforce `Origin` on publish: reject entries whose - `Origin` does not match the authenticated client's `ProjectName`. - Closes a spoofing vector and eliminates accidental mislabeling. - Small change in `internal/hub/handler.go publish()`. - #priority:high #added:2026-04-11 #pr:60 -- [ ] Hash `clients.json` tokens or move them behind the local - keyring (reuse `internal/crypto`). Removes the plaintext-token - footgun documented in the security page. - #priority:high #added:2026-04-11 #pr:60 -- [x] Decide the fate of `Entry.Author`: keep, drop, or promote to - a real identity field. **Decided**: server-authoritative. The - server stamps Author from the authenticated identity source on - every publish; client input is ignored. Pre-registry: stamp with - `ClientInfo.ProjectName`. Registry MVP: stamp with - `users.json.user_id`. PKI stretch: stamp with signed-claim `sub`. - See `.context/DECISIONS.md` [2026-04-11-180000]. Implementation - tasks land under H-22 in the security audit phase. - #added:2026-04-11 #pr:60 #done:2026-04-11 -- [ ] Explore journal-entry → `learning` export path: the density - users expect from "shared context" lives in enriched journal - entries, not in manually written `ctx add learning`. Would let - the hub surface the lessons agents already recorded in sessions - without actually replicating journals. #added:2026-04-11 #pr:60 - -#### Phase: Hub identity layer for public-internet usage (2026-04-11) - -**Context**: The current hub has no concept of user identity. 
-Tokens identify **projects**, not humans. `Origin` is -self-asserted on publish. `clients.json` stores tokens in -plaintext. For the "personal" and "small trusted team" stories -(overview.md Stories 1 and 2) this is acceptable — the trust -model is "everyone holding a token is friendly." - -For public-internet usage (the "Story 3" shape we explicitly -declared out of scope in the overview) these become real gaps: -no per-user attribution, no way to revoke individual humans, no -audit trail that proves who published what, and `clients.json` -compromise equals total hub compromise. - -**Near-term MVP**: a pre-seeded identity registry owned by the -sysadmin. Instead of dynamic token issuance via admin token, -the hub reads a `users.json` file the sysadmin hand-edits, and -client registration validates against that pre-seeded list. -This is simpler than OAuth/OIDC, doesn't require a separate -identity service, and matches how internal services at small -orgs usually start before adopting an SSO. 
- -**Eventual design requirements** (decision record TBD): - -- Per-human identity, not per-project -- Tokens tied to a user ID, not a project name -- Server-enforced `Origin` matches the authenticated user (or - a user's declared project list, with server validation) -- Revocation by removing a user row from the registry and - forcing token rotation -- Hashed token storage at rest -- Optional: attribution-bearing audit log distinct from - `entries.jsonl` - -The following tasks feed into this track (they already exist -in the "Design follow-ups surfaced by the brainstorm" section -above; do not duplicate here): - -- Server-enforce `Origin` on publish (blocks spoofing) -- Hash `clients.json` tokens (blocks plaintext compromise) -- Decide the fate of `Entry.Author` (promote, drop, or keep - unauthenticated) - -Tasks unique to this phase: - -- [ ] Write a spec for the sysadmin-curated identity registry: - filename, format, schema, bootstrap flow, revocation - procedure, migration path from today's `clients.json`. - `specs/hub-identity-registry.md`. Must resolve: - - - **Token issuance**: out-of-band on the server - (`ctx hub users add` prints the plaintext token once - on stdout; only a hash is persisted). - - **Client pickup**: user receives the token out-of-band - and runs `ctx connect register --token - ctx_cli_... --project `; hub validates against - the registry. - - **TTL decision** (pick one, document in the spec): - * **Option A** (recommended): no TTL, manual revocation - only. `ctx hub users remove ` is the only - expiry path. Matches today's `clients.json` - semantics, zero surprise breakage on migration. - * **Option B**: optional `expires_at` per user row. - Tokens without it are valid forever (Option A - behavior); tokens with it are rejected after the - timestamp. Ship as an additive follow-up. - * **Option C** (explicitly rejected): rolling - expiry based on `last_used_at`. Garbage-collects - dormant tokens but breaks users who take long - vacations. 
Not worth the support cost. - - **Revocation procedure**: sysadmin edits `users.json`, - signals the hub to reload, affected tokens fail - immediately on next RPC. - - **Migration from `clients.json`**: one-shot converter - that reads today's `clients.json`, prompts the - sysadmin for a `user_id` per row, and writes - `users.json`. Leave `clients.json` in place as a - read fallback during migration, delete once - everyone is on the new path. - - #priority:high #added:2026-04-11 #pr:60 -- [ ] Implement `users.json` format: `{user_id: {project_ids: - [...], token_hash: "...", created_at: "...", notes: "..."}}`. - Read on hub start and on each Register RPC. Hot-reload via - SIGHUP or file watcher. #added:2026-04-11 #pr:60 -- [ ] Change `Register` RPC semantics: instead of minting a - new client token from the admin token, look up the - requested `ProjectName` in `users.json`. Reject if not - pre-seeded. Return the pre-hashed token only if the caller - presents an initial-provisioning credential the sysadmin - seeded alongside the registry row. #added:2026-04-11 #pr:60 -- [ ] Add `ctx hub users` subcommand group for sysadmin - operations: `add`, `remove`, `rotate`, `list`. These edit - `users.json` directly and signal the running hub to - reload. #added:2026-04-11 #pr:60 -- [ ] Add per-user audit log (`audits.jsonl` beside - `entries.jsonl`). Each RPC records user_id, method, result - status, timestamp. Separate from `entries.jsonl` so it can - be retained on a different schedule. #added:2026-04-11 - #pr:60 -- [ ] Write `docs/security/hub-identity.md` explaining the - registry-based identity model, the threat model it closes, - the threats it still doesn't close, and the operational - procedures (seed the registry, rotate a token, revoke a - user). #added:2026-04-11 #pr:60 -- [ ] Decide whether to ship the identity layer as a - **breaking change** (existing `clients.json` deployments - must migrate) or as an **opt-in flag** (`ctx hub start - --identity users.json`). 
Document in the spec above. - #added:2026-04-11 #pr:60 -- [ ] Update the hub overview and team recipe to name the - identity registry as the "upgrade path to larger teams" - story: "once your team grows past ~10 people or you need - auditable attribution, enable the identity registry." The - current overview treats Story 3 as unsupported — with the - registry this becomes Story 2.5: "small trusted team with - real attribution." #added:2026-04-11 #pr:60 -- [ ] Stretch: OIDC/OAuth bridge. Once the registry layer is - stable, consider adding an optional provider bridge so - `users.json` can be auto-populated from an external - identity source (Google Workspace, GitHub orgs, etc.). Not - a near-term priority — registry-only covers the first - order of magnitude of users. #added:2026-04-11 #pr:60 -- [ ] Stretch: signed-claim / PKI authentication. The - sysadmin-registry MVP and the OIDC bridge are both - **bearer token** models — possession of the token bytes - is identity. This is fine for trusted orgs but has - well-known replay/rotation/identity limits for true - public-internet usage. - - The next tier up is **asymmetric / signed-claim** auth: - sysadmin holds a private signing key, issues short-lived - claims `{user, project, expiry}` signed with that key, - clients present the signed claim on each RPC, server - verifies with the public key. Benefits: - - - Private key never leaves the sysadmin's machine. - - Claims expire in minutes → revocation is automatic. - - Each claim carries identity cryptographically. - - No per-RPC registry lookup — signature verification - is cheap. - - Reference designs to evaluate: JWT (RS256/ES256/EdDSA), - mTLS client certificates, SPIFFE/SPIRE workload - identities. Decision driver: does ctx ever want to run - as a real public-internet service, or does "trusted - team" always remain the upper bound? - - This is the Story 3 → true multi-tenant upgrade. 
Not a - near-term priority; captured here so the registry-first - MVP doesn't get confused for a final-state solution. - #added:2026-04-11 #pr:60 - -#### Phase: "dependency-free" claim cleanup (2026-04-11) - -**Context**: The design-invariant list in marketing and -reference docs historically included "dependency-free" -as one of five properties (alongside local-first, -file-based, CLI-driven, developer-controlled). This was -accurate when ctx was a single Go binary with no -external services. PR #60 (hub), the zensical -integration (`ctx serve`), the Claude Code plugin + -MCP, and future networked features make the blanket -claim false. - -**Replacement framing (adopted 2026-04-11)**: -"**single-binary core**". The context persistence path -(`init`, `add`, `agent`, `status`, `drift`, `load`, -`sync`, `compact`, `task`, `decision`, `learning`, and -siblings) remains a single Go binary with no required -runtime dependencies. Optional integrations — `ctx -trace` (needs `git`), `ctx serve` (needs `zensical`), -`ctx` Hub (needs a running hub), Claude Code plugin -(needs `claude`) — are opt-in and each declares its -dependency explicitly. - -This framing is load-bearing: it communicates the -design intent (nothing you don't opt into) without -claiming a literal falsehood. - -- [x] Update `docs/reference/comparison.md` bullet list - from "dependency-free" to "single-binary core" with - an explicit list of optional integrations and their - dependencies. #added:2026-04-11 #done:2026-04-11 -- [x] Update `docs/thesis/index.md:73` (the five-property - claim) from "zero runtime dependencies" to "a - single-binary core with zero required runtime - dependencies for the persistence path". - #added:2026-04-11 #done:2026-04-11 -- [-] `docs/thesis/index.md:412` (the primitive - comparison table saying "Document: Zero-dependency: - Yes"): left intact. The claim is about the document - primitive itself (markdown files have no runtime - deps), not about ctx as an implementation. 
Accurate. - #added:2026-04-11 #skipped:primitive-claim-is-correct -- [ ] Add a design-invariants reference note: the - blanket claim "dependency-free" MUST NOT be - reintroduced in new docs. Any new framing should use - "single-binary core" or name the specific path - (e.g., "persistence path", "agent packet assembly"). - #priority:medium #added:2026-04-11 -- [ ] Pre-release re-sweep: before each minor release, - grep `docs/`, `README.md`, and any blog drafts for - `dependency-free|dependency free|zero dependencies| - no dependencies` and verify each occurrence is - scoped to a path that is still dependency-free. Add - to the release runbook. #priority:medium - #added:2026-04-11 -- [ ] Update `docs/reference/design-invariants.md` to - explicitly list "single-binary core" as an invariant - with the scope definition, so future doc authors - have a canonical source to reference instead of - re-deriving the phrase. #priority:medium - #added:2026-04-11 - -#### Phase: Hub security audit (2026-04-11) - -**Context**: Full security audit of the hub subsystem, -completed during the PR #60 follow-up brainstorm as a -precondition for any public-internet deployment. 30 -findings total — 5 Critical, 12 High, 7 Medium, 4 Low, 2 -Info — covering transport security, identity, -attribution, DoS surface, Raft cluster integrity, and -storage integrity. - -The audit lives at `specs/hub-security-audit.md` and is -the canonical reference for the rest of the hub security -work. Each finding has a concrete remediation, -complexity estimate, and cross-reference to existing -tasks where applicable. The spec also contains -recommendations grouped by timeline (do-now / short / -medium / long). - -**Per-story verdicts from the audit**: - -- **Story 1** (personal cross-project brain, localhost): - acceptable as-is. No adversary in scope. -- **Story 2** (small trusted team on LAN): acceptable - with documented caveats — LAN private, hub host - hardened, admin token held only by the sysadmin. 
The - `hub-team.md` recipe already names these. -- **Story 3** (public-internet / multi-user): **UNSAFE**. - Do not deploy. Five critical findings apply, several - high-severity findings compound catastrophically - without transport security, and the Raft cluster is - a remote unauthenticated DoS surface. - -**This phase tracks the findings as actionable work**. -Individual findings are numbered H-01 through H-30 in -the spec; this task list references them by number and -links back to the spec for detail. - -- [ ] Read and internalize - [`specs/hub-security-audit.md`](../specs/hub-security-audit.md) - before starting any hub-security implementation. - The spec is the single source of truth for findings, - severity, and remediation patterns. #priority:high - #added:2026-04-11 #pr:60 - -**Do-now track** (prerequisites for non-localhost deployments): - -- [ ] **H-01** Add server-side TLS: `--tls-cert` and - `--tls-key` flags on `ctx hub start`, wire into - `grpc.NewServer` via `grpc.Creds`. Keep plaintext - default for Story 1. #priority:critical - #added:2026-04-11 #pr:60 #audit:H-01 -- [ ] **H-02** Add client-side TLS: accept `grpc://` - and `grpcs://` schemes in `hub_addr`. Update - `NewClient`, `replicateOnce`, `NewFailoverClient` to - switch credentials per scheme. Optional `--ca-cert` - for self-signed. Update - `docs/recipes/hub-multi-machine.md` to document both - forms (the current nginx-reverse-proxy recommendation - is un-implementable until this ships). #priority:critical - #added:2026-04-11 #pr:60 #audit:H-02 -- [ ] **H-04** Server-enforce `Origin` on publish: - `validateBearer` attaches `ClientInfo` to context; - `handler.go publish()` overwrites `pe.Origin` with - the authenticated `ClientInfo.ProjectName` before - store. Add a test that a client authenticated as - `alpha` cannot publish as `beta`. 
#priority:high - #added:2026-04-11 #pr:60 #audit:H-04 -- [ ] **H-15** Fix `appendFile` in `internal/hub/persist.go` - to use real `O_APPEND` instead of read-all-rewrite. - Closes both a performance bug (O(N²) publishes) and - a data-loss risk (partial write can truncate history). - #priority:high #added:2026-04-11 #pr:60 #audit:H-15 - -**Short-term track** (Story 2 hardening): - -- [ ] **H-03** Hash `clients.json` tokens with argon2id. - One-shot migration reads old file, hashes each token, - rewrites. Plaintext token only passes through memory - at registration time; disk only stores hashes. - Already referenced in the design-follow-ups section - above; this entry ties it to the audit. #priority:high - #added:2026-04-11 #pr:60 #audit:H-03 -- [ ] **H-08** Per-token Publish rate limiting using - `golang.org/x/time/rate`. Starting target: 10 entries/sec - per token, 100 burst. Return `ResourceExhausted` with - Retry-After hint. #priority:high #added:2026-04-11 #pr:60 - #audit:H-08 -- [ ] **H-09** Per-token Listen stream cap (suggested - limit: 4 concurrent streams per token, 256 total). - Track in the `fanOut` struct; reject further subscribes - with `ResourceExhausted`. #priority:high - #added:2026-04-11 #pr:60 #audit:H-09 -- [ ] **H-17** Cap `PublishRequest.Entries` at 32 per - request; reject larger batches with - `InvalidArgument`. Document the limit. #priority:high - #added:2026-04-11 #pr:60 #audit:H-17 -- [ ] **H-18** Add `audits.jsonl` as a per-RPC audit log - distinct from `entries.jsonl`. Records - `{ts, method, user, project, status, entry_count}` - per call, including authentication failures. Exposed - via `ctx hub status --audit`. Independent rotation - cadence. Already referenced in the identity-layer - phase; this entry ties it to the audit. #priority:high - #added:2026-04-11 #pr:60 #audit:H-18 -- [ ] **H-19** Implement real revocation: `ctx hub users - remove ` edits the registry and signals the hub - to reload via `fsnotify`. 
Revoked tokens fail - immediately on next RPC. Revocation events logged to - `audits.jsonl`. Merged with the Hub identity layer - phase implementation. #priority:high #added:2026-04-11 - #pr:60 #audit:H-19 -- [x] **H-22 (decide)** Decide `Entry.Author` fate. - **Decided** 2026-04-11: server-authoritative — stamp - from the authenticated identity source, ignore client - input. See `.context/DECISIONS.md` [2026-04-11-180000]. - #added:2026-04-11 #pr:60 #audit:H-22 #done:2026-04-11 -- [ ] **H-22 (implement)** Implement server-authoritative - `Entry.Author`. Identical mechanism to H-04 (Origin - enforcement): `validateBearer` attaches `ClientInfo` - to the gRPC context; `handler.go publish()` reads - `ClientInfo` and stamps `entries[i].Author` from the - server-known identity before calling `store.Append`. - Pre-registry the stamping source is - `ClientInfo.ProjectName`; after the registry MVP the - source becomes `users.json` row's `user_id`; after - the PKI stretch it becomes the signed-claim `sub`. - Same commit as H-04 is fine — they share the - `authFromContext` plumbing. Add a test that a client - authenticated as project `alpha` cannot publish an - entry whose stored `Author` differs from `alpha`. - Audit client-side callers in `ctx connect publish` - and `ctx add --share` for any that populate - `pe.Author` from local config and remove them (or - document them as ignored). #priority:high - #added:2026-04-11 #pr:60 #audit:H-22 -- [x] **H-22 (meta type + wire + validation)** Schema - update landed on the `feature/ctx-hub-next` branch: - - - `EntryMeta` struct defined in - `internal/hub/types.go` with fields `DisplayName`, - `Host`, `Tool`, `Via` (all optional strings). - - `Entry.Author`, `PublishEntry.Author`, - `EntryMsg.Author` all **removed**. Replaced with - `Meta EntryMeta` on each of the three structs. - - `handler.go publish()` copies `pe.Meta` into - `entries[i].Meta` verbatim. 
- - `message.go entryToMsg()` copies `e.Meta` into the - wire `EntryMsg.Meta`. - - `sync_helper.go replicateOnce()` copies - `msg.Meta` into the replicated `Entry.Meta`. - - `entry_validate.go` enforces: - `maxMetaFieldLen = 256` per field, - `maxMetaTotalLen = 2048` total, no C0 control - characters (newline, carriage return, NUL, DEL, - bell, etc.) except horizontal tab. - - `internal/hub/entry_validate_test.go` added with - six regression tests: empty accepted, round-trip, - field oversize rejected, total at cap accepted, - each control char rejected (nul/lf/cr/bell/del), - tab allowed. - - JSON wire key is `"meta"` on all three structs. - Pre-existing `entries.jsonl` entries with `author` - fields load cleanly (JSON ignores unknown fields) - and silently lose the hint — acceptable on the - feature branch with no production data. - - Still open as follow-up tasks below (H-22 a through - e). #priority:high #added:2026-04-11 #pr:60 - #audit:H-22 #done:2026-04-11 -- [ ] **H-22a (server-authoritative Origin stamping)** - Implement H-04-style server-enforcement for - `Entry.Origin`: `validateBearer` attaches - `ClientInfo` to the gRPC context; - `handler.go publish()` reads `ClientInfo` and - overwrites `entries[i].Origin` with - `ClientInfo.ProjectName` before `store.Append`. - Client's `pe.Origin` becomes advisory and is - ignored. This is the actual security property - the Author→Meta split was enabling — the - schema change made room for it but the - enforcement still needs to land. Add a test: - client authenticated as `alpha` cannot publish - an entry whose stored Origin is `beta`. - #priority:high #added:2026-04-11 #pr:60 #audit:H-22 -- [ ] **H-22b (renderer labels Meta as advisory)** - Update `internal/cli/connect/core/render/` (and any - other place that writes fanned-out entries to - `.context/hub/*.md`) so `Meta`-sourced values are - labeled as "client label" or "client-reported" in - prose. 
The word "Origin" is reserved for the - server-authoritative project name. Example output: - - ```markdown - ## [2026-04-11] Use UTC timestamps everywhere - **Origin**: alpha (client label: Alice via ctx@0.8.1) - ``` - - Add a test verifying that a Meta.DisplayName of - `"bob"` does NOT cause the rendered output to show - `Origin: bob`. #priority:high #added:2026-04-11 - #pr:60 #audit:H-22 -- [ ] **H-22c (client publish path supports Meta)** - Update `ctx connect publish` (and `ctx add --share` - if it reaches the hub) to accept `--display-name`, - `--host`, `--tool`, `--via` flags (or a single - `--meta key=val` repeatable flag — implementation - choice). Defaults: `--tool=ctx@`, - `--host=`, `--via=` left empty, - `--display-name=` left empty. Document in - `docs/cli/connect.md`. #priority:medium - #added:2026-04-11 #pr:60 #audit:H-22 -- [ ] **H-22d (docs: `Meta` is advisory)** Add a - prominent note to `docs/cli/connect.md`, - `docs/security/hub.md`, and - `docs/recipes/hub-overview.md` explaining that - `Meta` fields are client-reported hints, not - attribution. Cross-reference the decision record - [2026-04-11-180000]. #added:2026-04-11 #pr:60 - #audit:H-22 -- [ ] **H-22e (audit spec update)** Update - `specs/hub-security-audit.md` H-22 finding to - reflect the landed schema change: the "decide" - phase is done, the "meta type" phase is done, the - remaining work is the Origin stamping (a), the - renderer labels (b), and the client-side plumbing - (c). Also note the six regression tests as "partial - coverage" of the finding. #added:2026-04-11 #pr:60 - #audit:H-22 -- [ ] **H-30** gRPC server hardening: `KeepaliveEnforcementPolicy`, - `KeepaliveParams`, `MaxConcurrentStreams`, total - concurrent connection limit at the listener level. 
- #priority:medium #added:2026-04-11 #pr:60 #audit:H-30 - -**Medium-term track** (correctness + cluster integrity): - -- [ ] **H-12** Deterministic Raft bootstrap: single - `--bootstrap` node calls `BootstrapCluster`, others - join via `AddVoter`. Persist a `bootstrapped` flag - in the raft data dir to avoid double-bootstrapping - on restart. #priority:medium #added:2026-04-11 #pr:60 - #audit:H-12 -- [ ] **H-13** Follower-side replication validation: - call `validateEntry` on every entry received from - master before appending. Defense-in-depth against a - compromised master (which becomes possible under any - Raft transport compromise — see H-10/H-11). - #priority:medium #added:2026-04-11 #pr:60 #audit:H-13 -- [ ] **H-14** Preserve master sequence on replication: - add `masterSequence` field to Entry, followers - remember master-assigned sequences alongside local - ones. Clients cursor by master sequence so failover - doesn't re-replicate the entire log. #priority:medium - #added:2026-04-11 #pr:60 #audit:H-14 -- [ ] **H-24** `ctx hub redact ` subcommand: mark - the entry in `entries_redacted.jsonl`, broadcast a - redaction notice via Listen, filter on queries, log - to `audits.jsonl`. #priority:medium #added:2026-04-11 - #pr:60 #audit:H-24 -- [ ] **H-29** Bounded in-memory entry cache: LRU over - `entries.jsonl` with a persistent offset index - (`entries.idx`). O(log N) seeks without full-file - reads. Secondary: entries.jsonl rotation at threshold. - #priority:medium #added:2026-04-11 #pr:60 #audit:H-29 - -**Long-term track** (Story 3 enablement): - -- [ ] **H-10 + H-11** Authenticated + encrypted Raft - transport. Replace `raft.NewTCPTransport` with a - TLS-wrapped transport using mTLS between cluster - peers. Peer certs issued from a cluster CA managed - by the sysadmin. Precondition for any non-localhost - multi-node deployment. #priority:critical - #added:2026-04-11 #pr:60 #audit:H-10,H-11 -- [ ] **H-28** Decouple Raft bind port from gRPC port. 
- Accept a dedicated `--raft-bind` flag; default to a - random high port or refuse to start. Makes port - scanning less productive. #priority:low - #added:2026-04-11 #pr:60 #audit:H-28 -- [ ] Signed-entry mode: publishing clients sign their - entries with a per-client signing key; followers - verify on replication. Eliminates the "trust the - master" assumption even if H-10 fails. Merged with - the PKI stretch task in the Hub identity layer - phase. #added:2026-04-11 #pr:60 #audit:H-13 - -**Low-priority polish** (defense-in-depth): - -- [ ] **H-16** Escape / fence `Content` when the - client-side renderer writes to `.context/hub/*.md`. - Wrap every entry in explicit markers - (``) so malicious - triple-dash patterns can't inject fake frontmatter. - #added:2026-04-11 #pr:60 #audit:H-16 -- [ ] **H-20** Strict constant-time token validation: - iterate all `ClientInfo` entries and OR the results - of `subtle.ConstantTimeCompare` instead of a map - lookup followed by a constant-time compare. Rolled - into the H-03 hashing work. #added:2026-04-11 #pr:60 - #audit:H-20 -- [ ] **H-21** Require exact `Bearer ` prefix in the - `authorization` header; reject otherwise with - `Unauthenticated`. Trivial one-line tightening. - #added:2026-04-11 #pr:60 #audit:H-21 -- [ ] **H-23** Offer passphrase-derived admin token - storage (argon2id) instead of plaintext `admin.token` - on disk. Optional; document in - `docs/operations/hub.md`. #added:2026-04-11 #pr:60 - #audit:H-23 -- [ ] **H-25** Collapse auth error messages to a single - generic `Unauthenticated` reason ("authentication - required"). Log the specific reason server-side - only. #added:2026-04-11 #pr:60 #audit:H-25 - -**Informational (no action needed)**: - -- H-26: daemon re-exec flag — already fixed earlier in - this session as part of the `ctx serve --hub` → `ctx - hub start` split. Recorded in the audit for audit- - trail completeness. 
-- H-27: mTLS / asymmetric auth discussion — covered by - the PKI stretch task in the Hub identity layer - phase. No separate task needed. - -**Out of scope for this audit** (tracked elsewhere): - -- Supply chain (Go module pinning, CVE monitoring, - reproducible builds) -- Build integrity (signed binaries, transparency log) -- Third-party library CVEs (`hashicorp/raft`, `grpc`, - `raft-boltdb`) -- AI-agent misbehavior (accidental secret publishing - via `--share` — covered by the "secret-leak runbook" - task in the PR #60 follow-up section above) -- Per-project read ACLs (still out of scope even after - the identity layer MVP) - -#### Rename "Shared Context Hub" → "`ctx` Hub" (2026-04-11) - -Brainstorm outcome: "shared" was overloaded (shared memory, -shared journal, shared state) and actively primed the wrong -mental model in docs. `ctx` Hub is the canonical name; `Hub` is -used alone in nav and operator contexts where surrounding text -disambiguates. - -- [x] Rename nav entries: Recipes subsection "Shared Context Hub" - → "Hub"; add `docs/home/hub.md` as a home-level intro; split - Operations section into `Hub` / `Operating ctx` / `Maintainers` - subsections. `zensical.toml` updated. #added:2026-04-11 #pr:60 - #done:2026-04-11 -- [x] Create `docs/home/hub.md` — home-level introduction, names - the two user stories, lists what flows vs what does not, - points readers at recipes/overview for the five-minute - walkthrough. #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Rewrite `docs/operations/index.md` with three audience-keyed - sections (Hub / Operating ctx / Maintainers). Matches the new - nav structure. #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Rename doc files: `docs/recipes/shared-hub-*.md` → - `docs/recipes/hub-*.md`, `docs/operations/shared-hub*.md` → - `docs/operations/hub*.md`, `docs/security/shared-hub.md` → - `docs/security/hub.md`. Updated all internal cross-links. 
- #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Rename spec files: `specs/shared-context-hub.md` → - `specs/context-hub.md`, `specs/shared-hub-federation.md` → - `specs/hub-federation.md`. Updated prose and cross-refs in - remaining spec files (`hub_implementation.md`, - `task-allocation.md`, `hub-federation.md`). #added:2026-04-11 - #pr:60 #done:2026-04-11 -- [x] Rename Go packages: `internal/cli/agent/core/shared` → - `internal/cli/agent/core/hub`; `internal/cli/serve/core/shared` - → `internal/cli/serve/core/hub`. Resolved the package-name - collision with `internal/hub` by aliasing the inner import to - `hublib` in the two files that need both. Updated audit-test - exempt lists (`magic_strings_test.go`, `magic_values_test.go`) - to match. #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Rename Go constants and flag definitions: - `cFlag.Shared` → `cFlag.Hub`, `cFlag.IncludeShared` → - `cFlag.IncludeHub`, `DescKeyServeShared` → `DescKeyServeHub`, - `DescKeyAgentIncludeShared` → `DescKeyAgentIncludeHub`. - YAML keys in `flags.yaml` and `commands.yaml` updated to - match. #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Rename CLI flags: `ctx serve --shared` → `ctx serve --hub`, - `ctx agent --include-shared` → `ctx agent --include-hub`. - `ctx add --share` kept (verb form is still correct). - #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Rename on-disk directory: `.context/shared/` → - `.context/hub/`. Updated constants (`sharedDir` → `hubDir` in - `agent/core/hub/load.go` and `connect/core/render/render.go`), - path literals in `connect/core/sync/state.go`, and all test - fixtures in `connect/core/render/render_test.go`. - #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Rename `packet.Shared` / `AssembledPacket.Shared` struct - field → `Hub`, with matching json tag. Updated - `assemble.go`, `out.go`, `render.go`, `types.go` in - `internal/cli/agent/core/budget`. Tier 8 comment updated. 
- #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Fix a bug surfaced by the rename: `internal/cli/serve/core/hub/daemon.go` - was spawning child processes with the stale flag - `ctx serve --shared` — now correctly passes `--hub`. Without - this fix, `ctx serve --hub --daemon` would have failed silently - on the re-exec. #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Fix stuttery function name flagged by audit: - `hub.hubDir()` → `hub.defaultDataDir()` in - `internal/cli/serve/core/hub/setup.go`. #added:2026-04-11 - #pr:60 #done:2026-04-11 -- [x] Prose sweep across all hub docs: "Shared Context Hub" → - "`ctx` Hub", "shared hub" → "hub", `--shared` → `--hub`, - `.context/shared/` → `.context/hub/`. Covered - `docs/home/`, `docs/recipes/`, `docs/operations/`, - `docs/security/`, `docs/cli/`. #added:2026-04-11 #pr:60 - #done:2026-04-11 -- [x] Verify all nav targets exist after rename. All sixteen - hub-related paths referenced in `zensical.toml` resolve to - real files. #added:2026-04-11 #pr:60 #done:2026-04-11 -- [x] Full QA gate: `go build ./...` (Linux), - `GOOS=windows go build ./...`, `make lint` (0 issues), - `make test` (0 failures including the audit exempt-list - update and the `gofmt` round-trip on `serve/cmd/root/cmd.go`). - #added:2026-04-11 #pr:60 #done:2026-04-11 +## Blocked diff --git a/.context/steering/product.md b/.context/steering/product.md new file mode 100644 index 000000000..f0f86d1a1 --- /dev/null +++ b/.context/steering/product.md @@ -0,0 +1,50 @@ +--- +name: product +description: Product context, goals, and target users +inclusion: always +priority: 10 +--- + + +# Product Context + +Describe the product, its goals, and target users. 
+ +- **What is this project?** +- **Who uses it?** +- **What problem does it solve?** +- **What is explicitly out of scope?** \ No newline at end of file diff --git a/.context/steering/structure.md b/.context/steering/structure.md new file mode 100644 index 000000000..25a014e5f --- /dev/null +++ b/.context/steering/structure.md @@ -0,0 +1,49 @@ +--- +name: structure +description: Project structure and directory conventions +inclusion: always +priority: 10 +--- + + +# Project Structure + +Describe the project layout and directory conventions. + +- **Top-level directories and their purpose** +- **Where new files should go** (and where they should not) +- **Naming conventions** for files, packages, modules \ No newline at end of file diff --git a/.context/steering/tech.md b/.context/steering/tech.md new file mode 100644 index 000000000..acfe5736a --- /dev/null +++ b/.context/steering/tech.md @@ -0,0 +1,50 @@ +--- +name: tech +description: Technology stack, constraints, and dependencies +inclusion: always +priority: 10 +--- + + +# Technology Stack + +Describe the technology stack, constraints, and key dependencies. + +- **Languages and versions** +- **Frameworks and key libraries** +- **Runtime / deployment target** +- **Hard constraints** (e.g. no CGO, no network at test time) \ No newline at end of file diff --git a/.context/steering/workflow.md b/.context/steering/workflow.md new file mode 100644 index 000000000..4b9b43a4d --- /dev/null +++ b/.context/steering/workflow.md @@ -0,0 +1,50 @@ +--- +name: workflow +description: Development workflow and process rules +inclusion: always +priority: 10 +--- + + +# Development Workflow + +Describe the development workflow, branching strategy, and process rules. 
+ +- **Branch strategy** (main-only, trunk-based, feature branches) +- **Commit conventions** (message format, signed-off-by) +- **Pre-commit / pre-push checks** +- **Review expectations** \ No newline at end of file diff --git a/.ctxrc.dev b/.ctxrc.dev index 3a0c029e9..dc856af01 100644 --- a/.ctxrc.dev +++ b/.ctxrc.dev @@ -11,7 +11,6 @@ profile: dev # auto_archive: true # archive_after_days: 7 # scratchpad_encrypt: true -# allow_outside_cwd: false # entry_count_learnings: 30 # entry_count_decisions: 20 # convention_line_count: 200 @@ -53,7 +52,7 @@ key_rotation_days: 90 # Days before encryption key rotation nudge # --- Webhook notifications --- # Notifications are opt-in: nothing fires unless events are listed. -# Run `ctx notify setup` to configure the encrypted webhook URL first. +# Run `ctx hook notify setup` to configure the encrypted webhook URL first. # notify: events: @@ -73,5 +72,5 @@ notify: # qa-reminder — QA gate reminder emitted # block-non-path-ctx — blocked non-PATH ctx invocation # -# Note: `ctx notify test` always bypasses the event filter — no need to +# Note: `ctx hook notify test` always bypasses the event filter — no need to # list "test" here. It warns if filtered but sends anyway. diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 96172842d..8c1754315 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -6,7 +6,7 @@ ## Context System This project uses Context (`ctx`) for persistent AI context -management. Your memory is NOT ephemeral — it lives in `.context/` files. +management. Your memory is NOT ephemeral; it lives in `.context/` files. 
## On Session Start @@ -50,7 +50,7 @@ After completing meaningful work, save a session summary to Create a file named `YYYY-MM-DD-topic.md`: ```markdown -# Session: YYYY-MM-DD — Brief Topic Description +# Session: YYYY-MM-DD: Brief Topic Description ## What Was Done - Describe completed work items @@ -90,7 +90,7 @@ Periodically ask yourself: > "If this session ended right now, would the next session know what happened?" -If no — save a session file or update context files before continuing. +If no, save a session file or update context files before continuing. ## CLI Commands @@ -100,7 +100,7 @@ If `ctx` is installed, use these commands: ctx status # Context summary and health check ctx agent # AI-ready context packet ctx drift # Check for stale context -ctx recall list # Recent session history +ctx journal source --limit 5 # Recent session history ``` diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f4ffba5cf..11915875a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -34,7 +34,7 @@ jobs: run: ./hack/build-all.sh ${{ steps.version.outputs.VERSION }} - name: Create Release - uses: softprops/action-gh-release@v2 + uses: softprops/action-gh-release@v3 with: files: | dist/ctx-* diff --git a/.gitignore b/.gitignore index 4bf83b9cf..8096cef53 100644 --- a/.gitignore +++ b/.gitignore @@ -46,6 +46,7 @@ dist/ .claude/settings.local.json .claude/ralph-loop.local.md .claude/scheduled_tasks.lock +.claude/worktrees/ # Hook diagnostic logs (machine-specific, not committed) .context/logs/ diff --git a/CLAUDE.md b/CLAUDE.md index 4c43e7967..2a20b1430 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -11,11 +11,15 @@ This project uses Context (`ctx`) for context persistence across sessions. ## On Session Start 1. **Run `ctx system bootstrap`**: CRITICAL, not optional. - This tells you where the context directory is. If it fails or returns - no context_dir, STOP and warn the user. 
+ This tells you where the context directory is. + If it returns any error, relay the error output to the user + verbatim, point them at + https://ctx.ist/recipes/activating-context/ for setup, and STOP. + Do not try to activate, initialize, or otherwise recover: **those + are the user's decisions**. Wait for their next instruction. 2. **Read AGENT_PLAYBOOK.md** from the context directory: it explains how to use this system -3. **Run `ctx agent --budget 4000`** for a content summary +3. **Run `ctx agent`** for a content summary ## When Asked "Do You Remember?" @@ -41,7 +45,7 @@ Read them silently, then present what you found as recall, not as a search. ```bash # Get AI-optimized context packet (what you should know) -ctx agent --budget 4000 +ctx agent # Or see full status ctx status @@ -59,6 +63,26 @@ ctx status All files live in the context directory reported by `ctx system bootstrap`. +## Context Directory Lives at the Project Root + +The project root is the parent of `.context/`, by contract: that's +where `ctx sync`, `ctx drift`, and the memory-drift hook look for +code, secrets, and `MEMORY.md`. + +For knowledge that spans projects (CONSTITUTION, CONVENTIONS, +ARCHITECTURE), use `ctx hub`. + +Recommended layout: + +``` +~/WORKSPACE/my-project + ├── .git + ├── .context + ├── Makefile + └── specs + └── ... +``` + ## Hook Authority Instructions from PreToolUse hooks regarding `.context/` files are ALWAYS diff --git a/CONTRIBUTING-SKILLS.md b/CONTRIBUTING-SKILLS.md index c3f2d69f0..506abb001 100644 --- a/CONTRIBUTING-SKILLS.md +++ b/CONTRIBUTING-SKILLS.md @@ -30,7 +30,7 @@ For ctx plugin skills (`/ctx-status`, `/ctx-history`, etc.), see the ### absorb Extracts a diff between two copies of the same project and applies it as a -patch. The companion to `/ctx-worktree` — worktree splits work apart, absorb +patch. The companion to `/ctx-worktree`: worktree splits work apart, absorb merges it back. 
Useful when `git push/pull` isn't practical (USB copies, disconnected machines, worktrees without a shared remote). @@ -40,20 +40,12 @@ disconnected machines, worktrees without a shared remote). ### audit Detects code-level drift: predicate naming, magic strings, hardcoded values, -missing godoc. Follows the 3:1 consolidation ratio — run after every ~3 +missing godoc. Follows the 3:1 consolidation ratio; run after every ~3 rapid implementation sessions. - **Use when**: after YOLO sprints, before releases - **Skip when**: mid-feature with intentionally incomplete code -### backup - -Backs up `.context/`, `.claude/`, and global Claude data to a configured SMB -share. Requires `CTX_BACKUP_SMB_URL` environment variable. - -- **Use when**: before risky operations, end of productive sessions -- **Skip when**: SMB is not configured, or no changes since last backup - ### brainstorm Structured design thinking before implementation. Transforms vague ideas into diff --git a/GITNEXUS.md b/GITNEXUS.md index 4f0d2d759..6adeaa310 100644 --- a/GITNEXUS.md +++ b/GITNEXUS.md @@ -1,5 +1,5 @@ -# GitNexus — Code Intelligence +# GitNexus: Code Intelligence This project is indexed by GitNexus as **ctx** (13443 symbols, 67145 relationships, 257 execution flows). Use the GitNexus MCP tools to understand code, assess impact, and navigate safely. @@ -11,18 +11,18 @@ This project is indexed by GitNexus as **ctx** (13443 symbols, 67145 relationshi - **MUST run `gitnexus_detect_changes()` before committing** to verify your changes only affect expected symbols and execution flows. - **MUST warn the user** if impact analysis returns HIGH or CRITICAL risk before proceeding with edits. - When exploring unfamiliar code, use `gitnexus_query({query: "concept"})` to find execution flows instead of grepping. It returns process-grouped results ranked by relevance. 
-- When you need full context on a specific symbol — callers, callees, which execution flows it participates in — use `gitnexus_context({name: "symbolName"})`. +- When you need full context on a specific symbol (callers, callees, which execution flows it participates in), use `gitnexus_context({name: "symbolName"})`. ## When Debugging -1. `gitnexus_query({query: ""})` — find execution flows related to the issue -2. `gitnexus_context({name: ""})` — see all callers, callees, and process participation -3. `READ gitnexus://repo/ctx/process/{processName}` — trace the full execution flow step by step -4. For regressions: `gitnexus_detect_changes({scope: "compare", base_ref: "main"})` — see what your branch changed +1. `gitnexus_query({query: ""})`: find execution flows related to the issue +2. `gitnexus_context({name: ""})`: see all callers, callees, and process participation +3. `READ gitnexus://repo/ctx/process/{processName}`: trace the full execution flow step by step +4. For regressions: `gitnexus_detect_changes({scope: "compare", base_ref: "main"})`: see what your branch changed ## When Refactoring -- **Renaming**: MUST use `gitnexus_rename({symbol_name: "old", new_name: "new", dry_run: true})` first. Review the preview — graph edits are safe, text_search edits need manual review. Then run with `dry_run: false`. +- **Renaming**: MUST use `gitnexus_rename({symbol_name: "old", new_name: "new", dry_run: true})` first. Review the preview: graph edits are safe, text_search edits need manual review. Then run with `dry_run: false`. - **Extracting/Splitting**: MUST run `gitnexus_context({name: "target"})` to see all incoming/outgoing refs, then `gitnexus_impact({target: "target", direction: "upstream"})` to find all external callers before moving code. - After any refactor: run `gitnexus_detect_changes({scope: "all"})` to verify only expected files changed. 
@@ -30,7 +30,7 @@ This project is indexed by GitNexus as **ctx** (13443 symbols, 67145 relationshi - NEVER edit a function, class, or method without first running `gitnexus_impact` on it. - NEVER ignore HIGH or CRITICAL risk warnings from impact analysis. -- NEVER rename symbols with find-and-replace — use `gitnexus_rename` which understands the call graph. +- NEVER rename symbols with find-and-replace; use `gitnexus_rename` which understands the call graph. - NEVER commit changes without running `gitnexus_detect_changes()` to check affected scope. ## Tools Quick Reference @@ -48,9 +48,9 @@ This project is indexed by GitNexus as **ctx** (13443 symbols, 67145 relationshi | Depth | Meaning | Action | |-------|---------|--------| -| d=1 | WILL BREAK — direct callers/importers | MUST update these | -| d=2 | LIKELY AFFECTED — indirect deps | Should test | -| d=3 | MAY NEED TESTING — transitive | Test if critical path | +| d=1 | WILL BREAK: direct callers/importers | MUST update these | +| d=2 | LIKELY AFFECTED: indirect deps | Should test | +| d=3 | MAY NEED TESTING: transitive | Test if critical path | ## Resources @@ -83,7 +83,7 @@ If the index previously included embeddings, preserve them by adding `--embeddin npx gitnexus analyze --embeddings ``` -To check whether embeddings exist, inspect `.gitnexus/meta.json` — the `stats.embeddings` field shows the count (0 means no embeddings). **Running analyze without `--embeddings` will delete any previously generated embeddings.** +To check whether embeddings exist, inspect `.gitnexus/meta.json`: the `stats.embeddings` field shows the count (0 means no embeddings). **Running analyze without `--embeddings` will delete any previously generated embeddings.** > Claude Code users: A PostToolUse hook handles this automatically after `git commit` and `git merge`. 
diff --git a/Makefile b/Makefile index 2f6e229a5..599038d0a 100644 --- a/Makefile +++ b/Makefile @@ -306,6 +306,14 @@ check-why: @diff -q docs/reference/design-invariants.md internal/assets/why/design-invariants.md || (echo "FAIL: design-invariants.md is stale — run 'make sync-why'" && exit 1) @echo "Why docs are in sync." +## title-case-check: Dry-run title-case checker on docs (or TARGET=path) +title-case-check: + @python3 hack/title-case-headings.py $${TARGET:-docs} + +## title-case-fix: Apply title-case fixes to headings + admonition titles (TARGET=path defaults to docs) +title-case-fix: + @python3 hack/title-case-headings.py --apply $${TARGET:-docs} + ## help: Show this help help: @echo "Context CLI - Available targets:" diff --git a/README.md b/README.md index b6fef2cea..a25cd601b 100644 --- a/README.md +++ b/README.md @@ -34,13 +34,13 @@ Read the full **[Thesis](https://ctx.ist/thesis/)** ## Core Documents -| Document | Context | -|-------------------------------------------------------------------|---------------------------------------------------| -| [Manifesto](https://ctx.ist/manifesto/) | Philosophy: creation, context, verification | -| [The Thesis](https://ctx.ist/thesis/) | Whitepaper: context as deterministic state | -| [Design Invariants](https://ctx.ist/reference/design-invariants/) | System properties that must always hold | -| [Tool Comparison](https://ctx.ist/reference/comparison/) | How ctx differs from .cursorrules, Aider, Copilot | -| [`ctx` Blog](https://ctx.ist/blog/) | Deep dives, architecture notes, learnings | +| Document | Context | +|-------------------------------------------------------------------|-----------------------------------------------------| +| [Manifesto](https://ctx.ist/manifesto/) | Philosophy: creation, context, verification | +| [The Thesis](https://ctx.ist/thesis/) | Whitepaper: context as deterministic state | +| [Design Invariants](https://ctx.ist/reference/design-invariants/) | System properties that must always 
hold | +| [Tool Comparison](https://ctx.ist/reference/comparison/) | How `ctx` differs from .cursorrules, Aider, Copilot | +| [`ctx` Blog](https://ctx.ist/blog/) | Deep dives, architecture notes, learnings | ## The Problem @@ -99,6 +99,11 @@ instructions. # Initialize context directory in your project ctx init +# Activate it for the current shell (binds CTX_DIR). Required +# before every other command: ctx no longer walks up the +# filesystem looking for .context/. +eval "$(ctx activate)" + # Check context status ctx status @@ -114,16 +119,33 @@ ctx add decision "Use PostgreSQL for primary database" \ ctx add learning "Mock functions must be hoisted in Jest" ``` +`ctx activate` emits `export CTX_DIR=...` for your shell; one-shot +callers can prefix the binding inline as `CTX_DIR= ctx ...`. +The value must be an absolute path with `.context` as its basename; +relative paths and other names are rejected on first use. A small +allowlist (`init`, `activate`, `deactivate`, `version`, `help`, +`system bootstrap`, `doctor`, `guide`, `why`, `config switch/status`, +`hub *`) runs without CTX_DIR declared; every other command exits +with a next-step hint when it is unset. + ## Documentation +This README is a map, not the territory. The full documentation +lives at **[ctx.ist](https://ctx.ist)** and carries the recipes, +runbooks, threat model, and design rationale that this file +intentionally doesn't try to fit. If you're past install and +wondering "*how do I actually use this in a real session,*" the +recipes are the right next stop. 
+ | Guide | Description | |-------------------------------------------------|----------------------------------------| | [Getting Started](https://ctx.ist) | Installation, quick start, first steps | +| [Recipes](https://ctx.ist/recipes/) | Practical workflow guides | | [CLI Reference](https://ctx.ist/cli-reference/) | All commands and options | | [Context Files](https://ctx.ist/context-files/) | File formats and structure | | [Integrations](https://ctx.ist/integrations/) | Claude Code, Cursor, Aider setup | -| [Recipes](https://ctx.ist/recipes/) | Practical guides and workflows | -| [Security](https://ctx.ist/security/) | Threat model, encryption, permissions | +| [Operations](https://ctx.ist/operations/) | Runbooks, day-to-day, hub deployment | +| [Security](https://ctx.ist/security/) | Trust model, audit trail, permissions | ## Contributing diff --git a/SECURITY.md b/SECURITY.md index 9978f84d2..bc396331c 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -72,7 +72,7 @@ This means: ### The `--file` Flag The `ctx add` subcommands accept a `--file` flag that reads content from an -arbitrary file path. **No boundary check is enforced** — any file readable by +arbitrary file path. **No boundary check is enforced**; any file readable by the current user can be supplied. This is by design: `ctx` runs as the local user and does not elevate privileges. diff --git a/cmd/ctx/doc.go b/cmd/ctx/doc.go index 797d17e92..b39ed1753 100644 --- a/cmd/ctx/doc.go +++ b/cmd/ctx/doc.go @@ -6,8 +6,29 @@ // Package main is the entry point for the ctx CLI. // -// The binary delegates immediately to [bootstrap.Execute], which -// builds the root cobra.Command, registers all 24 subcommand -// packages, and calls cmd.Execute. No business logic lives here; -// version injection happens via ldflags at build time. +// The binary performs three steps at startup: +// +// 1. 
Initializes the embedded asset lookup table via +// [lookup.Init] so that YAML-backed descriptions, +// templates, and text constants are available to +// every subcommand. +// 2. Builds the root cobra.Command through +// [bootstrap.Initialize], which registers all +// subcommand packages, wires persistent flags, +// and injects build-time version metadata via +// ldflags. +// 3. Calls cmd.Execute and translates any returned +// error into a formatted stderr message via +// [writeErr.With] before exiting with code 1. +// +// No business logic lives in this package. All +// domain behavior is delegated to packages under +// internal/. +// +// # Build-Time Injection +// +// Version, commit hash, and build date are injected +// via ldflags at compile time. The Makefile and +// goreleaser config set these values; the bootstrap +// package reads them. package main diff --git a/docs/blog/2026-01-27-building-ctx-using-ctx.md b/docs/blog/2026-01-27-building-ctx-using-ctx.md index 074d5aa81..74f531bc9 100644 --- a/docs/blog/2026-01-27-building-ctx-using-ctx.md +++ b/docs/blog/2026-01-27-building-ctx-using-ctx.md @@ -33,7 +33,7 @@ This is the story of `ctx`, how it evolved from a hasty "*YOLO mode*" experiment to a disciplined system for **persistent AI context**, and what I have learned along the way. -!!! info "Context is a Record" +!!! info "Context Is a Record" **Context** *is a* **persistent record**. By "*context*", I **don't** mean model memory or stored thoughts: @@ -165,7 +165,7 @@ The `git` history tells the story: 4f0e195 feat: separate orchestrator directive from agent tasks ``` -## YOLO Mode: Fast, But Dangerous +## YOLO Mode: Fast, but Dangerous The *Ralph Loop* made feature development *incredibly fast*. @@ -277,12 +277,12 @@ conventions...*) should go in to `CONVENTIONS.md`. Here's how `ctx` explained why the distinction was important: -!!! tip "Decision record, 2026-01-25" +!!! 
tip "Decision Record, 2026-01-25" Overly strict constitution creates friction and gets ignored. Conventions can be bent; constitution **cannot**. -## Hooks: Harder Than They Look +## Hooks: Harder than They Look Claude Code hooks seemed simple: Run a script before/after certain events. @@ -339,7 +339,7 @@ By the time of this writing this project's `ctx` sessions They are not part of the source code due to security, privacy, and size concerns. -!!! tip "Middle Ground: the Scratchpad" +!!! tip "Middle Ground: The Scratchpad" For sensitive notes that *do* need to travel with the project, `ctx pad` stores encrypted one-liners in git, and `ctx pad add "label" --file PATH` can ingest small files. @@ -470,7 +470,7 @@ The archive from January 23rd shows 13 phases of work: That's an impressive ^^173 commits** across **8 days** of development. -## What I Learned About AI-Assisted Development +## What I Learned about AI-Assisted Development **1. Memory changes everything** @@ -551,7 +551,7 @@ If you are reading this, chances are that you already have heard about `ctx`. [github.com/ActiveMemory/ctx](https://github.com/ActiveMemory/ctx), * and the documentation lives at [ctx.ist](https://ctx.ist). -!!! note "Session Records are a Gold Mine" +!!! note "Session Records Are a Gold Mine" By the time of this writing, I have **more than 70 megabytes** of **text-only** session capture, spread across >100 Markdown and `JSONL` files. diff --git a/docs/blog/2026-02-01-ctx-v0.2.0-the-archaeology-release.md b/docs/blog/2026-02-01-ctx-v0.2.0-the-archaeology-release.md index e369fde4f..8ecbfd9f2 100644 --- a/docs/blog/2026-02-01-ctx-v0.2.0-the-archaeology-release.md +++ b/docs/blog/2026-02-01-ctx-v0.2.0-the-archaeology-release.md @@ -22,11 +22,11 @@ topics: ![ctx](../images/ctx-banner.png) -## Digging Through the Past to Build the Future +## Digging through the Past to Build the Future *Jose Alekhinne / 2026-02-01* -!!! question "What if Your AI Could Remember Everything?" +!!! 
question "What If Your AI Could Remember Everything?" Not just the current session, but **every** session: * **Every** decision made, @@ -74,7 +74,7 @@ I found myself grepping through files to answer questions like: * "*What was the session where we fixed the hook regex?*" * "*How did the `embed.go` split actually happen?*" -!!! note "Fate is Whimsical" +!!! note "Fate Is Whimsical" The irony was **painful**: I built a tool to prevent AI amnesia, but I was suffering from @@ -114,7 +114,7 @@ them in a human-readable format: Slugs are auto-generated from session IDs (*memorable names instead of UUIDs*). The goal (*as the name implies*) is **recall**, not archival accuracy. -!!! note "2,121 lines of new code" +!!! note "2,121 Lines of New Code" The `ctx recall` feature was the largest single addition: parser library, CLI commands, test suite, and slash command. @@ -214,7 +214,7 @@ ctx add learning "CGO breaks ARM64 builds" \ --application "Added to Makefile and CI config" ``` -!!! quote "Structured entries are prompts to the AI" +!!! quote "Structured Entries Are Prompts to the AI" When the AI reads a decision with full context, rationale, and consequences, it understands the **why**, *not* just the **what**. @@ -250,7 +250,7 @@ always loaded first. The same structure serves two very different readers. -!!! tip "Reindex after manual edits" +!!! tip "Reindex After Manual Edits" If you edit entries by hand, rebuild the index with: ```bash @@ -301,7 +301,7 @@ a human can **reason** about. ### 2. Enforcement > Documentation -!!! quote "The Prompt is a Guideline" +!!! quote "The Prompt Is a Guideline" The code is more what you'd call '*guidelines*' than actual rules. -**Hector Barbossa** @@ -324,7 +324,7 @@ The journal system started as a way to understand `ctx` itself. It immediately became useful for everything else. -## v0.2.0 in The Numbers +## v0.2.0 in the Numbers This was a heavy release. 
The numbers reflect that: diff --git a/docs/blog/2026-02-01-refactoring-with-intent.md b/docs/blog/2026-02-01-refactoring-with-intent.md index 6c0cfc74f..f8ff81b70 100644 --- a/docs/blog/2026-02-01-refactoring-with-intent.md +++ b/docs/blog/2026-02-01-refactoring-with-intent.md @@ -196,7 +196,7 @@ This is where **v0.2.0** becomes more than a refactor. The biggest feature of this change window wasn't a refactor; it was the **journal system**. -!!! note "45 files changed, 1680 insertions" +!!! note "45 Files Changed, 1680 Insertions" This commit added the infrastructure for synthesizing AI session history into human-readable content. @@ -245,7 +245,7 @@ The work also introduced **thread safety** in the **recall parser** and centralized shared validation logic; removing duplication that had quietly spread during YOLO mode. -## I (Re)learned My Lessons +## I (Re)Learned My Lessons Similar to what I've learned in [the former human-assisted refactoring post][first-post], this diff --git a/docs/blog/2026-02-03-the-attention-budget.md b/docs/blog/2026-02-03-the-attention-budget.md index 724c33b20..0751ee251 100644 --- a/docs/blog/2026-02-03-the-attention-budget.md +++ b/docs/blog/2026-02-03-the-attention-budget.md @@ -166,7 +166,7 @@ Also noted: ~100 tokens (title-only summaries for overflow) The constraint is the feature: It enforces ruthless prioritization. -### Primitive 3: Indexes Over Full Content +### Primitive 3: Indexes over Full Content `DECISIONS.md` and `LEARNINGS.md` both include index sections: @@ -243,7 +243,7 @@ cat .context/sessions/... # Deep dive when needed Summaries **first**. Details: **on demand**. -## Quality Over Quantity +## Quality over Quantity Here is the counterintuitive part: **more context can make AI worse**. 
diff --git a/docs/blog/2026-02-04-skills-that-fight-the-platform.md b/docs/blog/2026-02-04-skills-that-fight-the-platform.md index f083accc1..eeca176f6 100644 --- a/docs/blog/2026-02-04-skills-that-fight-the-platform.md +++ b/docs/blog/2026-02-04-skills-that-fight-the-platform.md @@ -15,11 +15,11 @@ topics: ![ctx](../images/ctx-banner.png) -## When Your Custom Prompts Work Against You +## When Your Custom Prompts Work against You *Jose Alekhinne / 2026-02-04* -!!! question "Have You Ever Written a Skill that Made Your AI Worse?" +!!! question "Have You Ever Written a Skill That Made Your AI Worse?" You craft detailed instructions. You add examples. You build elaborate guardrails... @@ -62,7 +62,7 @@ Here is a partial overview of what's built in: **Skills should complement this, not compete with it.** -!!! tip "You are the Guest, not the Host" +!!! tip "You Are the Guest, Not the Host" Treat the system prompt like a kernel scheduler. You don't re-implement it in user space: @@ -119,7 +119,7 @@ Once judgment is suppressed, every other safeguard becomes **optional**. | "*Just this once*" | No exceptions | ``` -!!! danger "Judgment Suppression is Dangerous" +!!! danger "Judgment Suppression Is Dangerous" The **attack vector** structurally identical to **prompt injection**. It teaches the AI that its own judgment is wrong. @@ -238,7 +238,7 @@ Skills designed to activate on every interaction regardless of relevance. Universal triggers override the platform's **relevance matching**: The AI spends tokens on process overhead instead of the actual task. -!!! tip "`ctx` preserves relevance" +!!! 
tip "`ctx` Preserves Relevance" This is exactly the failure mode `ctx` exists to mitigate: Wasting attention budget on irrelevant process instead of diff --git a/docs/blog/2026-02-05-you-cant-import-expertise.md b/docs/blog/2026-02-05-you-cant-import-expertise.md index 9a723786d..879c6ef50 100644 --- a/docs/blog/2026-02-05-you-cant-import-expertise.md +++ b/docs/blog/2026-02-05-you-cant-import-expertise.md @@ -19,7 +19,7 @@ topics: *Jose Alekhinne / 2026-02-05* -!!! question "Have You Ever Dropped a Well-Crafted Template Into a Project and Had It Do... Nothing Useful?" +!!! question "Have You Ever Dropped a Well-Crafted Template into a Project and Had It Do... Nothing Useful?" * The template was **thorough**, * The structure was **sound**, * The advice was **correct**... @@ -40,7 +40,7 @@ Then I stopped, and applied `ctx`'s own evaluation framework: This post is about **why**. -!!! tip "It Is About Encoding Templates" +!!! tip "It Is about Encoding Templates" **Templates describe categories of problems.** **Expertise encodes which problems actually happen, and how often.** @@ -138,7 +138,7 @@ that competed with the AI's built-in capabilities. ## What the Skill Didn't Know -!!! tip "AI Without Context is Just a Corpus" +!!! tip "AI without Context Is Just a Corpus" * LLMs are optimized on insanely large **corpora**. * And then they are passed through several layers of human-assisted **refinement**. @@ -305,7 +305,7 @@ that **actually** drift in this project. --- -!!! quote "**If You Remember One Thing From This Post...**" +!!! quote "**If You Remember One Thing from This Post...**" **Frameworks travel. Expertise doesn't.** You can import structures, matrices, and workflows. 
diff --git a/docs/blog/2026-02-07-the-anatomy-of-a-skill-that-works.md b/docs/blog/2026-02-07-the-anatomy-of-a-skill-that-works.md index c11e6c32c..b3f31f06a 100644 --- a/docs/blog/2026-02-07-the-anatomy-of-a-skill-that-works.md +++ b/docs/blog/2026-02-07-the-anatomy-of-a-skill-that-works.md @@ -20,11 +20,11 @@ topics: ![ctx](../images/ctx-banner.png) -## What 20 Skill Rewrites Taught Me About Guiding AI +## What 20 Skill Rewrites Taught Me about Guiding AI *Jose Alekhinne / 2026-02-07* -!!! question "Why do some skills produce great results while others get ignored or produce garbage?" +!!! question "Why Do Some Skills Produce Great Results While Others Get Ignored or Produce Garbage?" I had 20 skills. Most were well-intentioned stubs: a description, a command to run, and a wish for the best. @@ -151,7 +151,7 @@ These are not just nice-to-have. They are **load-bearing**. Withoutthem, the agent will trigger the skill at the *wrong* time, produce *unwanted* output, and **erode the user's trust** in the skill system. -## Lesson 3: Examples Set Boundaries Better Than Rules +## Lesson 3: Examples Set Boundaries Better than Rules The most common failure mode of thin skills was not wrong behavior but *vague* behavior. The agent would do roughly the right thing, @@ -201,7 +201,7 @@ The agent does not have a notes app. It does not browse the web to find one. This instruction, clearly written for a human audience, was *dead weight* in a skill consumed by an AI. -!!! tip "Skills are for the Agents" +!!! tip "Skills Are for the Agents" **Every sentence in a skill should be actionable by the agent**. If the guidance requires human judgment or human tools, it belongs in @@ -322,7 +322,7 @@ checklists is a **tool**: the difference is not the content; it is **whether the agent can reliably execute it without human intervention**. -!!! tip "Skills are Interfaces" +!!! tip "Skills Are Interfaces" **Good skills are not instructions. 
They are contracts.**: * They **specify** preconditions, postconditions, and boundaries. @@ -331,7 +331,7 @@ intervention**. --- -!!! quote "**If You Remember One Thing From This Post...**" +!!! quote "**If You Remember One Thing from This Post...**" **Skills that work have bones, not just flesh.** Quality gates, negative triggers, examples, and checklists diff --git a/docs/blog/2026-02-08-not-everything-is-a-skill.md b/docs/blog/2026-02-08-not-everything-is-a-skill.md index e8a6502e5..db9db7780 100644 --- a/docs/blog/2026-02-08-not-everything-is-a-skill.md +++ b/docs/blog/2026-02-08-not-everything-is-a-skill.md @@ -20,11 +20,11 @@ topics: ![ctx](../images/ctx-banner.png) -## What a Codebase Audit Taught Me About Restraint +## What a Codebase Audit Taught Me about Restraint *Jose Alekhinne / 2026-02-08* -!!! question "When You Find a Useful Prompt, What Do You Do With It?" +!!! question "When You Find a Useful Prompt, What Do You Do with It?" My instinct was to make it a *skill*. I had just spent **three posts** explaining how to build skills that work. @@ -334,7 +334,7 @@ time: If yes, it is a skill. If no, it is a recipe. If you are not sure, it is a recipe until proven otherwise. -## This Mindset In the Context of `ctx` +## This Mindset in the Context of `ctx` `ctx` is a **tool** that gives AI agents persistent memory. Its purpose is **automation**: reducing the **friction** of context loading, session @@ -356,7 +356,7 @@ the right things and to make the rest easy to find when you need it. --- -!!! quote "**If You Remember One Thing From This Post...**" +!!! quote "**If You Remember One Thing from This Post...**" **The best automation decision is sometimes not to automate.** A **runbook** in a Markdown file costs nothing until you use it. 
diff --git a/docs/blog/2026-02-09-defense-in-depth-securing-ai-agents.md b/docs/blog/2026-02-09-defense-in-depth-securing-ai-agents.md index ebcafe4b0..0855e5bfe 100644 --- a/docs/blog/2026-02-09-defense-in-depth-securing-ai-agents.md +++ b/docs/blog/2026-02-09-defense-in-depth-securing-ai-agents.md @@ -73,7 +73,7 @@ iteration `N`. **The agent can rewrite its own guardrails.** -## Five Layers, Each With a Hole +## Five Layers, Each with a Hole That's five nested layers of swiss cheese. Alone, each of them has large holes. Together, they create a **boundary**. @@ -102,7 +102,7 @@ The agent *usually* follows them. **Verdict**: Necessary. Not sufficient. Good for the common case. Never trust it as a security boundary. -### Layer 2: Application Controls (*Deterministic at Runtime, Mutable Across Iterations*) +### Layer 2: Application Controls (*Deterministic at Runtime, Mutable across Iterations*) Permission allowlists in `.claude/settings.local.json`: @@ -367,7 +367,7 @@ overnight. --- -!!! quote "**If You Remember One Thing From This Post...**" +!!! quote "**If You Remember One Thing from This Post...**" **Markdown is not a security boundary.** `CONSTITUTION.md` is a nudge. An allowlist is a gate. diff --git a/docs/blog/2026-02-12-how-deep-is-too-deep.md b/docs/blog/2026-02-12-how-deep-is-too-deep.md index f87543e3a..e7f157ade 100644 --- a/docs/blog/2026-02-12-how-deep-is-too-deep.md +++ b/docs/blog/2026-02-12-how-deep-is-too-deep.md @@ -19,7 +19,7 @@ topics: *Jose Alekhinne / 2026-02-12* -!!! question "Have You Ever Felt Like You Should Understand More of the Stack Beneath You?" +!!! question "Have You Ever Felt like You Should Understand More of the Stack beneath You?" You can talk about transformers at a whiteboard. You can explain attention to a colleague. @@ -316,7 +316,7 @@ loss function. --- -!!! quote "**If You Remember One Thing From This Post...**" +!!! quote "**If You Remember One Thing from This Post...**" Go deep enough to diagnose your failures. 
Stop before you are solving problems that do not propagate to your layer. diff --git a/docs/blog/2026-02-14-irc-as-context.md b/docs/blog/2026-02-14-irc-as-context.md index 4e5dd7718..ff0b07adb 100644 --- a/docs/blog/2026-02-14-irc-as-context.md +++ b/docs/blog/2026-02-14-irc-as-context.md @@ -89,7 +89,7 @@ Client sessions become **ephemeral**. Presence becomes **infrastructural**. -!!! tip "ZNC is tmux for IRC" +!!! tip "ZNC Is Tmux for IRC" * Close your laptop. * ZNC remains. @@ -100,7 +100,7 @@ This is **not** convenience; this is **continuity**. --- -## Presence Without Flapping +## Presence without Flapping With a bouncer: diff --git a/docs/blog/2026-02-14-parallel-agents-with-worktrees.md b/docs/blog/2026-02-14-parallel-agents-with-worktrees.md index 2e88f38a9..c940ee5dc 100644 --- a/docs/blog/2026-02-14-parallel-agents-with-worktrees.md +++ b/docs/blog/2026-02-14-parallel-agents-with-worktrees.md @@ -19,7 +19,7 @@ topics: *Jose Alekhinne / 2026-02-14* -!!! question "What Do You Do With 30 Open Tasks?" +!!! question "What Do You Do with 30 Open Tasks?" You could work through them one at a time. One agent, one branch, one commit stream. @@ -274,7 +274,7 @@ The hard part is not the `git` commands; it is the **discipline**: --- -!!! quote "**If You Remember One Thing From This Post...**" +!!! quote "**If You Remember One Thing from This Post...**" **Partition by blast radius, not by priority.** Two tasks that touch the same files belong in the same track, diff --git a/docs/blog/2026-02-15-ctx-v0.3.0-the-discipline-release.md b/docs/blog/2026-02-15-ctx-v0.3.0-the-discipline-release.md index f7853149f..b84884eca 100644 --- a/docs/blog/2026-02-15-ctx-v0.3.0-the-discipline-release.md +++ b/docs/blog/2026-02-15-ctx-v0.3.0-the-discipline-release.md @@ -19,7 +19,7 @@ topics: *Jose Alekhinne / February 15, 2026* -!!! question "What Does a Release Look Like When Most of the Work Is Invisible?" +!!! 
question "What Does a Release Look like When Most of the Work Is Invisible?" No new headline feature. No architectural pivot. No rewrite. Just **35+ documentation and quality commits** against **~15 feature @@ -203,8 +203,8 @@ failures. | Skills with "When NOT to Use" | 0 | 21 | | Average skill body | ~15 lines | ~80 lines | | Hooks using `$CLAUDE_PROJECT_DIR` | 0 | All | -| Documentation commits | -- | 35+ | -| Feature/fix commits | -- | ~15 | +| Documentation commits | n/a | 35+ | +| Feature/fix commits | n/a | ~15 | That ratio (*35+ documentation and quality commits to ~15 feature commits*) is the defining characteristic of this release: diff --git a/docs/blog/2026-02-15-why-zensical.md b/docs/blog/2026-02-15-why-zensical.md index 5caa9e32b..b52852fde 100644 --- a/docs/blog/2026-02-15-why-zensical.md +++ b/docs/blog/2026-02-15-why-zensical.md @@ -15,7 +15,7 @@ topics: ![ctx](../images/ctx-banner.png) -## Why ctx's Journal Site Runs on a v0.0.21 Tool +## Why `ctx`'s Journal Site Runs on a v0.0.21 Tool *Jose Alekhinne / 2026-02-15* @@ -329,7 +329,7 @@ This is the same kind of decision that shows up throughout `ctx`: --- -!!! quote "**If You Remember One Thing From This Post...**" +!!! quote "**If You Remember One Thing from This Post...**" **Version numbers measure where a project has been.** **The team and the architecture tell you where it's going.** diff --git a/docs/blog/2026-02-17-code-is-cheap-judgment-is-not.md b/docs/blog/2026-02-17-code-is-cheap-judgment-is-not.md index 677844094..b356ad609 100644 --- a/docs/blog/2026-02-17-code-is-cheap-judgment-is-not.md +++ b/docs/blog/2026-02-17-code-is-cheap-judgment-is-not.md @@ -25,7 +25,7 @@ topics: *Jose Alekhinne / February 17, 2026* -!!! question "Are You Worried About AI Taking Your Job?" +!!! question "Are You Worried about AI Taking Your Job?" You might be confusing the thing that's *cheap* with the thing that's **valuable**. @@ -116,7 +116,7 @@ accountable. 
--- -## The Evidence From Building ctx +## The Evidence from Building ctx I did **not** arrive at this conclusion theoretically. @@ -306,7 +306,7 @@ the *replacement*: --- -!!! quote "**If You Remember One Thing From This Post...**" +!!! quote "**If You Remember One Thing from This Post...**" **Code is cheap. Judgment is not.** AI replaces unstructured effort, not directed expertise. The diff --git a/docs/blog/2026-02-17-context-as-infrastructure.md b/docs/blog/2026-02-17-context-as-infrastructure.md index ec0e625d9..c0c20ac43 100644 --- a/docs/blog/2026-02-17-context-as-infrastructure.md +++ b/docs/blog/2026-02-17-context-as-infrastructure.md @@ -25,7 +25,7 @@ topics: *Jose Alekhinne / February 17, 2026* -!!! question "Where does your AI's knowledge live between sessions?" +!!! question "Where Does Your AI's Knowledge Live between Sessions?" If the answer is "in a prompt I paste at the start," you are treating context as a **consumable**. Something assembled, used, and discarded. @@ -392,7 +392,7 @@ The tool is a convenience: **The principles are what matter**. --- -!!! quote "**If You Remember One Thing From This Post...**" +!!! quote "**If You Remember One Thing from This Post...**" **Prompts are conversations. Infrastructure persists.** Your AI does not need a better prompt. It needs a filesystem: diff --git a/docs/blog/2026-02-17-parallel-agents-merge-debt-and-the-myth-of-overnight-progress.md b/docs/blog/2026-02-17-parallel-agents-merge-debt-and-the-myth-of-overnight-progress.md index 6f18a5be5..42ec905eb 100644 --- a/docs/blog/2026-02-17-parallel-agents-merge-debt-and-the-myth-of-overnight-progress.md +++ b/docs/blog/2026-02-17-parallel-agents-merge-debt-and-the-myth-of-overnight-progress.md @@ -15,11 +15,11 @@ topics: ![ctx](../images/ctx-banner.png) -## When the Screen Looks Like Progress +## When the Screen Looks like Progress *Jose Alekhinne / 2026-02-17* -!!! question "How Many Terminals Are too Many?" +!!! question "How Many Terminals Are Too Many?" 
You discover agents can run in parallel. So you open ten... @@ -135,7 +135,7 @@ Real parallelism comes from **task topology**, not from tooling. * Four agents editing the same implementation surface -!!! tip "Context is the Boundary" +!!! tip "Context Is the Boundary" * The goal is **not** to keep agents busy. * The goal is to keep **contexts isolated**. @@ -343,7 +343,7 @@ A well-written task description that takes 50 tokens outperforms a rambling one that takes 200: **Not** just because it is cheaper, but because it leaves more **headroom** for the model to actually **think**. -!!! tip "Literature is NOT Overrated" +!!! tip "Literature Is NOT Overrated" * Attention is a **finite** budget. * **Language** determines how fast you spend it. @@ -382,7 +382,7 @@ Every post has arrived (*and made me converge*) at the same answer so far: --- -## `ctx` Was Never About Spawning More Minds +## `ctx` Was Never about Spawning More Minds `ctx` is about: @@ -426,7 +426,7 @@ This is *slower* to watch. **Faster** to ship. --- -!!! quote "**If You Remember One Thing From This Post...**" +!!! quote "**If You Remember One Thing from This Post...**" **Progress is not what the machine produces while you sleep.** **Progress is what survives contact with the main branch.** diff --git a/docs/blog/2026-02-17-the-3-1-ratio.md b/docs/blog/2026-02-17-the-3-1-ratio.md index d6f855b7b..65f365b7a 100644 --- a/docs/blog/2026-02-17-the-3-1-ratio.md +++ b/docs/blog/2026-02-17-the-3-1-ratio.md @@ -25,7 +25,7 @@ topics: *Jose Alekhinne / February 17, 2026* -!!! question "How often should you stop building and start cleaning?" +!!! question "How Often Should You Stop Building and Start Cleaning?" Every developer knows technical debt exists. Every developer postpones dealing with it. @@ -334,7 +334,7 @@ already paid the compounding cost. --- -!!! quote "**If You Remember One Thing From This Post...**" +!!! quote "**If You Remember One Thing from This Post...**" **Three sessions of building. 
One session of cleaning.** **Not** because the code is dirty, **but** because drift compounds @@ -345,7 +345,7 @@ already paid the compounding cost. --- -## The Arc So Far +## The Arc so Far This post sits at a crossroads in the `ctx` story. Looking back: diff --git a/docs/blog/2026-02-17-when-a-system-starts-explaining-itself.md b/docs/blog/2026-02-17-when-a-system-starts-explaining-itself.md index 8a661e2bc..4030e9709 100644 --- a/docs/blog/2026-02-17-when-a-system-starts-explaining-itself.md +++ b/docs/blog/2026-02-17-when-a-system-starts-explaining-itself.md @@ -94,7 +94,7 @@ The question is correct. The category is wrong. * Skills live in people. * Infrastructure lives in the environment. -!!! tip "`ctx` Is not a Skill: It is a Form of Relief" +!!! tip "`ctx` Is Not a Skill: It Is a Form of Relief" What early adopters experience is **not** an ability. **It is the removal of a cognitive constraint**. @@ -303,7 +303,7 @@ a **shared infrastructure for thought**. --- -!!! quote "**If You Remember One Thing From This Post...**" +!!! quote "**If You Remember One Thing from This Post...**" You do not know a substrate is real when people praise it. You know it is real when: diff --git a/docs/blog/2026-02-25-the-homework-problem.md b/docs/blog/2026-02-25-the-homework-problem.md index 4370af9c4..1c27f4636 100644 --- a/docs/blog/2026-02-25-the-homework-problem.md +++ b/docs/blog/2026-02-25-the-homework-problem.md @@ -233,7 +233,7 @@ does the heavy lifting here: Without the relay requirement, the agent would silently rationalize skipping. With it, skipping becomes a **visible, auditable decision** that the user can override. -### The Compliance Canary +### The Compliance Canary Here's the design insight that only became clear after watching it work across multiple sessions: **the relay block is a compliance canary**. @@ -640,7 +640,7 @@ context at session start**. 
The [Prompting Guide](../home/prompting-guide.md) applies this insight directly: Scope constraints, verification commands, and the reliability checklist are all **one-hop**, moment-of-action patterns. -### 2. Delegation chains decay +### 2. Delegation Chains Decay Every hop in an instruction chain loses authority: diff --git a/docs/blog/2026-03-04-agent-memory-is-infrastructure.md b/docs/blog/2026-03-04-agent-memory-is-infrastructure.md index 46bea17f2..d40daa54b 100644 --- a/docs/blog/2026-03-04-agent-memory-is-infrastructure.md +++ b/docs/blog/2026-03-04-agent-memory-is-infrastructure.md @@ -244,7 +244,7 @@ That's the test. That's the difference. --- -## What Gets Lost Without Infrastructure Memory +## What Gets Lost without Infrastructure Memory Consider the knowledge that accumulates around a non-trivial project: @@ -410,7 +410,7 @@ The project's memory is an infrastructure problem. And **infrastructure belongs in the repository**. -!!! quote "If You Remember One Thing From This Post..." +!!! quote "If You Remember One Thing from This Post..." **Prompts are conversations: Infrastructure persists.** Your AI doesn't need a better notepad. It needs a filesystem: diff --git a/docs/blog/2026-03-23-we-broke-the-3-1-rule.md b/docs/blog/2026-03-23-we-broke-the-3-1-rule.md index ba7f84004..7363848ca 100644 --- a/docs/blog/2026-03-23-we-broke-the-3-1-rule.md +++ b/docs/blog/2026-03-23-we-broke-the-3-1-rule.md @@ -216,7 +216,7 @@ was the same; the experience was worse. felt productive. But the codebase on March 5 was harder to modify than the codebase on February 16, despite having more features. -!!! tip "Speed Without Structure" +!!! tip "Speed without Structure" Speed without structure is negative progress. 
**Agents amplify both building and debt**: The same AI that can diff --git a/docs/blog/2026-04-02-code-structure-as-an-agent-interface.md b/docs/blog/2026-04-02-code-structure-as-an-agent-interface.md index 994fe41b7..d5ba41776 100644 --- a/docs/blog/2026-04-02-code-structure-as-an-agent-interface.md +++ b/docs/blog/2026-04-02-code-structure-as-an-agent-interface.md @@ -20,7 +20,7 @@ topics: # Code Structure as an Agent Interface -## What 19 AST Tests Taught Us About Agent-Readable Code +## What 19 AST Tests Taught Us about Agent-Readable Code ![ctx](../images/ctx-banner.png) diff --git a/docs/cli/backup.md b/docs/cli/backup.md deleted file mode 100644 index 04670a320..000000000 --- a/docs/cli/backup.md +++ /dev/null @@ -1,52 +0,0 @@ ---- -# / ctx: https://ctx.ist -# ,'`./ do you remember? -# `.,'\ -# \ Copyright 2026-present Context contributors. -# SPDX-License-Identifier: Apache-2.0 - -title: Backup -icon: lucide/archive ---- - -![ctx](../images/ctx-banner.png) - -### `ctx backup` - -Create timestamped tar.gz archives of project context and/or global -Claude Code data. Optionally copies archives to an SMB share via GVFS. - -```bash -ctx backup [flags] -``` - -**Flags**: - -| Flag | Description | -|-----------|----------------------------------------------------| -| `--scope` | Backup scope: `project`, `global`, or `all` (default: `all`) | -| `--json` | Output results as JSON | - -**Scopes**: - -| Scope | What's archived | -|-----------|-----------------------------------------------| -| `project` | `.context/`, `.claude/`, `ideas/`, `~/.bashrc` | -| `global` | `~/.claude/` (excludes `todos/`) | -| `all` | Both project and global (default) | - -**Environment**: - -| Variable | Purpose | -|-------------------------|--------------------------------------------------| -| `CTX_BACKUP_SMB_URL` | SMB share URL (e.g. 
`smb://host/share`) | -| `CTX_BACKUP_SMB_SUBDIR` | Subdirectory on share (default: `ctx-sessions`) | - -**Examples**: - -```bash -ctx backup # Back up everything (default: all) -ctx backup --scope project # Project context only -ctx backup --scope global # Global Claude data only -ctx backup --scope all --json # Both, JSON output -``` diff --git a/docs/cli/bootstrap.md b/docs/cli/bootstrap.md index fe05ce45f..06ee3b819 100644 --- a/docs/cli/bootstrap.md +++ b/docs/cli/bootstrap.md @@ -19,7 +19,7 @@ tracked context files, and a short health snapshot. `--quiet` prints just the path; `--json` produces structured output for automation. This is a hidden, agent-only command that agents are instructed to -run first in their session-start procedure — it is the authoritative +run first in their session-start procedure; it is the authoritative answer to "where does this project's context live?". ```bash @@ -41,6 +41,6 @@ ctx system bootstrap -q # Just the context directory path ctx system bootstrap --json # Structured output for automation ``` -**Scripting tip**: `CTX_DIR=$(ctx system bootstrap -q)` is the -canonical way for skills and scripts to find the project's context -directory without hardcoding `.context/`. +**Note**: `-q` prints just the resolved directory path. See +[Activating a Context Directory](../recipes/activating-context.md) +if you hit a "*no context directory specified*" error. 
diff --git a/docs/cli/config.md b/docs/cli/config.md index 0f5326590..b60479add 100644 --- a/docs/cli/config.md +++ b/docs/cli/config.md @@ -8,8 +8,8 @@ title: Config icon: lucide/settings-2 --- -![ctx](../images/ctx-banner.png) +![ctx](../images/ctx-banner.png) ### `ctx config` diff --git a/docs/cli/connect.md b/docs/cli/connect.md index 5ffe3a6fa..053c2a7dd 100644 --- a/docs/cli/connect.md +++ b/docs/cli/connect.md @@ -9,6 +9,8 @@ title: Connect icon: lucide/link --- +![ctx](../images/ctx-banner.png) + ## `ctx connect` Connect a project to a `ctx` Hub for cross-project @@ -16,7 +18,7 @@ knowledge sharing. Projects publish decisions, learnings, conventions, and tasks to a hub; other subscribed projects receive them alongside local context. -!!! tip "New to the hub?" +!!! tip "New to the Hub?" Start with the [`ctx` Hub overview](../recipes/hub-overview.md) for the mental model (what the hub is, who it's for, what it is @@ -30,7 +32,7 @@ token in `.context/.connect.enc`. Two developers on the same project either share that file over a trusted channel, or each register under a different project name. -**Only structured entries flow through the hub** — `decision`, +**Only structured entries flow through the hub**: `decision`, `learning`, `convention`, `task`. Session journals, scratchpad contents, and other local state stay on the machine that created them. @@ -92,7 +94,7 @@ Show hub connection state and entry statistics. ctx connect status ``` -## Automatic sharing +## Automatic Sharing Use `--share` on `ctx add` to write locally AND publish to the hub: @@ -104,16 +106,16 @@ ctx add decision "Use UTC" --share \ ``` If the hub is unreachable, the local write succeeds and a warning -is printed. The `--share` flag is best-effort — it never blocks +is printed. The `--share` flag is best-effort; it never blocks local context updates. 
-## Auto-sync +## Auto-Sync Once registered, the `check-hub-sync` hook automatically syncs new entries from the hub at the start of each session (daily throttled). No manual `ctx connect sync` needed. -## Shared files +## Shared Files Entries from the hub are stored in `.context/hub/`: @@ -128,7 +130,7 @@ Entries from the hub are stored in `.context/hub/`: These files are read-only (managed by sync/listen) and never mixed with local context files. -## Agent integration +## Agent Integration Include shared knowledge in agent context packets: diff --git a/docs/cli/connection.md b/docs/cli/connection.md index 1d9dd7af0..e4928b95d 100644 --- a/docs/cli/connection.md +++ b/docs/cli/connection.md @@ -8,8 +8,8 @@ title: Connect icon: lucide/link --- -![ctx](../images/ctx-banner.png) +![ctx](../images/ctx-banner.png) ## `ctx connect` @@ -32,7 +32,7 @@ token in `.context/.connect.enc`. Two developers on the same project either share that file over a trusted channel, or each register under a different project name. -**Only structured entries flow through the hub** — `decision`, +**Only structured entries flow through the hub**: `decision`, `learning`, `convention`, `task`. Session journals, scratchpad contents, and other local state stay on the machine that created them. @@ -107,7 +107,7 @@ Show ctx Hub connection state and entry statistics. ctx connection status ``` -## Automatic sharing +## Automatic Sharing Use `--share` on `ctx add` to write locally AND publish to the ctx Hub: @@ -119,16 +119,16 @@ ctx add decision "Use UTC" --share \ ``` If the hub is unreachable, the local write succeeds and a warning -is printed. The `--share` flag is best-effort — it never blocks +is printed. The `--share` flag is best-effort; it never blocks local context updates. -## Auto-sync +## Auto-Sync Once registered, the `check-hub-sync` hook automatically syncs new entries from the ctx Hub at the start of each session (daily throttled). No manual `ctx connection sync` needed. 
-## Shared files +## Shared Files Entries from the ctx Hub are stored in `.context/hub/`: @@ -143,7 +143,7 @@ Entries from the ctx Hub are stored in `.context/hub/`: These files are read-only (managed by sync/listen) and never mixed with local context files. -## Agent integration +## Agent Integration Include shared knowledge in agent context packets: diff --git a/docs/cli/context.md b/docs/cli/context.md index ad0315d10..0288858e2 100644 --- a/docs/cli/context.md +++ b/docs/cli/context.md @@ -8,8 +8,8 @@ title: Context Management icon: lucide/layers --- -![ctx](../images/ctx-banner.png) +![ctx](../images/ctx-banner.png) ### `ctx add` diff --git a/docs/cli/hub.md b/docs/cli/hub.md index 85662360f..e8e0dc6fa 100644 --- a/docs/cli/hub.md +++ b/docs/cli/hub.md @@ -13,13 +13,13 @@ icon: lucide/network ## `ctx hub` -Operator commands for a **`ctx` Hub** — the gRPC server that +Operator commands for a **`ctx` Hub**: the gRPC server that fans out decisions, learnings, conventions, and tasks across projects. Use `ctx hub` to start and stop the server, inspect cluster state, add or remove peers at runtime, and hand off leadership before maintenance. -!!! tip "Who needs this page" +!!! tip "Who Needs This Page" You only need `ctx hub` if you are **running** a hub server or cluster. For client-side operations (register, subscribe, sync, publish, listen), see @@ -40,14 +40,14 @@ ctx hub start --data-dir /srv/ctx-hub # Custom data directory ``` On first run, generates an **admin token** and prints it to -stdout. Save this token — it's required for +stdout. Save this token; it's required for [`ctx connection register`](connection.md#ctx-connect-register) in client projects. Subsequent runs reuse the stored token from `/admin.token`. **Default data directory**: `~/.ctx/hub-data/` -#### Daemon mode +#### Daemon Mode Run the hub as a detached background process: @@ -59,7 +59,7 @@ ctx hub stop # Graceful shutdown The daemon writes a PID file to `/hub.pid`. 
Stop the daemon with `ctx hub stop` (see below). -#### Cluster mode +#### Cluster Mode For high availability, run multiple hubs with Raft-based leader election: @@ -70,7 +70,7 @@ ctx hub start --port 9900 \ ``` Raft is used **only** for leader election. Data replication -uses sequence-based gRPC sync on the append-only JSONL log — +uses sequence-based gRPC sync on the append-only JSONL log; there is no multi-node consensus on writes. See the [HA cluster recipe](../recipes/hub-cluster.md) for the full setup and the Raft-lite durability caveat. @@ -106,7 +106,7 @@ ctx hub stop --data-dir /srv/ctx-hub # Custom data directory Sends `SIGTERM` to the PID recorded in `/hub.pid`, waits for in-flight RPCs to drain, and removes the PID file. -Safe to rerun — if no daemon is running, returns a +Safe to rerun: if no daemon is running, returns a "no running hub" error without side effects. ### `ctx hub status` @@ -146,14 +146,14 @@ maintenance. ctx hub stepdown ``` -### See also +### See Also -- [`ctx connect`](connection.md) — client-side commands +- [`ctx connect`](connection.md): client-side commands (register, subscribe, sync, publish, listen) -- [`ctx` Hub overview](../recipes/hub-overview.md) — mental +- [`ctx` Hub overview](../recipes/hub-overview.md): mental model and user stories - [`ctx` Hub: Getting Started](../recipes/hub-getting-started.md) -- [Hub operations](../operations/hub.md) — production +- [Hub operations](../operations/hub.md): production deployment, backup, monitoring - [Hub failure modes](../operations/hub-failure-modes.md) - [Hub security model](../security/hub.md) diff --git a/docs/cli/index.md b/docs/cli/index.md index dc10e8a8e..19b3e519f 100644 --- a/docs/cli/index.md +++ b/docs/cli/index.md @@ -19,63 +19,81 @@ Complete reference for all `ctx` commands, grouped by function. 
All commands support these flags: -| Flag | Description | -|------------------------|-----------------------------------------------------------| -| `--help` | Show command help | -| `--version` | Show version | -| `--context-dir ` | Override context directory (default: `.context/`) | -| `--allow-outside-cwd` | Allow context directory outside current working directory | -| `--tool ` | Override active AI tool identifier (e.g. `kiro`, `cursor`) | - -**Initialization required.** Most commands require a `.context/` directory -created by `ctx init`. Running a command without one produces: - -``` -ctx: not initialized - run "ctx init" first -``` - -Commands that work before initialization: `ctx init`, `ctx setup`, -`ctx doctor`, and grouping commands that only show help. +| Flag | Description | +|-----------------|------------------------------------------------------------| +| `--help` | Show command help | +| `--version` | Show version | +| `--tool ` | Override active AI tool identifier (e.g. `kiro`, `cursor`) | + +**Context declaration required.** ctx does not walk the filesystem +looking for `.context/`. Every non-exempt command requires `CTX_DIR` +to be declared explicitly before it runs. The single declaration +channel is the environment variable: + +- `eval "$(ctx activate)"`: binds `CTX_DIR` for the current shell. +- `CTX_DIR=/abs/path/to/.context` exported in the environment, or + inlined as `CTX_DIR=/abs/path/to/.context ctx ` for a + one-shot. + +`CTX_DIR` must be an absolute path with `.context` as its basename. +Relative paths and other names are rejected on first use; the +basename guard catches the common footgun +(`export CTX_DIR=$(pwd)`) before stray writes can leak to the +project root. + +Commands fail fast with a linkable error +(see [Activating a Context Directory](../recipes/activating-context.md)) +when none is declared. 
The exempt allowlist (commands that run without +a declared context directory) is: `ctx init`, `ctx activate`, +`ctx deactivate`, `ctx version`, `ctx help`, `ctx system bootstrap`, +`ctx doctor`, `ctx guide`, `ctx why`, `ctx config switch/status`, +`ctx hub *`. + +**Initialization required.** Once declared, the target must already +have been initialized by `ctx init` (otherwise commands return +`ctx: not initialized`). ## Getting Started -| Command | Description | -|-----------------------------------------------|----------------------------------------------------------| -| [`ctx init`](init-status.md#ctx-init) | Initialize `.context/` directory with templates | -| [`ctx status`](init-status.md#ctx-status) | Show context summary (files, tokens, drift) | -| [`ctx guide`](guide.md#ctx-guide) | Quick-reference cheat sheet | +| Command | Description | +|-----------------------------------------------------|----------------------------------------------------------| +| [`ctx init`](init-status.md#ctx-init) | Initialize `.context/` directory with templates | +| [`ctx activate`](init-status.md#ctx-activate) | Emit `export CTX_DIR=...` to bind context for the shell | +| [`ctx deactivate`](init-status.md#ctx-deactivate) | Emit `unset CTX_DIR` to clear the binding | +| [`ctx status`](init-status.md#ctx-status) | Show context summary (files, tokens, drift) | +| [`ctx guide`](guide.md#ctx-guide) | Quick-reference cheat sheet | +| [`ctx why`](why.md#ctx-why) | Read the philosophy behind `ctx` | ## Context | Command | Description | |-----------------------------------------------|----------------------------------------------------------| | [`ctx add`](context.md#ctx-add) | Add a task, decision, learning, or convention | -| [`ctx load`](init-status.md#ctx-load) | Output assembled context in read order | -| [`ctx agent`](init-status.md#ctx-agent) | Print token-budgeted context packet for AI consumption | -| [`ctx skill`](skill.md#ctx-skill) | Manage reusable instruction bundles 
| +| [`ctx load`](context.md#ctx-load) | Output assembled context in read order | +| [`ctx agent`](context.md#ctx-agent) | Print token-budgeted context packet for AI consumption | | [`ctx sync`](context.md#ctx-sync) | Reconcile context with codebase state | | [`ctx drift`](context.md#ctx-drift) | Detect stale paths, secrets, missing files | | [`ctx compact`](context.md#ctx-compact) | Archive completed tasks, clean up files | | [`ctx fmt`](context.md#ctx-fmt) | Format context files to 80-char line width | - -## Artifacts - -| Command | Description | -|-----------------------------------------------|----------------------------------------------------------| | [`ctx decision`](context.md#ctx-decision) | Manage `DECISIONS.md` (reindex) | | [`ctx learning`](context.md#ctx-learning) | Manage `LEARNINGS.md` (reindex) | | [`ctx task`](context.md#ctx-task) | Task completion, archival, and snapshots | | [`ctx reindex`](context.md#ctx-reindex) | Regenerate indices for `DECISIONS.md` and `LEARNINGS.md` | +| [`ctx permission`](context.md#ctx-permission) | Permission snapshots (golden image) | +| [`ctx change`](change.md#ctx-change) | Show what changed since last session | +| [`ctx memory`](memory.md#ctx-memory) | Bridge Claude Code auto memory into `.context/` | +| [`ctx watch`](watch.md#ctx-watch) | Auto-apply context updates from AI output | ## Sessions | Command | Description | |-----------------------------------------------|----------------------------------------------------------| | [`ctx journal`](journal.md#ctx-journal) | Browse, import, enrich, and lock session history | -| [`ctx memory`](memory.md#ctx-memory) | Bridge Claude Code auto memory into `.context/` | -| [`ctx remind`](remind.md#ctx-remind) | Session-scoped reminders that surface at session start | | [`ctx pad`](pad.md#ctx-pad) | Encrypted scratchpad for sensitive one-liners | +| [`ctx remind`](remind.md#ctx-remind) | Session-scoped reminders that surface at session start | +| [`ctx hook pause`](pause.md) | 
Pause context hooks for the current session | +| [`ctx hook resume`](resume.md) | Resume paused context hooks | ## Integrations @@ -84,22 +102,22 @@ Commands that work before initialization: `ctx init`, `ctx setup`, | [`ctx setup`](setup.md#ctx-setup) | Generate AI tool integration configs | | [`ctx steering`](steering.md#ctx-steering) | Manage steering files (behavioral rules for AI tools) | | [`ctx trigger`](trigger.md#ctx-trigger) | Manage lifecycle triggers (scripts for automation) | -| [`ctx serve`](serve.md#ctx-serve) | Serve a static site locally via zensical | -| [`ctx hub`](hub.md#ctx-hub) | Operate a ctx Hub server or cluster | -| [`ctx connection`](connection.md#ctx-connection) | Connect to a ctx Hub | -| [`ctx mcp`](mcp.md#ctx-mcp) | MCP server for AI tool integration (stdin/stdout) | -| [`ctx watch`](watch.md#ctx-watch) | Auto-apply context updates from AI output | +| [`ctx skill`](skill.md#ctx-skill) | Manage reusable instruction bundles | +| [`ctx mcp`](mcp.md#ctx-mcp) | MCP server for AI tool integration (stdin/stdout) | +| [`ctx hook notify`](notify.md) | Webhook notifications (setup, test, send) | | [`ctx loop`](loop.md#ctx-loop) | Generate autonomous loop script | +| [`ctx connection`](connection.md#ctx-connection) | Client-side commands for connecting to a `ctx` Hub | +| [`ctx hub`](hub.md#ctx-hub) | Operate a `ctx` Hub server or cluster | +| [`ctx serve`](serve.md#ctx-serve) | Serve a static site locally via zensical | +| [`ctx site`](site.md#ctx-site) | Site management (feed generation) | ## Diagnostics | Command | Description | |-----------------------------------------------|----------------------------------------------------------| | [`ctx doctor`](doctor.md#ctx-doctor) | Structural health check (hooks, drift, config) | -| [`ctx change`](change.md#ctx-change) | Show what changed since last session | -| [`ctx why`](why.md#ctx-why) | Read the philosophy behind `ctx` | | [`ctx trace`](trace.md#ctx-trace) | Show context behind git commits | -| 
[`ctx sysinfo`](sysinfo.md#ctx-sysinfo) | Show system resource usage (memory, swap, disk, load) | +| [`ctx sysinfo`](sysinfo.md#ctx-sysinfo) | Show system resource usage (memory, swap, disk, load) | | [`ctx usage`](usage.md#ctx-usage) | Show session token usage stats | ## Runtime @@ -107,10 +125,9 @@ Commands that work before initialization: `ctx init`, `ctx setup`, | Command | Description | |-----------------------------------------------|----------------------------------------------------------| | [`ctx config`](config.md#ctx-config) | Manage runtime configuration profiles | -| [`ctx permission`](context.md#ctx-permission) | Permission snapshots (golden image) | -| [`ctx hook`](hook.md#ctx-hook) | Hook message, notification, and lifecycle controls | -| [`ctx backup`](backup.md#ctx-backup) | Back up context and Claude data to tar.gz / SMB | | [`ctx prune`](prune.md#ctx-prune) | Clean stale per-session state files | +| [`ctx hook`](hook.md#ctx-hook) | Hook message, notification, and lifecycle controls | +| [`ctx system`](system.md#ctx-system) | Hook plumbing and agent-only commands (not user-facing) | ## Shell @@ -118,13 +135,6 @@ Commands that work before initialization: `ctx init`, `ctx setup`, |-----------------------------------------------|----------------------------------------------------------| | [`ctx completion`](completion.md#ctx-completion) | Generate shell autocompletion scripts | -## Hidden - -| Command | Description | -|-----------------------------------------------|----------------------------------------------------------| -| [`ctx site`](site.md#ctx-site) | Site management (feed generation) | -| [`ctx system`](system.md#ctx-system) | Hook plumbing and agent-only commands (not user-facing) | - --- ## Exit Codes @@ -143,8 +153,6 @@ Commands that work before initialization: `ctx init`, `ctx setup`, |-------------------------|-----------------------------------------------------| | `CTX_DIR` | Override default context directory path | | 
`CTX_TOKEN_BUDGET` | Override default token budget | -| `CTX_BACKUP_SMB_URL` | SMB share URL for backups (e.g. `smb://host/share`) | -| `CTX_BACKUP_SMB_SUBDIR` | Subdirectory on SMB share (default: `ctx-sessions`) | | `CTX_SESSION_ID` | Active AI session ID (used by `ctx trace` for context linking) | @@ -154,7 +162,6 @@ Optional `.ctxrc` (*YAML format*) at project root: ```yaml # .ctxrc -context_dir: .context # Context directory name token_budget: 8000 # Default token budget priority_order: # File loading priority - TASKS.md @@ -163,7 +170,6 @@ priority_order: # File loading priority auto_archive: true # Auto-archive old items archive_after_days: 7 # Days before archiving tasks scratchpad_encrypt: true # Encrypt scratchpad (default: true) -allow_outside_cwd: false # Skip boundary check (default: false) event_log: false # Enable local hook event logging companion_check: true # Check companion tools at session start entry_count_learnings: 30 # Drift warning threshold (0 = disable) @@ -200,13 +206,11 @@ hooks: # Hook system configuration | Field | Type | Default | Description | |-------------------------|------------|----------------|----------------------------------------------------------------------------------------------------------------| -| `context_dir` | `string` | `.context` | Context directory name (relative to project root) | | `token_budget` | `int` | `8000` | Default token budget for `ctx agent` | | `priority_order` | `[]string` | *(all files)* | File loading priority for context packets | | `auto_archive` | `bool` | `true` | Auto-archive completed tasks | | `archive_after_days` | `int` | `7` | Days before completed tasks are archived | | `scratchpad_encrypt` | `bool` | `true` | Encrypt scratchpad with AES-256-GCM | -| `allow_outside_cwd` | `bool` | `false` | Skip boundary check for external context dirs | | `event_log` | `bool` | `false` | Enable local hook event logging to `.context/state/events.jsonl` | | `companion_check` | `bool` | `true` | Check 
companion tool availability (Gemini Search, GitNexus) during `/ctx-remember` | | `entry_count_learnings` | `int` | `30` | Drift warning when `LEARNINGS.md` exceeds this count | diff --git a/docs/cli/init-status.md b/docs/cli/init-status.md index 69ed52951..239903eb9 100644 --- a/docs/cli/init-status.md +++ b/docs/cli/init-status.md @@ -52,6 +52,77 @@ ctx init --force ctx init --merge ``` +After `ctx init` succeeds, the final output includes a hint showing +the exact `eval "$(ctx activate)"` line to bind the new directory +for your shell. Every other `ctx` command requires that binding +(or an equivalent direct `CTX_DIR=/abs/path/.context` export) before +it will run. + +--- + +### `ctx activate` + +Emit a shell-native `export CTX_DIR=...` line for the target +`.context/` directory. `ctx` does not walk the filesystem during +operating commands; every non-exempt command requires `CTX_DIR` +set before it will run. `activate` is the convenience that figures +out the path and lets you bind it with one line. + +```bash +# Walk up from CWD, emit if exactly one candidate visible. +eval "$(ctx activate)" +``` + +**Flags**: + +| Flag | Description | +|-----------|------------------------------------------------------------------------------------------| +| `--shell` | Shell dialect override. POSIX-family (`bash`, `zsh`, `sh`) all share one syntax today; the flag exists for future fish/nushell/powershell support. Auto-detected from `$SHELL`. | + +**Resolution**: + +| Candidate count from CWD | Behavior | +|--------------------------|--------------------------------------------------------------------------| +| Zero | Error. Use `ctx init` to create one, or `cd` closer to the project root. | +| One | Emit `export CTX_DIR=` for that candidate. | +| Two or more | Refuse. List every candidate. Re-run from a more specific cwd. 
| + +`activate` is args-free under the single-source-anchor model; the +explicit-path mode was removed because hub-client / hub-server +scenarios store at `~/.ctx/hub-data/` and never read `.context/`, +so they activate from the project root like everyone else. Direct +binding without a project-local scan is still available via +`export CTX_DIR=/abs/path/.context` or the inline form. + +If the parent shell already has `CTX_DIR` set to a different value, +the output gains a leading `# ctx: replacing stale CTX_DIR=...` +comment so the user sees the change in `eval` output before the +replacement takes effect. + +**See also**: [Activating a Context Directory](../recipes/activating-context.md) +for the full recipe including direnv setup and CI patterns. + +--- + +### `ctx deactivate` + +Emit a shell-native `unset CTX_DIR` line. Pairs with `activate`. + +```bash +eval "$(ctx deactivate)" +``` + +**Flags**: + +| Flag | Description | +|-----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `--shell` | Shell dialect override. POSIX-family (`bash`, `zsh`, `sh`) all share one `unset` syntax today; the flag exists for future fish/nushell/powershell support. Auto-detected from `$SHELL`. | + +`deactivate` does not touch the filesystem, doesn't require a +declared context directory, and never fails under normal operation; +unsetting an already-unset variable is a no-op across supported +shells. 
+ --- ### `ctx status` diff --git a/docs/cli/journal.md b/docs/cli/journal.md index dc20b9c24..28f8b3f31 100644 --- a/docs/cli/journal.md +++ b/docs/cli/journal.md @@ -8,8 +8,8 @@ title: Journal icon: lucide/history --- -![ctx](../images/ctx-banner.png) +![ctx](../images/ctx-banner.png) ### `ctx journal` diff --git a/docs/cli/mcp.md b/docs/cli/mcp.md index b2976132c..bb4843d39 100644 --- a/docs/cli/mcp.md +++ b/docs/cli/mcp.md @@ -35,8 +35,9 @@ by MCP clients (Claude Desktop, Cursor, VS Code Copilot), **not run directly from a shell**. See [Configuration](#configuration) below for how each host launches it. -**Flags:** None. The server uses the configured context directory -(from `--context-dir`, `CTX_DIR`, `.ctxrc`, or the default `.context`). +**Flags:** None. The server uses the declared context directory +from `CTX_DIR`. As with every other ctx command, that variable +must be set: the server does not walk the filesystem. **Examples**: @@ -45,7 +46,7 @@ for how each host launches it. 
ctx mcp serve # Pin a context directory for a specific workspace -ctx --context-dir /path/to/project/.context mcp serve +CTX_DIR=/path/to/project/.context ctx mcp serve # Verify the binary starts without a client attached (Ctrl-C to exit) ctx mcp serve < /dev/null diff --git a/docs/cli/pause.md b/docs/cli/pause.md index 587f1e046..d9159a67f 100644 --- a/docs/cli/pause.md +++ b/docs/cli/pause.md @@ -39,5 +39,5 @@ ctx hook resume **See also**: -- [`ctx hook resume`](resume.md) — the matching resume command +- [`ctx hook resume`](resume.md): the matching resume command - [Pausing Context Hooks recipe](../recipes/session-pause.md) diff --git a/docs/cli/resume.md b/docs/cli/resume.md index ad7a1503d..bbf863946 100644 --- a/docs/cli/resume.md +++ b/docs/cli/resume.md @@ -33,5 +33,5 @@ ctx hook resume **See also**: -- [`ctx hook pause`](pause.md) — the matching pause command +- [`ctx hook pause`](pause.md): the matching pause command - [Pausing Context Hooks recipe](../recipes/session-pause.md) diff --git a/docs/cli/serve.md b/docs/cli/serve.md index 29760da36..21f63013b 100644 --- a/docs/cli/serve.md +++ b/docs/cli/serve.md @@ -25,7 +25,7 @@ ctx serve ./my-site # Serve a specific directory ctx serve ./docs # Serve any zensical site ``` -!!! info "This command does NOT start a hub" +!!! info "This Command Does NOT Start a Hub" `ctx serve` is purely for static-site serving. To run a `ctx` Hub for cross-project knowledge sharing, use [`ctx hub start`](hub.md). That command lives in its @@ -44,7 +44,7 @@ pipx install zensical |--------------|--------------------------------------------------| | `[directory]` | Directory containing a `zensical.toml` to serve | -When omitted, serves `.context/journal-site` by default — the +When omitted, serves `.context/journal-site` by default, the directory produced by `ctx journal site`. 
**Examples**: @@ -55,11 +55,11 @@ ctx serve ./my-site # Serve a specific directory ctx serve ./docs # Serve any zensical site ``` -### See also +### See Also -- [`ctx journal`](journal.md) — generate the journal site +- [`ctx journal`](journal.md): generate the journal site that `ctx serve` displays. -- [`ctx hub start`](hub.md) — for running a `ctx` Hub +- [`ctx hub start`](hub.md): for running a `ctx` Hub server, not a static site. -- [Browsing and enriching past sessions](../recipes/session-archaeology.md) - — the recipe that combines `ctx journal` and `ctx serve`. +- [Browsing and enriching past sessions](../recipes/session-archaeology.md): + the recipe that combines `ctx journal` and `ctx serve`. diff --git a/docs/cli/setup.md b/docs/cli/setup.md index daaedfac2..6a7c291f4 100644 --- a/docs/cli/setup.md +++ b/docs/cli/setup.md @@ -37,7 +37,7 @@ ctx setup [flags] | `copilot` | GitHub Copilot | | `windsurf` | Windsurf IDE | -!!! note "Claude Code uses the plugin system" +!!! note "Claude Code Uses the Plugin System" Claude Code integration is now provided via the `ctx` plugin. Running `ctx setup claude-code` prints plugin install instructions. diff --git a/docs/cli/steering.md b/docs/cli/steering.md index 6f4734592..b41b8bfa5 100644 --- a/docs/cli/steering.md +++ b/docs/cli/steering.md @@ -27,7 +27,7 @@ prompt, and syncs them out to each AI tool's native format ctx steering ``` -!!! tip "Steering vs decisions vs conventions" +!!! tip "Steering vs Decisions vs Conventions" The three look similar on disk but serve different purposes: - **Decisions** record *what* was chosen and *why*. @@ -39,10 +39,10 @@ ctx steering about X*. Consumed by the AI tool's prompt injection layer, conditionally on prompt match. - If you find yourself writing "the AI should always do X" - — that belongs in steering, not decisions. + If you find yourself writing "the AI should always do X", + that belongs in steering, not decisions. 
-### Anatomy of a steering file +### Anatomy of a Steering File ```yaml --- @@ -141,7 +141,7 @@ ctx steering preview "create a REST API endpoint" Sync steering files to tool-native formats for tools that have a **built-in rules primitive**. Not every tool needs -this — Claude Code and Codex use a different delivery +this; Claude Code and Codex use a different delivery mechanism (see below). **Examples**: @@ -157,7 +157,7 @@ ctx steering sync | Cursor | `.cursor/rules/` | Cursor reads the directory natively | | Cline | `.clinerules/` | Cline reads the directory natively | | Kiro | `.kiro/steering/` | Kiro reads the directory natively | -| Claude Code | *(no-op)* | **Delivered via hook + MCP** — see next section | +| Claude Code | *(no-op)* | **Delivered via hook + MCP** (see next section) | | Codex | *(no-op)* | Same as Claude Code | For the three native-rules tools, `ctx steering sync` writes @@ -165,7 +165,7 @@ each matching steering file to the appropriate directory with tool-specific frontmatter transforms. Unchanged files are skipped (idempotent). -### How Claude Code and Codex consume steering +### How Claude Code and Codex Consume Steering Claude Code has no native "steering files" primitive, so `ctx steering sync` skips it entirely. Instead, steering @@ -191,7 +191,7 @@ Claude Code on plugin install. Once registered, Claude can invoke the `ctx_steering_get` tool mid-task to fetch matching steering files for a specific prompt. This is the **only** path that resolves `inclusion: auto` and -`inclusion: manual` matches for Claude Code — Claude +`inclusion: manual` matches for Claude Code; Claude passes the prompt to the MCP tool, which runs the keyword match against each file's description. @@ -203,7 +203,7 @@ claude mcp list Expected line: `ctx: ctx mcp serve - ✓ Connected`. 
If it's missing, reinstall the plugin from Claude Code -(`/plugin` → find `ctx` → uninstall → install again) — +(`/plugin` → find `ctx` → uninstall → install again); older plugin versions shipped without the `.mcp.json` file. @@ -227,17 +227,17 @@ file. - Running `ctx steering sync` before starting a Claude session does **nothing** for Claude's benefit. Skip it. - `ctx steering preview` still works for validating your - descriptions — it doesn't depend on sync. + descriptions; it doesn't depend on sync. - If Claude Code is your only tool, the `ctx steering` commands you care about are `add`, `list`, `preview`, - `init` — never `sync`. + `init` (never `sync`). - If you use both Claude Code **and** (say) Cursor, `ctx steering sync` covers Cursor (where `auto` and `manual` work natively) while the hook+MCP pipeline covers Claude Code. For rules you need to fire automatically on both, use `inclusion: always`. -### `ctx agent` integration +### `ctx agent` Integration When `ctx agent` builds a context packet, steering files are loaded as Tier 6 of the budget-aware assembly (see @@ -246,11 +246,11 @@ loaded as Tier 6 of the budget-aware assembly (see scored against the current prompt and included in priority order until the tier budget is exhausted. 
-### See also +### See Also -- [`ctx setup`](setup.md) — configure which tools receive +- [`ctx setup`](setup.md): configure which tools receive steering syncs -- [`ctx trigger`](trigger.md) — lifecycle scripts (a different +- [`ctx trigger`](trigger.md): lifecycle scripts (a different hooking concept, see below) -- [Building steering files recipe](../recipes/steering.md) - — walkthrough from first file to synced output +- [Building steering files recipe](../recipes/steering.md): + walkthrough from first file to synced output diff --git a/docs/cli/sysinfo.md b/docs/cli/sysinfo.md index 2ba4b7eda..333e41237 100644 --- a/docs/cli/sysinfo.md +++ b/docs/cli/sysinfo.md @@ -15,7 +15,7 @@ icon: lucide/cpu Display a snapshot of system resources (memory, swap, disk, load) with threshold-based alert severities. Mirrors what the -`check-resource` hook plumbing monitors in the background — but this +`check-resource` hook plumbing monitors in the background, but this command prints the full report at any severity level, not only at DANGER. diff --git a/docs/cli/system.md b/docs/cli/system.md index eb17b0292..419200603 100644 --- a/docs/cli/system.md +++ b/docs/cli/system.md @@ -11,7 +11,6 @@ icon: lucide/settings ![ctx](../images/ctx-banner.png) - ### `ctx system` Hidden parent command that hosts Claude Code hook plumbing and a small @@ -23,11 +22,10 @@ integrations. The parent is registered without a visible group in ctx system ``` -!!! note "Commands previously under `ctx system`" +!!! note "Commands Previously under `ctx system`" Several user-facing maintenance commands used to live under `ctx system` and were promoted to top-level: - - `ctx system backup` → **`ctx backup`** - `ctx system events` → **`ctx hook event`** - `ctx system message` → **`ctx hook message`** - `ctx system prune` → **`ctx prune`** @@ -38,9 +36,9 @@ ctx system agent-only command. Update any scripts or personal docs that reference the old paths. 
-## Plumbing subcommands +## Plumbing Subcommands -These are not hook handlers — they're called by skills and editor +These are not hook handlers; they're called by skills and editor integrations during the session lifecycle. Safe to run manually. #### `ctx system mark-journal` @@ -104,7 +102,7 @@ ctx system session-event --type start --caller vscode ctx system session-event --type end --caller vscode ``` -## Hook subcommands +## Hook Subcommands Hidden Claude Code hook handlers implementing the hook contract: read JSON from stdin, perform logic, emit output on stdout, exit 0. Block @@ -114,7 +112,7 @@ UserPromptSubmit hooks: `context-load-gate`, `check-context-size`, `check-persistence`, `check-ceremony`, `check-journal`, `check-version`, `check-resource`, `check-knowledge`, `check-map-staleness`, `check-memory-drift`, `check-reminder`, `check-freshness`, -`check-hub-sync`, `check-backup-age`, `check-skill-discovery`, +`check-hub-sync`, `check-skill-discovery`, `heartbeat`. PreToolUse hooks: `block-non-path-ctx`, `block-dangerous-command`, diff --git a/docs/cli/trace.md b/docs/cli/trace.md index 4bfe93005..7a3b0e209 100644 --- a/docs/cli/trace.md +++ b/docs/cli/trace.md @@ -8,15 +8,15 @@ title: Commit Context Tracing icon: lucide/git-commit-horizontal --- -![ctx](../images/ctx-banner.png) +![ctx](../images/ctx-banner.png) ### `ctx trace` Show the context behind git commits. Links commits back to the decisions, tasks, learnings, and sessions that motivated them. -`git log` shows *what* changed, `git blame` shows *who* — +`git log` shows *what* changed, `git blame` shows *who*, and `ctx trace` shows *why*. ```bash diff --git a/docs/cli/trigger.md b/docs/cli/trigger.md index 024a13fcd..44c0383dd 100644 --- a/docs/cli/trigger.md +++ b/docs/cli/trigger.md @@ -15,7 +15,7 @@ icon: lucide/zap Manage **lifecycle triggers**: executable scripts that fire at specific events during an AI session. 
Triggers can block tool -calls, inject context, automate reactions — any side effect +calls, inject context, and automate reactions: any side effect you want at session boundaries, tool boundaries, or file-save events. @@ -23,7 +23,7 @@ events. ctx trigger ``` -!!! warning "Triggers execute arbitrary scripts" +!!! warning "Triggers Execute Arbitrary Scripts" A trigger is a shell script with the executable bit set. It runs with the same privileges as your AI tool and receives JSON input on stdin. Treat triggers like @@ -31,7 +31,7 @@ ctx trigger understand. A malicious or buggy trigger can block tool calls, corrupt context files, or exfiltrate data. -### Where triggers live +### Where Triggers Live Triggers live in `.context/hooks//` as executable scripts. The on-disk directory name is still `hooks/` for @@ -53,7 +53,7 @@ Each script: └── record-edit.sh ``` -### Trigger types +### Trigger Types | Type | Fires when | |-----------------|--------------------------------------| @@ -64,7 +64,7 @@ Each script: | `file-save` | When a file is saved | | `context-add` | When a context entry is added | -### Input and output contract +### Input and Output Contract Each trigger receives a JSON object on stdin with the event details. Minimal contract (fields vary by trigger type): @@ -189,7 +189,7 @@ ctx trigger disable inject-context # Disabled .context/hooks/session-start/inject-context.sh ``` -### Three hooking concepts in ctx — don't confuse them +### Three Hooking Concepts in ctx (Don't Confuse Them) This is a common source of confusion. `ctx` has three distinct hook-like layers, and they serve different purposes: @@ -204,12 +204,12 @@ Use `ctx trigger` when you want project-specific automation that your AI tool will run at lifecycle events. Use Claude Code hooks for tool-specific integrations that don't need to be portable across tools. 
`ctx system` hooks are not something -you author — they're the internal nudge machinery that ships +you author; they're the internal nudge machinery that ships with ctx. -### See also +### See Also -- [`ctx steering`](steering.md) — persistent AI behavioral +- [`ctx steering`](steering.md): persistent AI behavioral rules (a different concept; rules vs scripts) -- [Authoring triggers recipe](../recipes/triggers.md) — a +- [Authoring triggers recipe](../recipes/triggers.md): a full walkthrough with security guidance diff --git a/docs/home/common-workflows.md b/docs/home/common-workflows.md index a09ac82bd..ed0c46bfb 100644 --- a/docs/home/common-workflows.md +++ b/docs/home/common-workflows.md @@ -24,11 +24,11 @@ For deeper, step-by-step guides, see [Recipes](../recipes/index.md). ## Track Context -!!! tip "Prefer skills over raw commands" +!!! tip "Prefer Skills over Raw Commands" When working with an AI agent, use `/ctx-task-add`, `/ctx-decision-add`, or `/ctx-learning-add` instead of raw `ctx add` commands. The agent automatically picks up session ID, - branch, and commit hash from its context — no manual flags needed. + branch, and commit hash from its context, so no manual flags are needed. ```bash # Add a task @@ -141,7 +141,7 @@ Open [http://localhost:8000](http://localhost:8000) to browse. To update after new sessions, run the same two commands again. -### Safe By Default +### Safe by Default `ctx journal import --all` is **safe by default**: @@ -225,7 +225,7 @@ ctx trace hook enable ``` From now on, every `git commit` automatically gets a `ctx-context` -trailer linking it to relevant context. No extra steps needed — +trailer linking it to relevant context. No extra steps needed; just use `ctx add`, `ctx task complete`, and commit as usual. ```bash @@ -353,6 +353,7 @@ These have no CLI equivalent. They require the agent's reasoning. 
| `/ctx-reflect` | Pause and assess session progress | | `/ctx-consolidate` | Merge overlapping learnings or decisions | | `/ctx-prompt-audit` | Analyze prompting patterns for improvement | +| `/ctx-plan` | Stress-test an existing plan through adversarial interview | | `/ctx-plan-import` | Import Claude Code plan files into project specs | | `/ctx-implement` | Execute a plan step-by-step with verification | | `/ctx-worktree` | Manage parallel agent worktrees | @@ -390,7 +391,6 @@ These are infrastructure: used in scripts, CI, or one-time setup. | `ctx site` | Site management commands | | `ctx config` | Manage runtime configuration profiles | | `ctx system` | System diagnostics and hook commands | -| `ctx backup` | Back up context and Claude data to tar.gz / SMB | | `ctx completion` | Generate shell autocompletion scripts | !!! tip "Rule of Thumb" diff --git a/docs/home/configuration.md b/docs/home/configuration.md index 528951b68..d11b49d87 100644 --- a/docs/home/configuration.md +++ b/docs/home/configuration.md @@ -43,9 +43,12 @@ my-project/ └── src/ ``` -`ctx` looks for `.ctxrc` in the current working directory when any command runs. -There is no global or user-level config file: Configuration is always -per-project. +`ctx` reads `.ctxrc` from the **project root** (*i.e. the parent of +`CTX_DIR`, or `dirname(CTX_DIR)/.ctxrc`*). It does not walk up from CWD. +That means whichever project you've activated via `eval "$(ctx activate)"` +(or by exporting `CTX_DIR` directly), its paired `.ctxrc` is what governs the +invocation. There is no global or user-level config file: configuration is +always per-project. !!! note "Contributors: Dev Configuration Profile" The ctx repo ships two `.ctxrc` source profiles (`.ctxrc.base` and @@ -53,13 +56,14 @@ per-project. via `ctx config switch dev` / `ctx config switch base`. See [Contributing: Configuration Profiles](contributing.md#configuration-profiles). -!!! 
tip "Using a Different .context Directory" - The default `.context/` directory can be changed per-project via the - `context_dir` key in `.ctxrc`, the `CTX_DIR` environment variable, or the - `--context-dir` CLI flag. +!!! tip "Using a Different `.context` Directory" + The context directory is declared via the `CTX_DIR` environment variable; + not via `.ctxrc`. `ctx` does not walk the filesystem; every non-exempt + command requires `CTX_DIR` to be set. Use `eval "$(ctx activate)"` to + bind it for your shell. `CTX_DIR` must be an absolute path with + `.context` as its basename. - See [Environment Variables](#environment-variables) - and [CLI Global Flags](#cli-global-flags) below for details. + See [Environment Variables](#environment-variables) below for details. ### Full Reference @@ -73,12 +77,10 @@ A commented `.ctxrc` showing all options and their defaults: # All settings are optional. Missing values use defaults. # Priority: CLI flags > environment variables > .ctxrc > defaults # -# context_dir: .context # token_budget: 8000 # auto_archive: true # archive_after_days: 7 # scratchpad_encrypt: true -# allow_outside_cwd: false # event_log: false # entry_count_learnings: 30 # entry_count_decisions: 20 @@ -130,12 +132,10 @@ A commented `.ctxrc` showing all options and their defaults: | Option | Type | Default | Description | |-------------------------|------------|---------------|-------------------------------------------------------------------------------------------------------------------------------------------| -| `context_dir` | `string` | `.context` | Context directory name (relative to project root) | | `token_budget` | `int` | `8000` | Default token budget for `ctx agent` and `ctx load` | | `auto_archive` | `bool` | `true` | Auto-archive completed tasks during `ctx compact` | | `archive_after_days` | `int` | `7` | Days before completed tasks are archived | | `scratchpad_encrypt` | `bool` | `true` | Encrypt scratchpad with AES-256-GCM | -| 
`allow_outside_cwd` | `bool` | `false` | Allow context directory outside the current working directory | | `event_log` | `bool` | `false` | Enable local hook event logging to `.context/state/events.jsonl` | | `entry_count_learnings` | `int` | `30` | Drift warning when `LEARNINGS.md` exceeds this entry count (0 = disable) | | `entry_count_decisions` | `int` | `20` | Drift warning when `DECISIONS.md` exceeds this entry count (0 = disable) | @@ -180,10 +180,10 @@ behind this ordering. Environment variables override `.ctxrc` values but are overridden by CLI flags. -| Variable | Description | Equivalent `.ctxrc` key | -|--------------------|---------------------------------------------|-------------------------| -| `CTX_DIR` | Override the context directory path | `context_dir` | -| `CTX_TOKEN_BUDGET` | Override the default token budget | `token_budget` | +| Variable | Description | Equivalent `.ctxrc` key | +|--------------------|-------------------------------------------------------------|-------------------------| +| `CTX_DIR` | Declare the context directory path (required, no fallback) | *(none)* | +| `CTX_TOKEN_BUDGET` | Override the default token budget | `token_budget` | ### Examples @@ -203,22 +203,17 @@ CTX_TOKEN_BUDGET=16000 ctx agent CLI flags have the highest priority and override both environment variables and `.ctxrc` settings. These flags are available on every `ctx` command. -| Flag | Description | -|------------------------|-----------------------------------------------------------| -| `--context-dir ` | Override context directory (default: `.context/`) | -| `--allow-outside-cwd` | Allow context directory outside current working directory | -| `--tool ` | Override active AI tool identifier (e.g. 
`kiro`, `cursor`) | -| `--version` | Show version and exit | -| `--help` | Show command help and exit | +| Flag | Description | +|-----------------|------------------------------------------------------------| +| `--tool ` | Override active AI tool identifier (e.g. `kiro`, `cursor`) | +| `--version` | Show version and exit | +| `--help` | Show command help and exit | ### Examples ```bash -# Point to a different context directory: -ctx status --context-dir /path/to/shared/.context - -# Allow external context directory (skips boundary check): -ctx status --context-dir /mnt/nas/project-context --allow-outside-cwd +# Point to a different context directory inline: +CTX_DIR=/path/to/project/.context ctx status ``` --- @@ -233,20 +228,18 @@ CLI flags > Environment variables > .ctxrc > Built-in defaults (highest) (lowest) ``` -**Example resolution for `context_dir`:** - -| Layer | Value | Wins? | -|--------------------|--------------------|-------| -| `--context-dir` | `/tmp/ctx` | Yes | -| `CTX_DIR` | `/shared/context` | No | -| `.ctxrc` | `.my-context` | No | -| Default | `.context` | No | +The context directory itself is resolved differently: it lives *outside* +this priority chain. `CTX_DIR` (env) must be declared; `.ctxrc` does not +carry a fallback for it, and there is no built-in default. See +[Activating a Context Directory](../recipes/activating-context.md). -The CLI flag `/tmp/ctx` is used because it has the highest priority. +**Example resolution for `token_budget`:** -If the CLI flag were absent, `CTX_DIR=/shared/context` would win. If neither -the flag nor the env var were set, the `.ctxrc` value `.my-context` would be -used. With nothing configured, the default `.context` applies. +| Layer | Value | Wins? | +|--------------------|--------|-------| +| `CTX_TOKEN_BUDGET` | `4000` | Yes | +| `.ctxrc` | `8000` | No | +| Default | `8000` | No | --- @@ -254,14 +247,24 @@ used. With nothing configured, the default `.context` applies. 
### External `.context` Directory -Store context outside the project tree (*useful for monorepos or shared context*): +Store a project's context outside the project tree (*useful when a +repo is read-only, or when you want to keep notes adjacent rather +than checked in*). Declare the path via `CTX_DIR`: -```yaml -# .ctxrc -context_dir: /home/team/shared-context -allow_outside_cwd: true +```bash +export CTX_DIR=/home/you/ctx-stores/my-project/.context ``` +!!! warning "One `.context/` per project" + The parent of the context directory is the project root by + contract: `ctx sync`, `ctx drift`, and the memory-drift hook + all read the codebase from `filepath.Dir(ContextDir())`. + Pointing two projects at the same `.context/` directory will + collide their journals, state, and secrets. To share knowledge + (CONSTITUTION / CONVENTIONS / ARCHITECTURE) across projects, + use [`ctx hub`](../recipes/hub-overview.md), not a shared + `.context/`. + ### Custom Token Budget Increase the token budget for projects with large context: diff --git a/docs/home/context-files.md b/docs/home/context-files.md index 57af67496..d9fc14609 100644 --- a/docs/home/context-files.md +++ b/docs/home/context-files.md @@ -37,9 +37,9 @@ context packet: Two subdirectories under `.context/` are **implementation details** that are user-editable but not part of the priority read order: -- **`.context/templates/`** — format templates for `ctx add decision` +- **`.context/templates/`**: format templates for `ctx add decision` and `ctx add learning`. See [templates](#templates) below. -- **`.context/steering/`** — behavioral rules with YAML frontmatter +- **`.context/steering/`**: behavioral rules with YAML frontmatter that get synced into each AI tool's native config. See [steering](#steering) below, and the full [Steering files](steering.md) page for the design and workflow. 
@@ -51,10 +51,10 @@ Two other moving parts are often confused with context files but are - **Skills** live in `.claude/skills/` (project-local) or are provided by the installed `ctx` plugin. A typical project doesn't see the - plugin's skills at all — they ride with the plugin and are owned by + plugin's skills at all; they ride with the plugin and are owned by its update cycle. See [`ctx skill`](../cli/skill.md) and [Skills reference](../reference/skills.md). -- **Hooks** are Claude Code `PreToolUse`/`PostToolUse`/ +- **Hooks**: Claude Code `PreToolUse`/`PostToolUse`/ `UserPromptSubmit` entries configured in `.claude/settings.json` or shipped by a plugin. The `ctx` plugin registers its own hooks automatically; **a typical project does not author hooks by hand**, @@ -487,8 +487,8 @@ LEARNINGS.md. `ctx init` deploys two starter templates: -- `decision.md` — sections: Context, Rationale, Consequence -- `learning.md` — sections: Context, Lesson, Application +- `decision.md`: sections Context, Rationale, Consequence +- `learning.md`: sections Context, Lesson, Application ### Customizing @@ -516,10 +516,10 @@ each tool's config. 
`ctx init` scaffolds four foundation files: -- `product.md` — who this project serves and why -- `tech.md` — the technology stack and its constraints -- `structure.md` — how the code is organized -- `workflow.md` — how work moves through the system +- `product.md`: who this project serves and why +- `tech.md`: the technology stack and its constraints +- `structure.md`: how the code is organized +- `workflow.md`: how work moves through the system Each file carries YAML frontmatter describing **when** it applies (always, matching prompts, or manually referenced) and **what** tool diff --git a/docs/home/contributing.md b/docs/home/contributing.md index 33e7d44a7..aa7609a70 100644 --- a/docs/home/contributing.md +++ b/docs/home/contributing.md @@ -109,7 +109,7 @@ ctx/ ├── docs/ # Documentation site source ├── editors/ # Editor extensions (VS Code) ├── examples/ # Example configurations -├── hack/ # Build scripts and [runbooks](../operations/index.md#runbooks) +├── hack/ # Build scripts ├── specs/ # Feature specifications └── .context/ # ctx's own context (dogfooding) ``` @@ -139,7 +139,6 @@ never distributed to users. |------------------------------|---------------------------------------------------------------| | `/_ctx-absorb` | Merge deltas from a parallel worktree or separate checkout | | `/_ctx-audit` | Detect code-level drift after YOLO sprints or before releases | -| `/_ctx-backup` | Backup context and Claude data to SMB share | | `/_ctx-qa` | Run QA checks before committing | | `/_ctx-release` | Run the full release process | | `/_ctx-release-notes` | Generate release notes for `dist/RELEASE_NOTES.md` | @@ -153,7 +152,7 @@ and are now available to all ctx users: `/ctx-brainstorm`, `/ctx-link-check`, ---- -## How To Add Things +## How to Add Things ### Adding a New CLI Command @@ -188,7 +187,7 @@ happy. Every CLI command's user-facing output lives in its own sub-package under `internal/write//`. 
Output functions accept -`*cobra.Command` and call `cmd.Println(...)` — never `fmt.Print*` +`*cobra.Command` and call `cmd.Println(...)`, never `fmt.Print*` directly. All text strings are loaded from YAML via `desc.Text(text.DescKey*)`, never inline. @@ -214,7 +213,7 @@ internal/err/config/config.go # errors for configuration internal/err/cli/cli.go # errors for CLI argument validation ``` -#### Config constants: `internal/config/` +#### Config Constants: `internal/config/` Pure-constant leaf packages with zero internal dependencies (stdlib only). Over 60 sub-packages, organized by domain. See @@ -230,7 +229,7 @@ only). Over 60 sub-packages, organized by domain. See | User-facing text YAML keys | `config/embed/text/.go` | | Time durations, thresholds | `config//` | -#### The assets pipeline +#### The Assets Pipeline User-facing text flows through a three-level chain: @@ -258,7 +257,7 @@ new AI tool (e.g. Aider, Cursor): Pattern to follow: the Claude Code JSONL parser in `internal/journal/parser/`. -!!! note "Multilingual session headers" +!!! note "Multilingual Session Headers" The Markdown parser recognizes session header prefixes configured via `session_prefixes` in `.ctxrc` (default: `Session:`). To support a new language, users add a prefix to their `.ctxrc` - no code change needed. @@ -319,7 +318,7 @@ make plugin-reload # nukes ~/.claude/plugins/cache/activememory-ctx/ The plugin will be re-installed from your local marketplace on startup. No version bump is needed during development. -!!! tip "Version bumps are for releases, not iteration" +!!! tip "Version Bumps Are for Releases, Not Iteration" Only bump `VERSION`, `plugin.json`, and `marketplace.json` when cutting a release. During development, `make plugin-reload` is all you need. @@ -349,17 +348,12 @@ See [Configuration](configuration.md) for the full `.ctxrc` option reference. 
### Backups -Back up project context and global Claude Code data with: - -```bash -ctx backup # both project + global (default) -ctx backup --scope project # .context/, .claude/, ideas/ only -ctx backup --scope global # ~/.claude/ only -``` - -Archives are saved to `/tmp/`. When `CTX_BACKUP_SMB_URL` is configured, -they are also copied to an SMB share. See -[CLI Reference: backup](../cli/backup.md) for details. +`ctx` does not ship a backup command. File-level backup is an OS / +infrastructure concern; `ctx hub` handles the cross-machine +knowledge persistence that matters most. For everything else, see +[Backup Strategy](../operations/runbooks/backup-strategy.md): +rsync, Time Machine, Borg, or whichever tool already handles the +rest of your files. ### Running Tests @@ -423,9 +417,9 @@ Examples: * Follow Go conventions (`gofmt`, `go vet`); * Keep functions **focused** and **small**; * Add tests for new functionality; -* Handle errors explicitly — use descriptive names (`readErr`, +* Handle errors explicitly; use descriptive names (`readErr`, `writeErr`) not repeated `err`; -* No magic strings — all repeated literals go in `internal/config/`; +* No magic strings: all repeated literals go in `internal/config/`; * Output goes through `internal/write/` packages, not `fmt.Print*`; * Errors go through `internal/err/` constructors, not inline `fmt.Errorf`; diff --git a/docs/home/faq.md b/docs/home/faq.md index aa472d143..e0c98e7a6 100644 --- a/docs/home/faq.md +++ b/docs/home/faq.md @@ -19,7 +19,7 @@ terminal, a browser, or a code review. There's no schema to learn, no binary format to decode, no vendor lock-in. You can inspect your context with `cat`, diff it with `git diff`, and review it in a PR. -## Does ctx work offline? +## Does `ctx` Work Offline? Yes. ctx is completely local. It reads and writes files on disk, generates context packets from local state, and requires no network @@ -27,7 +27,7 @@ access. 
The only feature that touches the network is the optional [webhook notifications](../recipes/webhook-notifications.md) hook, which you have to explicitly configure. -## What gets committed to git? +## What Gets Committed to Git? The `.context/` directory: yes, commit it. That's the whole point. Team members and AI agents read the same context files. @@ -42,7 +42,7 @@ What **not** to commit: commit if you want shared scratchpad state. See [Scratchpad](../reference/scratchpad.md) for details. -## How big should my token budget be? +## How Big Should My Token Budget Be? The default is 8000 tokens, which works well for most projects. Configure it via `.ctxrc` or the `CTX_TOKEN_BUDGET` environment @@ -65,7 +65,7 @@ content first, so CONSTITUTION and TASKS always make the cut. See [Configuration](configuration.md) for all available settings. -## Why not a database? +## Why Not a Database? Files are inspectable, diffable, and reviewable in pull requests. You can `grep` them, `cat` them, pipe them through `jq` or `awk`. @@ -75,7 +75,7 @@ A database would add a dependency, require migrations, and make context opaque. The design bet is that context should be as visible and portable as the code it describes. -## Does it work with tools other than Claude Code? +## Does It Work with Tools Other than Claude Code? Yes. `ctx agent` outputs a context packet that any AI tool can consume: paste it into ChatGPT, Cursor, Copilot, Aider, or anything @@ -89,7 +89,7 @@ instruction files or manual pasting. See [Integrations](../operations/integrations.md) for tool-specific setup, including the [multi-tool recipe](../recipes/multi-tool-setup.md). -## Can I use ctx on an existing project? +## Can I Use `ctx` on an Existing Project? Yes. Run `ctx init` in any repo and it creates `.context/` with template files. 
Start recording decisions, tasks, and conventions as @@ -100,7 +100,7 @@ See [Getting Started](getting-started.md) for the full setup flow, or [Joining a ctx Project](joining-a-project.md) if someone else already initialized it. -## What happens when context files get too big? +## What Happens When Context Files Get Too Big? Token budgeting handles this automatically. `ctx agent` prioritizes content by file priority (CONSTITUTION first, GLOSSARY last) and @@ -113,7 +113,7 @@ old entries, keeping active context lean. You can also run The goal is to keep context files focused on **current** state. Historical entries belong in git history or the archive. -## Is .context/ meant to be shared? +## Is `.context/` Meant to Be Shared? Yes. Commit it to your repo. Every team member and every AI agent reads the same files. That's the mechanism for shared memory: diff --git a/docs/home/first-session.md b/docs/home/first-session.md index e7c0472d7..ebbd9ac30 100644 --- a/docs/home/first-session.md +++ b/docs/home/first-session.md @@ -125,12 +125,12 @@ This loads your context and presents a structured confirm the agent knows what is going on. Context also loads automatically via hooks, but the explicit ceremony gives you a **readback** to verify. -!!! tip "Steering files fire automatically" +!!! tip "Steering Files Fire Automatically" If you edited the four foundation files scaffolded by `ctx init` (`.context/steering/product.md`, `tech.md`, `structure.md`, `workflow.md`), their `inclusion: always` rules are prepended to **every** tool call via - the plugin's `PreToolUse` hook — no `/ctx-remember` + the plugin's `PreToolUse` hook, with no `/ctx-remember` needed, no MCP call. Edit a file, save, and the next tool call in Claude Code picks it up. See [Steering files](steering.md) for details on the @@ -262,7 +262,7 @@ You should **`.gitignore`** the generated and sensitive paths: .claude/settings.local.json ``` -!!! tip "`ctx init` Patches Your .gitignore for You" +!!! 
tip "`ctx init` Patches Your .gitignore for You" `ctx init` automatically adds these entries to your `.gitignore`. Review the additions with `cat .gitignore` after init. diff --git a/docs/home/getting-started.md b/docs/home/getting-started.md index 954527cd6..e377bcd59 100644 --- a/docs/home/getting-started.md +++ b/docs/home/getting-started.md @@ -202,7 +202,7 @@ For Claude Code, install the [ctx plugin](../operations/integrations.md#claude-c for automatic hooks and skills. **`ctx init` also scaffolds four *foundation steering files*** in -`.context/steering/` — these are behavioral-rule templates that +`.context/steering/`; these are behavioral-rule templates that tell your AI how to act on your project: | File | What it captures | @@ -215,12 +215,12 @@ tell your AI how to act on your project: Each file starts with a self-documenting HTML comment explaining the three inclusion modes (`always` / `auto` / `manual`), priority, and tool scoping. The defaults are set -to `inclusion: always` and `priority: 10` — they fire on +to `inclusion: always` and `priority: 10`, so they fire on every AI tool call until you edit them. **You should open each of these files and replace the placeholder content with your project's actual rules.** -Running `ctx init` again won't clobber your edits — existing +Running `ctx init` again won't clobber your edits; existing files are left alone. To opt out entirely, use `ctx init --no-steering-init`. @@ -253,7 +253,7 @@ For other tools, paste the output of: ctx agent --budget 8000 ``` -### 3b. Set Up for Your AI Tool +### 3B. Set Up for Your AI Tool If you use an MCP-compatible tool, generate the integration config with `ctx setup`: @@ -297,11 +297,11 @@ better agent behavior. The investment is small and the benefits compound over sessions: * **[Gemini Search](https://github.com/nicobailon/gemini-code-search-mcp)** - — grounded web search with citations. Skills like `/ctx-code-review` +: grounded web search with citations. 
Skills like `/ctx-code-review` and `/ctx-explain` use it for up-to-date documentation lookups instead of relying on training data. -* **[GitNexus](https://github.com/nicobailon/gitnexus-mcp)** — code +* **[GitNexus](https://github.com/nicobailon/gitnexus-mcp)**: code knowledge graph with symbol resolution, blast radius analysis, and domain clustering. Skills like `/ctx-refactor` and `/ctx-code-review` use it for impact analysis and dependency awareness. diff --git a/docs/home/hub.md b/docs/home/hub.md index f9d3fe16c..92a80368d 100644 --- a/docs/home/hub.md +++ b/docs/home/hub.md @@ -5,17 +5,17 @@ # \ Copyright 2026-present Context contributors. # SPDX-License-Identifier: Apache-2.0 -title: The ctx Hub +title: Hub icon: lucide/network --- ![ctx](../images/ctx-banner.png) -## The `ctx` Hub +## Sharing Is Caring `ctx` projects are normally **independent**: each project has its own `.context/` directory, its own decisions, its own learnings, -its own journal. That's the right default — most work is +its own journal. That's the right default, since most work is project-local, and mixing context across projects tends to dilute more than it helps. @@ -26,10 +26,10 @@ A is the same gotcha waiting for you in service B. The **`ctx` Hub** is the feature that makes those specific entries travel, without replicating everything else. -## What the Hub actually is +## What the Hub Actually Is In one paragraph: the `ctx` Hub is a **fan-out channel** for -four specific kinds of structured entries — `decision`, +four specific kinds of structured entries: `decision`, `learning`, `convention`, and `task`. You publish an entry with `ctx add --share` in one project, and it appears in `.context/hub/` for every other project subscribed to that @@ -52,11 +52,11 @@ If you want "my agent in project B sees everything my agent did in project A," that's not the Hub. Local session density stays local. -## Who it's for +## Who It's For Two shapes, same mechanics, different trust models. 
-### Personal cross-project brain +### Personal Cross-Project Brain **One developer, many projects.** You want a learning from project A to show up when you open project B a week later. You @@ -64,7 +64,7 @@ want a convention you codified in your dotfiles project to be visible everywhere else on your workstation. Run a Hub on localhost, register each project, done. -### Small trusted team +### Small Trusted Team **A few teammates on a LAN or a hub.ctx-like self-hosted server.** You want team conventions to propagate without a @@ -77,11 +77,11 @@ The Hub is **not** a multi-tenant public service. It assumes everyone holding a client token is friendly. Don't stand up `hub.example.com` for untrusted participants. -## Going further +## Going Further -- **First-time setup:** [Hub: Getting Started](../recipes/hub-getting-started.md) — +- **First-time setup:** [Hub: Getting Started](../recipes/hub-getting-started.md), a five-minute walkthrough on localhost. -- **Mental model and user stories:** [Hub Overview](../recipes/hub-overview.md) — +- **Mental model and user stories:** [Hub Overview](../recipes/hub-overview.md), what flows, what doesn't, and when not to use it. - **Team / LAN deployment:** [Multi-machine setup](../recipes/hub-multi-machine.md). - **Redundancy:** [HA cluster](../recipes/hub-cluster.md). diff --git a/docs/home/index.md b/docs/home/index.md index 441db5467..2b23712d1 100644 --- a/docs/home/index.md +++ b/docs/home/index.md @@ -12,63 +12,53 @@ icon: lucide/home *Deterministic. Git-native. Human-readable. Local-first*. -**Start here**. +**Start here**. Learn what `ctx` does, set it up, and run your first session. !!! warning "Pre-1.0: Moving Fast" `ctx` is under active development. This website tracks the - **development branch**, not the latest release: + **development branch**, not the latest release: - Some features described here may not exist in the binary + Some features described here may not exist in the binary you have installed. 
- Expect rough edges. + Expect rough edges. If something is missing or broken, [open an issue](https://github.com/ActiveMemory/ctx/issues). --- -### [About ctx](about.md) +## Introduction + +### [About](about.md) What `ctx` is, how it works, and why **persistent context changes** how you work with AI. ---- - ### [Is It Right for Me?](is-ctx-right.md) Good fit, not-so-good fit, and a **5-minute trial** to find out for yourself. ---- - -### [Community](community.md) +### [FAQ](faq.md) -We are the builders who care about **durable** context.
-Join the community. Hang out in IRC. Star `ctx` on GitHub. +Quick answers to the questions newcomers ask most about +**`ctx`**, files, tooling, and trade-offs. --- -### [Contributing](contributing.md) - -**Development setup**, project layout, and pull request process. - ---- +## Get Started ### [Getting Started](getting-started.md) Install the **binary**, set up the **plugin**, and **verify** it works. ---- - ### [Your First Session](first-session.md) **Step-by-step** walkthrough from `ctx init` to verified recall. ---- - ### [Common Workflows](common-workflows.md) Day-to-day commands for **tracking** context, **checking** health, @@ -76,19 +66,69 @@ and browsing **history**. --- +## Concepts + ### [Context Files](context-files.md) What each `.context/` file does. What's their **purpose**. How do we best **leverage** them. ---- - ### [Configuration](configuration.md) Flexible **configuration**: `.ctxrc`, environment variables, and CLI flags. +### [Hub](hub.md) + +A **fan-out channel** for decisions, learnings, conventions, and +tasks that need to cross **project boundaries**, without replicating +everything else. + --- +## Working with AI + ### [Prompting Guide](prompting-guide.md) **Effective prompts** for AI sessions with `ctx`. + +### [Keeping AI Honest](keeping-ai-honest.md) + +AI agents **confabulate**: they invent history, claim familiarity +with decisions never made, and sometimes declare tasks complete +when they aren't. Tools and habits to push back. + +### [My AI Keeps Making the Same Mistakes](repeated-mistakes.md) + +Stop **rediscovering** the same bugs and dead-ends across sessions. + +### [Joining a Project](joining-a-project.md) + +You inherited a `.context/` directory. Get **oriented fast**: +priority order, what to read first, how to ramp up. + +--- + +## Customization + +### [Steering Files](steering.md) + +Tell the assistant **how to behave** when a specific kind +of prompt arrives. 
+ +### [Lifecycle Triggers](triggers.md) + +Make things **happen** at session boundaries: block dangerous +tool calls, inject standup notes, log file saves. + +--- + +## Community + +### [#ctx](community.md) + +We are the builders who care about **durable** context.
+Join the community. Hang out in IRC. Star `ctx` on GitHub. + +### [Contributing](contributing.md) + +**Development setup**, project layout, and pull request process. diff --git a/docs/home/joining-a-project.md b/docs/home/joining-a-project.md index 8655ed43a..2b44f1ba7 100644 --- a/docs/home/joining-a-project.md +++ b/docs/home/joining-a-project.md @@ -5,7 +5,7 @@ # \ Copyright 2026-present Context contributors. # SPDX-License-Identifier: Apache-2.0 -title: Joining a ctx Project +title: Joining a Project icon: lucide/user-plus --- diff --git a/docs/home/keeping-ai-honest.md b/docs/home/keeping-ai-honest.md index 5b16fb0cd..96c68619d 100644 --- a/docs/home/keeping-ai-honest.md +++ b/docs/home/keeping-ai-honest.md @@ -51,7 +51,7 @@ does not exist, the AI is hallucinating - and you know immediately. This is **grounded memory**: claims that trace back to artifacts you control and can audit. -## CONSTITUTION.md: Hard Guardrails +## `CONSTITUTION.md`: Hard Guardrails CONSTITUTION.md defines rules the AI must treat as inviolable. These are not suggestions or best practices - they are constraints that @@ -116,14 +116,14 @@ context files honest the same way tests keep your code honest. The `/ctx-commit` skill includes a built-in verification step: before staging, it maps claims to evidence and runs self-audit questions to surface gaps. This catches inconsistencies at the point where they -matter most — right before code is committed. +matter most: right before code is committed. This closes the loop. You write context. The AI reads context. The verification step confirms that context still matches reality. When it does not, you fix it - and the next session starts from truth, not from drift. -## Trust Through Structure +## Trust through Structure The common thread across all of these mechanisms is **structure over prose**. Timestamps make claims verifiable. 
Constitutional rules make diff --git a/docs/home/prompting-guide.md b/docs/home/prompting-guide.md index 5d80ffe6b..8f4631a19 100644 --- a/docs/home/prompting-guide.md +++ b/docs/home/prompting-guide.md @@ -64,7 +64,7 @@ This guide documents prompts that **reliably** produce **good results**. ## Session Start -### "*Do you remember?*" +### "*do you remember?*" Triggers the AI to silently read `TASKS.md`, `DECISIONS.md`, `LEARNINGS.md`, and check recent history via `ctx journal` before @@ -95,7 +95,7 @@ protocol (*including the failure modes, the timing problem, and the hook design that solved it*) see [The Dog Ate My Homework](../blog/2026-02-25-the-homework-problem.md). -### "*What's the current state?*" +### "*What's the Current State?*" Prompts reading of `TASKS.md`, recent sessions, and status overview. @@ -111,7 +111,7 @@ Use this when **resuming work** after a break. ## During Work -### "*Why doesn't X work?*" +### "*Why Doesn't X Work?*" This triggers **root cause analysis** rather than surface-level fixes. @@ -126,7 +126,7 @@ through code, check configurations, and identify the actual cause. This was a fix that benefited all users of `ctx`. -### "*Is this consistent with our decisions?*" +### "*Is This Consistent with Our Decisions?*" This prompts checking `DECISIONS.md` before implementing. @@ -137,7 +137,7 @@ Use this before making architectural choices. * "*Check if we've decided on this before*" * "*Does this align with our conventions?*" -### "*What would break if we...*" +### "*What Would Break If We...*" This triggers **defensive thinking** and **impact analysis**. @@ -147,7 +147,7 @@ Use this before making significant changes. What would break if we change the Settings struct? ``` -### "*Before you start, read X*" +### "*Before You Start, Read X*" This ensures specific context is loaded before work begins. @@ -218,7 +218,7 @@ Use them **in the moment** when you see the behavior. 
## Reflection and Persistence -### "*What did we learn?*" +### "*What Did We Learn?*" This prompts **reflection** on the session and often triggers adding learnings to `LEARNINGS.md`. @@ -228,7 +228,7 @@ Use this after completing a task or debugging session. This is an **explicit reflection prompt**. The AI will summarize insights and often offer to persist them. -### "*Add this as a learning/decision*" +### "*Add This as a Learning/Decision*" This is an **explicit persistence request**. @@ -242,7 +242,7 @@ Add this as a learning. # and let the AI autonomously infer and summarize. ``` -### "*Save context before we end*" +### "*Save Context Before We End*" This triggers **context persistence** before the session closes. @@ -260,7 +260,7 @@ Use it at the end of the session or before switching topics. ## Exploration and Research -### "Explore the codebase for X" +### "*Explore the Codebase for X*" This triggers thorough codebase search rather than guessing. @@ -269,7 +269,7 @@ Use this when you need to understand how something works. This works because "**Explore**" signals that **investigation is needed**, not immediate action. -### "*How does X work in this codebase?*" +### "*How Does X Work in This Codebase?*" This prompts reading actual code rather than explaining general concepts. @@ -279,7 +279,7 @@ Use this to understand the existing implementation. How does session saving work in this codebase? ``` -### "*Find all places where X*" +### "*Find All Places Where X*" This triggers a **comprehensive search** across the codebase. @@ -289,14 +289,14 @@ Use this before refactoring or understanding the impact. ## Meta and Process -### "*What should we document from this?*" +### "*What Should We Document from This?*" This prompts identifying learnings, decisions, and conventions worth persisting. Use this after complex discussions or implementations. 
-### "*Is this the right approach?*" +### "*Is This the Right Approach?*" This invites the AI to challenge the current direction. @@ -311,7 +311,7 @@ AIs often default to agreeing; this prompt signals you want an This sets the tone for the entire session: The AI will flag questionable choices proactively instead of waiting to be asked. -### "*What am I missing?*" +### "*What Am I Missing?*" This prompts thinking about **edge cases**, overlooked requirements, or **unconsidered approaches**. @@ -378,7 +378,7 @@ Use `ctx` skills by name: Skills combine a prompt, tool permissions, and domain knowledge into a single invocation. -!!! info "Skills Beyond Claude Code" +!!! info "Skills beyond Claude Code" The `/slash-command` syntax above is Claude Code native, but the underlying `SKILL.md` files are a standard markdown format that any agent can consume. If you use a different coding agent, consult its @@ -427,7 +427,7 @@ evidence. ## Safety Invariants -!!! warning "These are **Invariants**: Not Suggestions" +!!! warning "These Are **Invariants**: Not Suggestions" A prompting guide earns its trust by **being honest about risk**. These four rules mentioned below don't change with model versions, agent diff --git a/docs/home/repeated-mistakes.md b/docs/home/repeated-mistakes.md index 0a72400ce..d4fbcb6dc 100644 --- a/docs/home/repeated-mistakes.md +++ b/docs/home/repeated-mistakes.md @@ -30,7 +30,7 @@ persistent context, every session starts with amnesia. `ctx` gives your AI three files that directly prevent repeated mistakes, each targeting a different failure mode. -### DECISIONS.md: Stop Relitigating Settled Choices +### `DECISIONS.md`: Stop Relitigating Settled Choices When you make an architectural decision, record it with rationale and rejected alternatives. The AI reads this at session start and treats @@ -56,7 +56,7 @@ Next session, when the AI considers auth, it reads this entry and builds on the decision instead of re-debating it. 
If someone asks "why not sessions?", the rationale is already there. -### LEARNINGS.md: Capture Gotchas Once +### `LEARNINGS.md`: Capture Gotchas Once Learnings are the bugs, quirks, and non-obvious behaviors that cost you time the first time around. Write them down so they cost you zero time @@ -81,7 +81,7 @@ for SQLite builds. Never set CGO_ENABLED=0. Without this entry, the next session that touches the Dockerfile will hit the same wall. With it, the AI knows before it starts. -### CONSTITUTION.md: Draw Hard Lines +### `CONSTITUTION.md`: Draw Hard Lines Some mistakes are not about forgetting - they are about boundaries the AI should never cross. CONSTITUTION.md sets inviolable rules. diff --git a/docs/home/steering.md b/docs/home/steering.md index c8bc69a82..74df431b2 100644 --- a/docs/home/steering.md +++ b/docs/home/steering.md @@ -5,17 +5,17 @@ # \ Copyright 2026-present Context contributors. # SPDX-License-Identifier: Apache-2.0 -title: Steering files +title: Steering Files icon: lucide/compass --- ![ctx](../images/ctx-banner.png) -## Steering files +## Steering Files `ctx` projects talk to AI assistants through several layers -— context files, decisions, conventions, the agent context -packet — but none of those can tell the assistant *how to +(context files, decisions, conventions, the agent context +packet) but none of those can tell the assistant *how to behave* when a specific kind of prompt arrives. That's what **steering files** are for. @@ -27,7 +27,7 @@ and syncs them out to each AI tool's native config (Claude Code, Cursor, Kiro, Cline) so the rules actually land in the prompt pipeline. -## Not the same as decisions or conventions +## Not the Same as Decisions or Conventions The three look similar on disk but serve different purposes: @@ -40,7 +40,7 @@ The three look similar on disk but serve different purposes: If you find yourself writing "the AI should always do X when asked about Y," that belongs in steering, not decisions. 
-## Your first steering files +## Your First Steering Files **`ctx init` scaffolds four foundation steering files** in `.context/steering/` so you start with something to edit @@ -56,15 +56,15 @@ rather than an empty directory: Each file starts with an inline HTML comment explaining the three inclusion modes, priority semantics, and tool scoping. The comment is invisible in rendered markdown but visible -when you open the file to edit it — it's self-documenting +when you open the file to edit it; it's self-documenting scaffolding, not forever guidance. Delete the comment once you've customized the file. Default settings for foundation files: -- `inclusion: always` — fires on every AI tool call -- `priority: 10` — injected near the top of the prompt -- `tools: []` — applies to every configured AI tool +- `inclusion: always`: fires on every AI tool call +- `priority: 10`: injected near the top of the prompt +- `tools: []`: applies to every configured AI tool **You should open each of these files and replace the placeholder content with your project's actual rules.** @@ -72,7 +72,7 @@ Re-running `ctx init` is safe: existing files are left alone, so your edits survive. Use `ctx init --no-steering-init` to opt out of the scaffold entirely. -## Inclusion modes +## Inclusion Modes Each steering file declares an inclusion mode in its frontmatter: @@ -87,7 +87,7 @@ frontmatter: because the two tool families consume steering very differently. -**Claude Code and Codex** — prefer `inclusion: always` +**Claude Code and Codex**: prefer `inclusion: always` for rules that must fire reliably. These tools have two delivery channels: @@ -98,16 +98,16 @@ delivery channels: automatically when the ctx plugin is installed. Claude can call this tool mid-task to fetch `auto` or `manual` files matching a specific prompt. Verify - with `claude mcp list` — look for `ctx: ✓ Connected`. + with `claude mcp list`; look for `ctx: ✓ Connected`. 
Use `always` for invariants and anything that **must** fire every session. Use `auto` for situational rules where "Claude fetches this when the prompt is relevant" -is the right behavior — those still land, just on +is the right behavior; those still land, just on Claude's judgment. Use `manual` for reference libraries you'll name explicitly. -**Cursor, Cline, Kiro** — `auto` is the natural default. +**Cursor, Cline, Kiro**: `auto` is the natural default. These tools read `.cursor/rules/`, `.clinerules/`, or `.kiro/steering/` natively and resolve the description match on their own, so `auto` files fire when the prompt @@ -115,19 +115,19 @@ matches. `manual` files load on explicit invocation. `always` still works but consumes context budget on every turn. -**Mixed setups** — if a rule must fire on Claude Code, +**Mixed setups**: if a rule must fire on Claude Code, pick `always`, even if it's overkill for your Cursor setup. The context budget cost is small; the alternative (silently not firing) is worse. -## Two families of AI tools, two delivery paths +## Two Families of AI Tools, Two Delivery Paths Not every AI tool consumes steering the same way. ctx handles two tool families differently, and it's worth knowing which family your editor is in before you wonder why a rule isn't firing. -**Native-rules tools** — **Cursor**, **Cline**, **Kiro** — +**Native-rules tools** (**Cursor**, **Cline**, **Kiro**) have a built-in rules primitive. They read a specific directory (`.cursor/rules/`, `.clinerules/`, `.kiro/steering/`) and apply the rules they find there. @@ -135,7 +135,7 @@ ctx handles these via `ctx steering sync`, which exports your files into the tool-native format. Run `sync` whenever you edit a steering file. -**Hook + MCP tools** — **Claude Code**, **Codex** — have +**Hook + MCP tools** (**Claude Code**, **Codex**) have no native rules primitive, so `ctx steering sync` is a **no-op** for them. 
Instead, ctx delivers steering through two non-sync channels: @@ -150,7 +150,7 @@ two non-sync channels: 2. **On-demand via the `ctx_steering_get` MCP tool**. The ctx MCP server exposes a tool Claude can call mid-task to fetch matching steering files for a specific prompt. - Claude decides when to call it — it's not automatic. + Claude decides when to call it; it's not automatic. Both channels activate when you run `ctx setup claude-code --write`. After that, steering just @@ -158,42 +158,42 @@ works for Claude Code. **Practical takeaway**: -- Using Cursor/Cline/Kiro only? → Run `ctx steering sync` +- Using Cursor/Cline/Kiro only? Run `ctx steering sync` after edits. -- Using Claude Code or Codex only? → Never run `sync`; the +- Using Claude Code or Codex only? Never run `sync`; the hook+MCP pipeline handles it. -- Using both? → Run `sync` for the native-rules tools; the +- Using both? Run `sync` for the native-rules tools; the hook+MCP pipeline covers Claude Code automatically. -## Two shapes of automation: rules and scripts +## Two Shapes of Automation: Rules and Scripts Steering is one of **two** hook-like layers ctx provides for customizing AI behavior. They're complementary: -- **Steering** — *persistent rules* that get prepended to +- **Steering**: *persistent rules* that get prepended to prompts. Declarative, text-only, scored by match. -- **[Triggers](triggers.md)** — *executable shell scripts* +- **[Triggers](triggers.md)**: *executable shell scripts* that fire at lifecycle events. Imperative, runs arbitrary code, gated by exit codes. Pick steering when you want "always remind the AI of X." Pick triggers when you want "do Y when event Z happens." -They can coexist — many projects use both. +They can coexist; many projects use both. 
-## Where to go next +## Where to Go Next -- **[Writing Steering Files](../recipes/steering.md)** — +- **[Writing Steering Files](../recipes/steering.md)**: a six-step walkthrough: scaffold, write the rule, preview matches, list, get-rules-in-front-of-the-AI (two paths depending on tool family), verify. -- **[`ctx steering` reference](../cli/steering.md)** — full +- **[`ctx steering` reference](../cli/steering.md)**: full command, flag, and frontmatter reference; includes the per-tool delivery-mechanism table and a dedicated section on how Claude Code and Codex consume steering. -- **[`ctx setup`](../cli/setup.md)** — configure which AI +- **[`ctx setup`](../cli/setup.md)**: configure which AI tools receive steering. For Cursor/Cline/Kiro this is about sync targets; for Claude Code/Codex it installs the plugin that wires the `PreToolUse` hook and MCP server. -- **[Lifecycle Triggers](triggers.md)** — the imperative +- **[Lifecycle Triggers](triggers.md)**: the imperative companion to steering files. diff --git a/docs/home/triggers.md b/docs/home/triggers.md index a9e71880e..d4b973a9e 100644 --- a/docs/home/triggers.md +++ b/docs/home/triggers.md @@ -5,7 +5,7 @@ # \ Copyright 2026-present Context contributors. # SPDX-License-Identifier: Apache-2.0 -title: Lifecycle triggers +title: Lifecycle Triggers icon: lucide/zap --- @@ -38,9 +38,9 @@ type. | `file-save` | A file is saved | Lint on save, update indices | | `context-add` | A new entry is added to `.context/` | Cross-link, notify, enrich | -## Triggers are arbitrary code — treat them like pre-commit hooks +## Triggers Are Arbitrary Code: Treat Them like Pre-Commit Hooks -!!! warning "Only enable scripts you've read and understand" +!!! warning "Only Enable Scripts You've Read and Understand" A trigger is a shell script with the executable bit set. It runs with the same privileges as your AI tool and receives JSON input on stdin. A malicious or buggy @@ -50,9 +50,9 @@ type. 
`ctx trigger add` intentionally creates new scripts **disabled** (no executable bit). You must `ctx trigger enable ` after reviewing the contents. - That's not a suggestion — it's the security model. + That's not a suggestion; it's the security model. -## Three hook-like layers in ctx +## Three Hook-like Layers in ctx Triggers are one of **three** distinct hook-like concepts in ctx. The names are similar but the owners and use cases are @@ -67,7 +67,7 @@ not: This page is about the first category. The other two run automatically and are invisible to you. -## Triggers vs steering — same problem, different shape +## Triggers vs Steering: Same Problem, Different Shape Triggers are the imperative counterpart to [**steering files**](steering.md). Steering expresses @@ -80,12 +80,12 @@ complementary, not competing: Most projects use both. -## Where to go next +## Where to Go Next -- **[Authoring Lifecycle Triggers](../recipes/triggers.md)** - — walkthrough with security guidance: scaffold, test, +- **[Authoring Lifecycle Triggers](../recipes/triggers.md)**: + walkthrough with security guidance: scaffold, test, enable, iterate. -- **[`ctx trigger` reference](../cli/trigger.md)** — command +- **[`ctx trigger` reference](../cli/trigger.md)**: command reference, trigger type table, input/output contract. -- **[Steering files](steering.md)** — the declarative +- **[Steering files](steering.md)**: the declarative counterpart to triggers. diff --git a/docs/index.md b/docs/index.md index 31fab1939..1b922530b 100644 --- a/docs/index.md +++ b/docs/index.md @@ -5,13 +5,13 @@ # \ Copyright 2026-present Context contributors. # SPDX-License-Identifier: Apache-2.0 -title: The ctx Manifesto +title: Manifesto icon: lucide/flame --- ![ctx](images/ctx-banner.png) -# `ctx` Manifesto +# The `ctx` Manifesto **Creation, not code**. @@ -101,7 +101,7 @@ Vision, goals, and direction are **human responsibilities**. **Nothing** critical should depend on recall. -!!! 
danger "Oral Tradition Does not Scale" +!!! danger "Oral Tradition Does Not Scale" If intent cannot be inspected, it cannot be enforced. --- @@ -189,7 +189,7 @@ Memory heuristics **drift**. ## Verified Reality Is the Scoreboard -!!! danger "Activity is a False Proxy" +!!! danger "Activity Is a False Proxy" Output volume correlates *poorly* with impact. * *Code* is **not** progress. @@ -238,7 +238,7 @@ We build to: ## Failures Are Assets -!!! important "Failure Without Capture is Waste" +!!! important "Failure without Capture Is Waste" **Pain** that does not teach is pure *loss*. **Failures** are *not* erased: They are **preserved**. @@ -280,9 +280,9 @@ A repeated mistake is a missing `ctx` artifact. --- -## Encode Intent Into the Environment +## Encode Intent into the Environment -!!! danger "Goodwill Does not Belong to the Table" +!!! danger "Goodwill Does Not Belong to the Table" *Alignment* that depends on memory will **drift**. *Alignment* **cannot depend on** *memory* or *goodwill*. @@ -364,7 +364,7 @@ Transparent `ctx` **compounds** understanding. ## Continuously Verify the System -!!! warning "Stability is Temporary" +!!! warning "Stability Is Temporary" Every assumption has a half-life: * Models drift. @@ -387,7 +387,7 @@ Transparent `ctx` **compounds** understanding. ## `ctx` Is Leverage -!!! note "Humans are Decision Engines" +!!! note "Humans Are Decision Engines" *Execution* should **not** consume *judgment*. Humans **must not be** typists. diff --git a/docs/operations/autonomous-loop.md b/docs/operations/autonomous-loop.md index 8336ca862..ae67a7b15 100644 --- a/docs/operations/autonomous-loop.md +++ b/docs/operations/autonomous-loop.md @@ -129,7 +129,7 @@ Claude Code has built-in loop support: This is convenient for quick iterations, but be aware of important caveats: -!!! warning "This Loop Is not Pure" +!!! warning "This Loop Is Not Pure" Claude Code's `/loop` runs all iterations **within the same session**. 
This means: @@ -332,7 +332,7 @@ my-project/ └── src/ # Your code ``` -### Sample TASKS.md for Autonomous Loops +### Sample `TASKS.md` for Autonomous Loops ```markdown # Tasks diff --git a/docs/operations/hub-failure-modes.md b/docs/operations/hub-failure-modes.md index f28a3f267..eea628bb6 100644 --- a/docs/operations/hub-failure-modes.md +++ b/docs/operations/hub-failure-modes.md @@ -11,13 +11,13 @@ icon: lucide/alert-triangle ![ctx](../images/ctx-banner.png) -# `ctx` Hub: Failure modes +# `ctx` Hub: Failure Modes What can go wrong, what the system does about it, and what you should do. Complementary to [`ctx` Hub Operations](hub.md). -!!! info "Design posture" +!!! info "Design Posture" The hub is **best-effort knowledge sharing**, not a durable ledger. Local `.context/` files are the source of truth for each project; the hub is a fan-out channel. This framing @@ -25,7 +25,7 @@ should do. Complementary to ## Network -### Client loses connection mid-stream +### Client Loses Connection Mid-Stream **What happens:** `ctx connection listen` detects the EOF, waits with exponential backoff, and reconnects. On reconnect it passes @@ -34,7 +34,7 @@ its last-seen sequence; the hub replays everything newer. **What you should do:** nothing. If reconnects are looping, check firewall state on the hub and `ctx hub status` output. -### Partition — majority side reachable +### Partition: Majority Side Reachable **What happens:** clients routed to the majority side continue to publish and listen. The minority nodes step down to followers @@ -43,7 +43,7 @@ that cannot accept writes (Raft quorum lost). **What you should do:** let it heal. When the partition closes, followers catch up via sequence-based sync automatically. -### Partition — split brain (no quorum) +### Partition: Split Brain (No Quorum) **What happens:** no node holds a majority, so no leader is elected. All nodes become read-only. `ctx connection publish` and @@ -54,7 +54,7 @@ still succeed. 
permanent (e.g., a data center is gone), bootstrap a new cluster from the survivors with `ctx hub peer remove` for the dead nodes. -### Hub unreachable during `ctx add --share` +### Hub Unreachable during `ctx add --share` **What happens:** the local write succeeds; the share step prints a warning and exits non-zero on the share leg only. `--share` is @@ -66,7 +66,7 @@ The hub deduplicates by entry ID. ## Storage -### Disk full on the leader +### Disk Full on the Leader **What happens:** `entries.jsonl` append fails. The hub rejects writes with an error and stays up for read traffic. Clients @@ -74,7 +74,7 @@ retry; followers keep their in-sync status using whatever the leader already wrote. **What you should do:** free disk or grow the volume, then -nothing else — the hub resumes accepting writes on the next +nothing else; the hub resumes accepting writes on the next append attempt. ### Corrupt `entries.jsonl` @@ -88,35 +88,35 @@ earlier line is malformed, the hub refuses to start. line. Move the bad region to a `.quarantine` file, then start. Nothing is ever silently dropped. -### `meta.json` / `entries.jsonl` sequence mismatch +### `meta.json` / `entries.jsonl` Sequence Mismatch **What happens:** the hub refuses to start. This usually means someone copied one file without the other. **What you should do:** restore both files from the same backup, or accept the higher sequence by regenerating `meta.json` from -`entries.jsonl` (manual for now — file a bug). +`entries.jsonl` (manual for now; file a bug). ## Cluster -### Leader crash, clean shutdown +### Leader Crash, Clean Shutdown **What happens:** `ctx hub stop` triggers `stepdown` first, so a new leader is elected before the old one exits. In-flight writes drain. Clients reconnect to the new leader transparently. -### Leader crash, hard fail (kill -9, power loss) +### Leader Crash, Hard Fail (Kill -9, Power Loss) **What happens:** Raft detects the missing heartbeat and elects a new leader within a few seconds. 
Writes the old leader accepted -**but had not yet replicated** can be lost — see the Raft-lite +**but had not yet replicated** can be lost. See the Raft-lite warning in [the cluster recipe](../recipes/hub-cluster.md). **What you should do:** if you need stronger durability, run `ctx connection listen` on a dedicated "collector" project that persists entries locally as a write-ahead backup. -### Split-brain after rejoin +### Split-Brain After Rejoin **What happens:** Raft reconciles: the minority side's uncommitted writes are discarded, and the majority's log is authoritative. @@ -126,18 +126,18 @@ minority had important writes, grep for them in `/entries.jsonl.rejected` (written by the reconciliation pass) and replay them with `ctx connection publish`. -## Auth and tokens +## Auth and Tokens -### Lost admin token +### Lost Admin Token **What happens:** you cannot register new projects. **What you should do:** retrieve it from `/admin.token`. If that file is also gone, stop the hub -and regenerate — note that **all existing client tokens keep +and regenerate. Note that **all existing client tokens keep working**; only new registrations need the admin token. -### Compromised admin token +### Compromised Admin Token **What happens:** anyone with the token can register new projects and publish. They cannot read existing entries without @@ -148,7 +148,7 @@ a client token for a project that subscribes. suspicious client registrations via `clients.json`, and audit `entries.jsonl` for unexpected origins. -### Compromised client token +### Compromised Client Token **What happens:** the attacker can publish as that project and read anything that project is subscribed to. Because `Origin` @@ -161,10 +161,10 @@ token compromise. `clients.json`, restart the hub, and re-register the legitimate project with a fresh token. 
Audit `entries.jsonl` for entries published after the compromise timestamp and quarantine any -that look suspicious — remember that `Origin` on those entries +that look suspicious; remember that `Origin` on those entries proves nothing. -### Compromised hub host +### Compromised Hub Host **What happens:** `/clients.json` stores client tokens **verbatim** (not hashed). Anyone with read access to @@ -178,7 +178,7 @@ See [Security model](../security/hub.md#hub-side-token-storage) for the mitigations that reduce the blast radius while the hashing follow-up is pending. -## Clock skew +## Clock Skew Hub entries carry a timestamp assigned **by the publishing client**. The hub does not rewrite timestamps. Clients with @@ -189,18 +189,18 @@ order in the shared feed. see entries dated in the future or far past, the publisher's clock is the culprit. -## The short list +## The Short List | Symptom | First thing to check | |-----------------------------------|-----------------------------------| | Client can't reach hub | Firewall, then `ctx hub status` | -| "No leader" errors | Cluster quorum — run `ctx hub status` on each peer | +| "No leader" errors | Cluster quorum; run `ctx hub status` on each peer | | Hub won't start after crash | Last line of `entries.jsonl` | | Entries missing after restore | Check `clients.json` sequence vs local `.sync-state.json` | -| Duplicate entries in shared feed | Client replayed after restore — safe, dedup by ID | +| Duplicate entries in shared feed | Client replayed after restore, safe (dedup by ID) | | Followers lagging | Disk or network on the follower, not the leader | -## See also +## See Also - [`ctx` Hub Operations](hub.md) - [`ctx` Hub security model](../security/hub.md) diff --git a/docs/operations/hub.md b/docs/operations/hub.md index 79ea3b09f..cb160607f 100644 --- a/docs/operations/hub.md +++ b/docs/operations/hub.md @@ -14,7 +14,7 @@ icon: lucide/settings # `ctx` Hub: Operations Running the ctx `ctx` Hub in production. 
This page is -for **operators** — people running a hub for themselves or a +for **operators**: people running a hub for themselves or a team, not people writing to a hub someone else is running. If you have not read it yet, start with the @@ -24,7 +24,7 @@ explains what the hub is, the two user stories it supports it does **not** do. A client-side tour is in [Getting Started](../recipes/hub-getting-started.md). -!!! info "Operator cheat sheet" +!!! info "Operator Cheat Sheet" - The hub fans out four entry types only: `decision`, `learning`, `convention`, `task`. Journals, scratchpad, and other local state are out of scope. @@ -34,7 +34,7 @@ it does **not** do. A client-side tour is in - The data model is an **append-only JSONL log** plus two small JSON sidecar files. Nothing is rewritten in place. -## Data directory layout +## Data Directory Layout The hub stores everything under a single data directory (default `~/.ctx/hub-data/`, override with `--data-dir`). @@ -63,7 +63,7 @@ The hub stores everything under a single data directory * `clients.json` holds hashed client tokens; losing it invalidates all client registrations. -## Starting and stopping +## Starting and Stopping === "Foreground" @@ -82,10 +82,10 @@ The hub stores everything under a single data directory `--stop` sends SIGTERM to the PID in `hub.pid`, waits for in-flight RPCs to drain, then exits. If the daemon is wedged, -remove `hub.pid` and send `SIGKILL` manually — `entries.jsonl` is +remove `hub.pid` and send `SIGKILL` manually. `entries.jsonl` is crash-safe, so you will not lose accepted writes. -## Systemd unit +## Systemd Unit For production single-node deployments, run the hub as a systemd service instead of `--daemon`: @@ -120,19 +120,19 @@ sudo systemctl enable --now ctx-hub sudo journalctl -u ctx-hub -f ``` -## Backup and restore +## Backup and Restore Because `entries.jsonl` is append-only, backups are trivial: ```bash -# Hot backup — safe while the hub is running. 
+# Hot backup, safe while the hub is running. cp /entries.jsonl backups/entries-$(date +%F).jsonl cp /meta.json backups/meta-$(date +%F).json cp /clients.json backups/clients-$(date +%F).json ``` For a consistent snapshot across all three files, stop the hub, -copy, then start again — or use a filesystem-level snapshot (LVM, +copy, then start again, or use a filesystem-level snapshot (LVM, ZFS, Btrfs). **Restore:** @@ -148,9 +148,9 @@ ctx hub start --daemon Clients that pushed sequences **above** the restored watermark will re-publish on the next `listen` reconnect, because the hub now reports a lower sequence than what clients have on disk. This -is safe — the store deduplicates by entry ID. +is safe; the store deduplicates by entry ID. -## Log rotation +## Log Rotation `entries.jsonl` grows unbounded. For long-lived hubs, rotate it offline: @@ -181,23 +181,23 @@ of choice. For cluster deployments, watch for: -- **Role flaps** — the leader changing more than once per hour +- **Role flaps**: the leader changing more than once per hour suggests network instability or disk contention. -- **Replication lag** — `ctx hub status` shows per-peer sequence +- **Replication lag**: `ctx hub status` shows per-peer sequence offsets. Sustained lag > 100 sequences on a follower is worth investigating. -- **`entries.jsonl` growth rate** — sudden spikes often indicate a +- **`entries.jsonl` growth rate**: sudden spikes often indicate a misbehaving `ctx connection listen` reconnect loop. ## Upgrading -The JSONL format is versioned in `meta.json`. Ctx refuses to start +The JSONL format is versioned in `meta.json`. `ctx` refuses to start against a newer store version than it understands; older store versions are upgraded in place at first start after an upgrade. 
**Always back up `/` before upgrading.** -## See also +## See Also - [`ctx` Hub failure modes](hub-failure-modes.md) - [`ctx` Hub security model](../security/hub.md) diff --git a/docs/operations/index.md b/docs/operations/index.md index 965299a68..0d5402be3 100644 --- a/docs/operations/index.md +++ b/docs/operations/index.md @@ -10,28 +10,7 @@ Guides for **installing**, **upgrading**, **integrating**, and --- -## Hub - -Operator guides for running a `ctx` Hub — the gRPC server that -fans out structured entries across projects. If you're a client -connecting to a Hub someone else runs, see -[`ctx connect`](../cli/connection.md) and the -[Hub recipes](../recipes/hub-overview.md) instead. - -### [Hub Operations](hub.md) - -Data directory layout, daemon management, systemd unit, -backup and restore, log rotation, monitoring, and upgrades. - -### [Hub Failure Modes](hub-failure-modes.md) - -What can go wrong in network, storage, cluster, auth, and -clock layers — and what you should do about each one. Includes -the short-list table oncall engineers will want bookmarked. - ---- - -## Operating `ctx` +## Day-to-Day Everyday operation guides for anyone running `ctx` in a project or adopting it in a team. @@ -58,6 +37,27 @@ with `ctx` providing persistent memory between iterations. --- +## Hub + +Operator guides for running a `ctx` Hub, the gRPC server that +fans out structured entries across projects. If you're a client +connecting to a Hub someone else runs, see +[`ctx connect`](../cli/connection.md) and the +[Hub recipes](../recipes/hub-overview.md) instead. + +### [Hub Operations](hub.md) + +Data directory layout, daemon management, systemd unit, +backup and restore, log rotation, monitoring, and upgrades. + +### [Hub Failure Modes](hub-failure-modes.md) + +What can go wrong in network, storage, cluster, auth, and +clock layers, and what you should do about each one. Includes +the short-list table oncall engineers will want bookmarked. 
+ +--- + ## Maintainers Runbooks for people shipping `ctx` itself. @@ -71,21 +71,25 @@ release notes, run the release script, and verify the result. ## Runbooks -Step-by-step procedures in `hack/runbooks/`. Run these -regularly — they catch problems that linters and tests cannot. +Step-by-step procedures you run with your agent. Each runbook +includes a prompt to paste into a Claude Code session and +guidance on triaging the results. | Runbook | Purpose | When to run | |---------|---------|-------------| -| [Codebase audit](../../hack/runbooks/codebase-audit.md) | AST audits, magic strings, dead code, doc alignment | Before release, quarterly | -| [Docs semantic audit](../../hack/runbooks/docs-semantic-audit.md) | Narrative gaps, weak pages, structural problems | Before release, after adding pages | -| [Sanitize permissions](../../hack/runbooks/sanitize-permissions.md) | Clean `.claude/settings.local.json` of over-broad grants | After heavy permission granting | +| [Release checklist](runbooks/release-checklist.md) | Full pre-release sequence | Before every release | +| [Plugin release](runbooks/plugin-release.md) | Plugin-specific release steps | Plugin changes ship | +| [Breaking migration](runbooks/breaking-migration.md) | Guide users across breaking changes | Releases with renames | +| [Hub deployment](runbooks/hub-deployment.md) | Set up a ctx Hub end-to-end | First-time hub setup | +| [New contributor](runbooks/new-contributor.md) | Onboarding: clone to first session | New contributors | +| [Codebase audit](runbooks/codebase-audit.md) | AST audits, magic strings, dead code, doc alignment | Before release, quarterly | +| [Docs semantic audit](runbooks/docs-semantic-audit.md) | Narrative gaps, weak pages, structural problems | Before release, after adding pages | +| [Sanitize permissions](runbooks/sanitize-permissions.md) | Clean `.claude/settings.local.json` of over-broad grants | After heavy permission granting | +| [Architecture 
exploration](runbooks/architecture-exploration.md) | Systematic architecture docs across repos | New codebase onboarding, reviews | **Recommended cadence**: -- **Before every release**: codebase audit + docs semantic audit +- **Before every release**: release checklist (which includes + codebase audit + docs semantic audit) - **Monthly**: sanitize permissions -- **Quarterly**: full sweep of all three - -The `_ctx-release` skill runs the codebase audit automatically -as part of its pre-release checks. The other two are manual — -add them to your release checklist. +- **Quarterly**: full sweep of all audit runbooks diff --git a/docs/operations/integrations.md b/docs/operations/integrations.md index c002c08b2..6d6abc533 100644 --- a/docs/operations/integrations.md +++ b/docs/operations/integrations.md @@ -289,7 +289,7 @@ These are invoked in Claude Code with `/skill-name`. #### Blogging Skills -!!! tip "Blogging is a Better Way of Creating Release Notes" +!!! tip "Blogging Is a Better Way of Creating Release Notes" The blogging workflow can also double as generating release notes: AI reads your git commit history and creates a "*narrative*", @@ -669,7 +669,7 @@ The `ctx watch` command parses update commands from AI output. Use this format: | `convention` | CONVENTIONS.md | None | | `complete` | TASKS.md | None | -### Simple Format (tasks, conventions, complete) +### Simple Format (Tasks, Conventions, Complete) ```xml Implement rate limiting @@ -677,7 +677,7 @@ The `ctx watch` command parses update commands from AI output. 
Use this format: rate limiting ``` -### Structured Format (*learnings, decisions*) +### Structured Format (*Learnings, Decisions*) Learnings and decisions support structured attributes for better documentation: diff --git a/docs/operations/migration.md b/docs/operations/migration.md index e7077956a..4a975e645 100644 --- a/docs/operations/migration.md +++ b/docs/operations/migration.md @@ -204,7 +204,7 @@ ctx setup copilot # Generate Copilot tips ctx setup windsurf # Generate Windsurf config ``` -### Migrating Content Into `.context/` +### Migrating Content into `.context/` If you have project knowledge scattered across `.cursorrules` or custom prompt files, consider migrating it: @@ -261,7 +261,7 @@ git push Teammates pull and immediately have context. No per-developer setup needed. -### What About `.claude/`? +### What about `.claude/`? The `.claude/` directory contains permissions that `ctx init` seeds. Hooks and skills are provided by the `ctx` plugin (*not per-project files*). diff --git a/docs/operations/release.md b/docs/operations/release.md index 84c9c3ff9..9fe92334a 100644 --- a/docs/operations/release.md +++ b/docs/operations/release.md @@ -11,6 +11,12 @@ icon: lucide/package ![ctx](../images/ctx-banner.png) +!!! tip "Full Release Checklist" + This page covers the **mechanics** of cutting a release (bump, tag, push). + For the complete pre-release ceremony (audits, tests, verification, and + post-release steps), see the + [Release Checklist runbook](runbooks/release-checklist.md). + ## Prerequisites Before you can cut a release you need: @@ -140,7 +146,7 @@ from the VERSION file. No source file needs editing. ## Troubleshooting -### "Release notes not found" +### "Release Notes Not Found" ``` ERROR: dist/RELEASE_NOTES.md not found. @@ -149,7 +155,7 @@ ERROR: dist/RELEASE_NOTES.md not found. Run `/_ctx-release-notes` in Claude Code first, or write `dist/RELEASE_NOTES.md` manually. 
-### "Working tree is not clean" +### "Working Tree Is Not Clean" ``` ERROR: Working tree is not clean. @@ -157,7 +163,7 @@ ERROR: Working tree is not clean. Commit or stash all changes before running `make release`. -### "Tag already exists" +### "Tag Already Exists" ``` ERROR: Tag v0.9.0 already exists. @@ -172,7 +178,7 @@ git tag -d v0.9.0 git push origin :refs/tags/v0.9.0 ``` -### CI build fails after tag push +### CI Build Fails After Tag Push The tag is already published. Fix the issue, bump to a patch version (e.g. `0.9.1`), and release again. Do not force-push diff --git a/hack/agents/architecture-explorer.md b/docs/operations/runbooks/architecture-exploration.md similarity index 53% rename from hack/agents/architecture-explorer.md rename to docs/operations/runbooks/architecture-exploration.md index e9c185042..ebd8a9bc4 100644 --- a/hack/agents/architecture-explorer.md +++ b/docs/operations/runbooks/architecture-exploration.md @@ -1,20 +1,87 @@ -# Architecture Explorer +--- +# / ctx: https://ctx.ist +# ,'`./ do you remember? +# `.,'\ +# \ Copyright 2026-present Context contributors. +# SPDX-License-Identifier: Apache-2.0 -You are an autonomous architecture exploration agent. Your job is to -systematically build and evolve architecture documentation across all -repositories in this workspace using ctx skills. +title: Architecture Exploration +icon: lucide/map +--- + +![ctx](../../images/ctx-banner.png) + +# Architecture Exploration + +Systematically build architecture documentation across one or +more repositories using ctx skills. Each invocation does one +unit of work; a simple loop drives the agent through all phases. + +**When to use**: When onboarding to a new codebase, performing +architecture reviews, or building up `.context/` documentation +across a workspace of repos. + +**Prerequisites**: `ctx` installed, repos cloned under a shared +workspace directory (e.g., `~/WORKSPACE/`). 
+ +**Companion skills**: + +- `/ctx-architecture`: structural baseline and principal analysis +- `/ctx-architecture-enrich`: code intelligence enrichment via GitNexus +- `/ctx-architecture-failure-analysis`: adversarial failure analysis -## How to Run +--- -You are invoked from `~/WORKSPACE`. A human (or script) runs you with: +## Overview +The agent progresses through phases per repo, depth-first: + +| Phase | Skill | What it does | +|-------|-------|-------------| +| `bootstrap` | `ctx init` + `/ctx-architecture` | Initialize context and build structural baseline | +| `principal` | `/ctx-architecture principal` | Deep analysis: vision, bottlenecks, alternatives | +| `enriched` | `/ctx-architecture-enrich` | Quantify with code intelligence (blast radius, flows) | +| `frontier-N` | `/ctx-architecture` (re-run) | Explore unexplored areas found in convergence report | +| `lens-*` | `/ctx-architecture` with lens | Focused exploration through conceptual lenses | + +Exploration stops when convergence >= 0.85, frontier runs +plateau, or all lenses are exhausted. + +--- + +## Setup + +Create a tracking directory in your workspace root: + +```bash +cd ~/WORKSPACE +mkdir -p .arch-explorer ``` -claude --print "Follow PROMPT.md" --allowedTools '*' + +Create `.arch-explorer/manifest.json` listing your repos: + +```json +{ + "repos": ["ctx", "portal", "infra"], + "current_repo_index": 0, + "progress": {} +} ``` -Each invocation is ONE unit of work: one phase on one repo. After -completing the unit, update tracking and stop. The caller decides -when to invoke you again. +Create `.arch-explorer/run-log.md` (empty, the agent appends to it). + +--- + +## Prompt + +Save this as `.arch-explorer/PROMPT.md` and invoke with your agent. +The prompt is self-contained: the agent reads the manifest, picks +the next unit of work, executes it, updates tracking, and stops. + +~~~ +You are an autonomous architecture exploration agent. 
Your job is to +systematically build and evolve architecture documentation across all +repositories in this workspace using ctx skills. ## Execution Protocol @@ -75,22 +142,38 @@ focus as input upfront. ### Step 3: Do the Work -1. `cd` into the repo directory (`~/WORKSPACE/`) -2. If phase is `bootstrap`: - - Run `ctx init`, confirm `.context/` exists - - Then run `/ctx-architecture` (structural baseline) -3. If phase is `principal` or `frontier-*`: - - Run `/ctx-architecture` (add `principal` argument for principal phase) - - The skill will read existing artifacts and build on them -4. If phase is `enriched`: - - Verify GitNexus is connected: call `mcp__gitnexus__list_repos` - - Success = non-empty list returned with no error +1. `cd` into the sub-repo directory (`~/WORKSPACE/`, NOT + `~/WORKSPACE` itself). +2. Verify `CTX_DIR` already points at THIS sub-repo's `.context/`: + + ```bash + test "$CTX_DIR" = "$PWD/.context" || { + echo "STOP: CTX_DIR=$CTX_DIR but this sub-repo needs $PWD/.context." + echo "Re-launch the agent with CTX_DIR set to the sub-repo:" + echo " cd $PWD && CTX_DIR=\"\$PWD/.context\" claude --print 'Follow .arch-explorer/PROMPT.md' --allowedTools '*'" + exit 1 + } + ``` + + If it fails, STOP. The agent cannot change `CTX_DIR` for itself: + child shells and skill invocations inherit the parent Claude + process environment, which only the caller can control. Do not + proceed, do not run `ctx` commands, do not skip the check. +3. If phase is `bootstrap`: + - Run `ctx init`, confirm `.context/` exists. + - Then run `/ctx-architecture` (structural baseline). +4. If phase is `principal` or `frontier-*`: + - Run `/ctx-architecture` (add `principal` argument for principal phase). + - The skill will read existing artifacts and build on them. +5. If phase is `enriched`: + - Verify GitNexus is connected: call `mcp__gitnexus__list_repos`. + - Success = non-empty list returned with no error. 
- If GitNexus unavailable, log as `enriched-skipped` and advance - to `frontier-1` - - Run `/ctx-architecture-enrich` -5. If phase is a lens run (`lens-security`, etc.): + to `frontier-1`. + - Run `/ctx-architecture-enrich`. +6. If phase is a lens run (`lens-security`, etc.): - Run `/ctx-architecture` with lens focus prepended as instruction - (see lens table above for exact wording) + (see lens table above for exact wording). ### Step 4: Extract Results @@ -132,7 +215,7 @@ Update `.arch-explorer/manifest.json`: Append to `.arch-explorer/run-log.md`: ```markdown -## 2026-04-07T14:00:00Z — ctx — principal +## 2026-04-07T14:00:00Z / ctx / principal **Phase:** principal **Convergence:** 0.45 -> 0.72 @@ -186,7 +269,7 @@ A repo is considered "explored" when ANY of these is true: across consecutive runs) - All 5 lenses have been applied - Convergence score is `null` after 3 attempts (artifacts aren't being - generated properly — log warning and move on) + generated properly; log warning and move on) When a repo is explored, advance `current_repo_index` in the manifest. @@ -200,24 +283,62 @@ When every repo has reached its stopping condition, print: - portal: 0.87 convergence, 6 runs, 3 lenses ... ``` +~~~ + +--- -## Invocation Patterns +## Invocation + +The caller MUST set `CTX_DIR` to the sub-repo the agent will work on. +The agent verifies this at Step 3.2 and stops if it does not match. +The wrapper reads the manifest to pick the current sub-repo, then +launches `claude` with `CTX_DIR` pinned to that sub-repo's `.context/`. 
**Single run (safest for quota):** + ```bash cd ~/WORKSPACE -claude --print "Follow PROMPT.md" --allowedTools '*' +REPO=$(jq -r '.repos[.current_repo_index]' .arch-explorer/manifest.json) +CTX_DIR="$PWD/$REPO/.context" \ + claude --print "Follow .arch-explorer/PROMPT.md" --allowedTools '*' ``` **Batch of N runs:** + ```bash cd ~/WORKSPACE for i in $(seq 1 5); do - claude --print "Follow PROMPT.md" --allowedTools '*' - echo "--- Run $i complete ---" + REPO=$(jq -r '.repos[.current_repo_index]' .arch-explorer/manifest.json) + CTX_DIR="$PWD/$REPO/.context" \ + claude --print "Follow .arch-explorer/PROMPT.md" --allowedTools '*' + echo "--- Run $i complete (repo: $REPO) ---" done ``` **Resume after interruption:** -Just run again. The manifest tracks state; the agent picks up where -it left off. + +Just run the wrapper again. The manifest tracks state; the agent picks +up where it left off. `CTX_DIR` is recomputed from the manifest on +each invocation, so the right sub-repo is always bound. + +## Tips + +- **Start small**: list 1-2 repos in the manifest first. Add more + once you're confident in the output quality. +- **GitNexus is optional**: the enrichment phase is skipped + gracefully if GitNexus isn't connected. You still get structural + and principal analysis. +- **Review between batches**: check the run-log and generated + artifacts between batch runs. The agent is additive-only, but + early course correction saves wasted runs. +- **Lens runs are the payoff**: the first three phases build the + map; lens runs find the interesting things (security gaps, + performance cliffs, stability risks). + +## History + +- 2026-04-07: Original prompt created as `hack/agents/architecture-explorer.md`. +- 2026-04-16: Moved to docs as a runbook for discoverability. +- 2026-04-20: Added `CTX_DIR` verification at Step 3.2 and per-invocation + `CTX_DIR` binding in the wrapper, so the agent writes artifacts to the + sub-repo's `.context/` instead of the inherited workspace one. 
diff --git a/docs/operations/runbooks/backup-strategy.md b/docs/operations/runbooks/backup-strategy.md new file mode 100644 index 000000000..3dbe5adf2 --- /dev/null +++ b/docs/operations/runbooks/backup-strategy.md @@ -0,0 +1,125 @@ +# Backup Strategy + +`ctx backup` was removed. File-level backup is not `ctx`'s +responsibility; your OS or a dedicated backup tool handles it +better and without locking you into a specific mount strategy. + +This runbook explains what to back up, how `ctx hub` reduces the +surface, and what options exist for the rest. + +## What To Back Up + +Per project: + +- `.context/`: all context files, journal, state, scratchpad. +- `.claude/`: Claude Code settings, hooks, skills specific to the + project. Skip this entry when it lives in git; the repo is the + backup. + +Per user: + +- `~/.ctx/`: global config, the encryption key (`~/.ctx/.ctx.key`), + hub data directory (if running a local hub). + +## How Hub Reduces Backup Needs + +`ctx hub` replicates the knowledge surface across machines: + +- `DECISIONS.md` +- `LEARNINGS.md` +- `CONVENTIONS.md` +- `CONSTITUTION.md` +- `ARCHITECTURE.md` +- Task items promoted to hub + +If you run `ctx hub` (as a server or by subscribing to someone +else's), the data that matters most survives losing any single +machine. + +## What Hub Does *Not* Replicate + +Hub is not a file-level backup. The following still live only on +the machine that produced them: + +- Journal entries (`.context/journal/*.md`) +- Runtime state (`.context/state/*`) +- Session event log (`.context/events.jsonl`) +- Scratchpad (`.context/.pad`) +- Encrypted notify/webhook config (`.context/.notify.enc`) +- The encryption key itself (`~/.ctx/.ctx.key`) + +If you need those to survive a disk failure, use a file-level +backup. + +## Example Strategies + +### 1. 
cron + rsync to NAS or External Drive + +```cron +# Daily at 03:00, mirror ~/WORKSPACE and ~/.ctx to NAS +0 3 * * * rsync -a --delete \ + --exclude='node_modules' \ + --exclude='dist' \ + --exclude='.context/state' \ + ~/WORKSPACE/ /mnt/nas/backup/workspace/ +0 3 * * * rsync -a --delete ~/.ctx/ /mnt/nas/backup/ctx-global/ +``` + +Adjust excludes for the trash you don't want to back up. The +`.context/state/` dir is ephemeral per-session; skip it. + +### 2. cron + cp to a Cloud-Synced Directory + +iCloud Drive, Dropbox, or any directory watched by a sync client: + +```cron +0 3 * * * cp -a ~/WORKSPACE/some-project/.context \ + ~/CloudDrive/ctx-backups/some-project/$(date +\%Y-\%m-\%d) +``` + +Daily snapshots, cloud provider handles the replication. + +### 3. Time Machine (macOS) + +If you already run Time Machine, ensure `~/WORKSPACE` and `~/.ctx` +are not in its exclusion list. Time Machine handles versioning; +you get point-in-time recovery for free. + +### 4. Borg or restic for Versioned Backups + +For deduplicated, versioned, encrypted backups: + +```bash +# Borg init (once) +borg init --encryption=repokey /mnt/nas/borg-ctx + +# Daily backup +borg create /mnt/nas/borg-ctx::'ctx-{now}' \ + ~/WORKSPACE ~/.ctx \ + --exclude '*/node_modules' \ + --exclude '*/.context/state' +``` + +Use `restic` if you prefer S3-compatible targets. + +## When You Still Need File-Level Backup Even With Hub + +- **Journal**: session histories are local-only until exported. +- **Scratchpad**: private notes, encrypted locally. +- **Encryption key**: losing `~/.ctx/.ctx.key` means losing access + to every encrypted file in every project. +- **Non-hub projects**: projects that never called `ctx hub + register` have zero cross-machine persistence. + +For these, pick one strategy above and forget about it. + +## Why ctx No Longer Ships a Backup Command + +Backup is inherently environment-specific: SMB, NFS, S3, rsync, +Time Machine, Borg, restic. Every user has a different story. 
The +previous `ctx backup` picked SMB via GVFS, which was Linux-only and +narrow. Chasing mount strategies would never generalize. + +Hub is the right answer for the data `ctx` owns (knowledge). For +everything else, your OS or a dedicated backup tool is the right +layer. diff --git a/docs/operations/runbooks/breaking-migration.md b/docs/operations/runbooks/breaking-migration.md new file mode 100644 index 000000000..bde2048e3 --- /dev/null +++ b/docs/operations/runbooks/breaking-migration.md @@ -0,0 +1,124 @@ +--- +# / ctx: https://ctx.ist +# ,'`./ do you remember? +# `.,'\ +# \ Copyright 2026-present Context contributors. +# SPDX-License-Identifier: Apache-2.0 + +title: Breaking Migration +icon: lucide/arrow-right-left +--- + +![ctx](../../images/ctx-banner.png) + +# Breaking Migration Guide + +Template for upgrading across breaking CLI renames or behavior +changes. Use this as a starting point when writing migration +notes for a specific release, or hand it to your agent as +context for generating release-specific guidance. + +**When to use**: When a release includes breaking changes +(command renames, removed flags, changed defaults) that require +user action. + +**Companion**: [Upgrade guide](../upgrading.md) covers the +general upgrade flow. This runbook covers the breaking-change +specifics. + +--- + +## Step 1: Identify What Changed + +Ask your agent to diff the CLI surface between the old and new +version: + +``` +Compare the CLI command surface between the previous release tag +and HEAD. For each change, categorize as: renamed, removed, +new, or changed-behavior. Include old and new command signatures. +``` + +Or use the `/_ctx-command-audit` skill after the rename. + +## Step 2: Regenerate Infrastructure + +```bash +# Install the new binary +make build && sudo make install + +# Regenerate CLAUDE.md and permissions +ctx init --force --merge +``` + +`--merge` preserves your knowledge files (TASKS.md, DECISIONS.md, +etc.) 
while regenerating infrastructure (permissions, CLAUDE.md +managed sections). + +## Step 3: Update the Plugin + +``` +/plugin -> select ctx -> Update now +``` + +Or, if using a local clone: + +```bash +make plugin-reload +# restart Claude Code +``` + +## Step 4: Update Personal Scripts + +Search your scripts and aliases for old command names: + +```bash +# Example: find references to old command names +grep -r "ctx old-command" ~/scripts/ ~/.zshrc ~/.bashrc +``` + +Replace with the new names per the changelog. + +## Step 5: Update Hook Configs + +If you have custom hooks in `.claude/settings.local.json` that +reference ctx commands, update them: + +```bash +jq '.hooks' .claude/settings.local.json | grep "ctx " +``` + +## Step 6: Verify + +```bash +ctx status # context files intact +ctx drift # no broken references +make test # if you're a contributor +``` + +--- + +## Writing Release-Specific Migration Notes + +When preparing a release with breaking changes, create a section +in the release notes using this template: + +```markdown +## Breaking Changes + +### `old-command` renamed to `new-command` + +**What changed**: `ctx old-command` is now `ctx new-command`. +The old name is removed (no deprecation alias). + +**Action required**: +1. Run `ctx init --force --merge` to update CLAUDE.md +2. Update any scripts referencing `ctx old-command` +3. Update hook configs if applicable + +**Why**: [brief rationale for the rename] +``` + +Repeat for each breaking change. Users should be able to follow +the notes mechanically without needing to understand the +codebase. 
diff --git a/hack/runbooks/codebase-audit.md b/docs/operations/runbooks/codebase-audit.md similarity index 81% rename from hack/runbooks/codebase-audit.md rename to docs/operations/runbooks/codebase-audit.md index 52864504d..0d23671e4 100644 --- a/hack/runbooks/codebase-audit.md +++ b/docs/operations/runbooks/codebase-audit.md @@ -1,19 +1,37 @@ -# Codebase Audit Runbook +--- +# / ctx: https://ctx.ist +# ,'`./ do you remember? +# `.,'\ +# \ Copyright 2026-present Context contributors. +# SPDX-License-Identifier: Apache-2.0 + +title: Codebase Audit +icon: lucide/search-code +--- + +![ctx](../../images/ctx-banner.png) -**When to use**: Before a release, after a long YOLO sprint, quarterly, or when -planning the next phase of work. Generates analysis reports that feed into task -planning. +# Codebase Audit -**Frequency**: Periodic (not a daily workflow). +A structured audit of the codebase: dead code, magic strings, +documentation drift, security surface, and roadmap opportunities. + +**When to run**: Before a release, after a long YOLO sprint, +quarterly, or when planning the next phase of work. **Time**: ~15-30 minutes with a team of agents. --- -## Prompt +## How to Use This Runbook -Paste or adapt the following into a Claude Code session. The repo should be in -a clean git state (`git stash` or commit first). +Start a Claude Code session with a clean git state +(`git stash` or commit first). Paste or adapt the prompt below. +The agent does the analysis; you triage the findings. + +--- + +## Prompt ``` I want you to create an agent team to audit this codebase. Save each report as @@ -106,13 +124,9 @@ it suits. Ground suggestions in actual project patterns, not generic advice. - **ideas/ is gitignored**: reports saved there won't be committed. Move specific findings to TASKS.md, DECISIONS.md, or LEARNINGS.md to persist them. 
-## Changes from Original +## History -The original prompt (2026-02-08) was improved: -- Specified read-only agents to prevent accidental code changes -- Added report structure template for consistency across analyses -- Scoped security review to CLI-relevant threats instead of generic OWASP -- Added maintainability thresholds (>80 lines, >5 cases) to avoid style nitpicks -- Fixed duplicate numbering (two #6s) and added analysis #8 -- Added concrete output expectations for each analysis -- Removed defensive "we can revert" language in favor of preventive controls +- 2026-02-08: Original prompt created after a codebase audit sprint. +- 2026-02-17: Improved with read-only agents, report structure template, + CLI-scoped security review, and maintainability thresholds. +- 2026-04-16: Moved from `hack/runbooks/` to `docs/operations/runbooks/`. diff --git a/hack/runbooks/docs-semantic-audit.md b/docs/operations/runbooks/docs-semantic-audit.md similarity index 66% rename from hack/runbooks/docs-semantic-audit.md rename to docs/operations/runbooks/docs-semantic-audit.md index b15c45a26..735d80fc6 100644 --- a/hack/runbooks/docs-semantic-audit.md +++ b/docs/operations/runbooks/docs-semantic-audit.md @@ -1,31 +1,43 @@ -# Documentation Semantic Audit Runbook +--- +# / ctx: https://ctx.ist +# ,'`./ do you remember? +# `.,'\ +# \ Copyright 2026-present Context contributors. +# SPDX-License-Identifier: Apache-2.0 + +title: Docs Semantic Audit +icon: lucide/book-open-check +--- + +![ctx](../../images/ctx-banner.png) + +# Documentation Semantic Audit -**When to use**: Before a release, after adding several new pages, when the -site feels sprawling, or when you suspect narrative gaps. This audit finds -structural problems that linters and link checkers cannot: weak pages that -should be merged, heavy pages that should be split, missing cross-links, -and narrative arcs that don't land. 
+Find structural problems that linters and link checkers cannot: +weak pages that should be merged, heavy pages that should be split, +missing cross-links, and narrative arcs that don't land. -**Frequency**: Per release cycle, or when the docs surface area grows by -more than 3-4 pages. +**When to run**: Before a release, after adding several new pages, +when the site feels sprawling, or when you suspect narrative gaps. -**Time**: ~20-40 minutes with an LLM session. +**Time**: ~20-40 minutes with an agent session. --- ## Why This Is a Runbook -These judgments are inherently subjective and context-dependent. A page is -"weak" relative to its neighbors; a narrative arc only matters if the docs -intend to tell a story. Deterministic tools (broken-link checkers, word -counters) can't do this. An LLM reading the full doc set can. +These judgments are inherently subjective and context-dependent. +A page is "weak" relative to its neighbors; a narrative arc only +matters if the docs intend to tell a story. Deterministic tools +(broken-link checkers, word counters) can't do this. An LLM +reading the full doc set can. --- ## Prompt -Paste or adapt the following into a Claude Code session. The agent needs -read access to `docs/` and `site/` (for nav structure). +Paste or adapt the following into a Claude Code session. The agent +needs read access to `docs/` and the site nav structure. ``` Read every file under docs/ (including docs/blog/ and docs/recipes/). @@ -72,8 +84,8 @@ For each: source page, anchor text, suggested link target. ## 4. Narrative Gaps -The docs should tell a coherent story: problem → install → first session -→ daily workflow → advanced patterns → contributing. Look for: +The docs should tell a coherent story: problem -> install -> first session +-> daily workflow -> advanced patterns -> contributing. 
Look for: - Gaps in the progression (e.g., no bridge from "first session" to "daily habits") - Blog posts that introduce concepts the reference docs don't cover @@ -107,13 +119,15 @@ End with a prioritized action list: what to fix first. ## After the Audit -1. **Triage findings** — not everything needs fixing. Focus on high severity. -2. **Merge weak pages first** — fewer pages is almost always better. -3. **Add cross-links** — cheapest improvement, highest reader impact. -4. **File split decisions in DECISIONS.md** — page splits are architectural. +1. **Triage findings**: not everything needs fixing. Focus on high severity. +2. **Merge weak pages first**: fewer pages is almost always better. +3. **Add cross-links**: cheapest improvement, highest reader impact. +4. **File split decisions in DECISIONS.md**: page splits are architectural. 5. **Regenerate the site** and spot-check nav after structural changes. ## History -- 2026-02-17: Created after merging `docs/re-explaining.md` into `docs/about.md`, - which surfaced the pattern of weak standalone pages that dilute rather than add. +- 2026-02-17: Created after merging `docs/re-explaining.md` into + `docs/about.md`, which surfaced the pattern of weak standalone + pages that dilute rather than add. +- 2026-04-16: Moved from `hack/runbooks/` to `docs/operations/runbooks/`. diff --git a/docs/operations/runbooks/hub-deployment.md b/docs/operations/runbooks/hub-deployment.md new file mode 100644 index 000000000..f2c231b93 --- /dev/null +++ b/docs/operations/runbooks/hub-deployment.md @@ -0,0 +1,164 @@ +--- +# / ctx: https://ctx.ist +# ,'`./ do you remember? +# `.,'\ +# \ Copyright 2026-present Context contributors. +# SPDX-License-Identifier: Apache-2.0 + +title: Hub Deployment +icon: lucide/server +--- + +![ctx](../../images/ctx-banner.png) + +# Hub Deployment + +Linear runbook for setting up a ctx Hub for yourself or a team. +Consolidates pieces currently scattered across hub recipes and +operations docs. 
+ +**When to use**: First-time hub setup, or when onboarding a new +team onto an existing hub. + +**Prerequisites**: `ctx` binary installed, network connectivity +between hub and clients. + +**Companion docs**: + +- [Hub overview](../../recipes/hub-overview.md): what the hub + is and is not +- [Hub operations](../hub.md): data directory, systemd, + backup, monitoring +- [Hub failure modes](../hub-failure-modes.md): what can go wrong + +--- + +## Step 1: Start the Hub + +=== "Quick Start (foreground)" + + ```bash + ctx hub start + ``` + +=== "Production (systemd)" + + See [Hub Operations: Systemd Unit](../hub.md#systemd-unit) + for the full unit file. + + ```bash + sudo systemctl enable --now ctx-hub + ``` + +The hub creates `admin.token` on first start. Save this token; +it is the only way to register clients. + +## Step 2: Generate the Admin Token + +On first start, the hub writes `admin.token` to the data +directory (default `~/.ctx/hub-data/`): + +```bash +cat ~/.ctx/hub-data/admin.token +``` + +This token has full admin privileges. Keep it secret. + +## Step 3: Register Clients + +For each client (person or machine) that will connect: + +```bash +# On the hub machine +ctx hub register --name "volkan-laptop" --admin-token +``` + +This returns a client token. Distribute it securely to the client. + +## Step 4: Connect Clients + +On each client machine: + +```bash +ctx connect --token +``` + +Verify the connection: + +```bash +ctx connection status +``` + +## Step 5: Verify Sync + +Push a test entry from one client and verify it arrives: + +```bash +# Client A +ctx add learning "Hub sync test" --context "Verifying hub setup" + +# Client B (after a moment) +ctx status # should show the new learning +``` + +## Step 6: Configure Backup + +Set up regular backups of the hub data directory. See +[Hub Operations: Backup and Restore](../hub.md#backup-and-restore). 
+ +Minimum: + +```bash +# Add to cron +0 */6 * * * cp ~/.ctx/hub-data/entries.jsonl ~/backups/entries-$(date +\%F).jsonl +``` + +## Step 7: Configure TLS (When Available) + +!!! note "Coming Soon" + TLS support is planned (H-01/H-02). Until then, run the hub + on a trusted network or behind a reverse proxy with TLS + termination. + +--- + +## Team Onboarding Checklist + +When adding a new team member to an existing hub: + +- [ ] Generate a client token (`ctx hub register --name ""`) +- [ ] Share the token and hub address securely +- [ ] Have them run `ctx connect --token ` +- [ ] Verify with `ctx connection status` +- [ ] Point them to the [Hub Getting Started](../../recipes/hub-getting-started.md) recipe + +## Troubleshooting + +### "Connection Refused" + +The hub isn't running or the port is wrong. Check: + +```bash +ctx hub status # on the hub machine +ss -tlnp | grep 9900 # default port +``` + +### "Authentication Failed" + +The client token is wrong or was never registered. Re-register: + +```bash +ctx hub register --name "" --admin-token +``` + +### Entries Not Syncing + +Check that the client is listening: + +```bash +ctx connection status +``` + +If connected but not syncing, check the hub logs for sequence +mismatch errors. See +[Hub Failure Modes](../hub-failure-modes.md) for details. diff --git a/docs/operations/runbooks/new-contributor.md b/docs/operations/runbooks/new-contributor.md new file mode 100644 index 000000000..a0c9d4d78 --- /dev/null +++ b/docs/operations/runbooks/new-contributor.md @@ -0,0 +1,150 @@ +--- +# / ctx: https://ctx.ist +# ,'`./ do you remember? +# `.,'\ +# \ Copyright 2026-present Context contributors. +# SPDX-License-Identifier: Apache-2.0 + +title: New Contributor +icon: lucide/user-plus +--- + +![ctx](../../images/ctx-banner.png) + +# New Contributor Onboarding + +Step-by-step onboarding sequence for new contributors. 
Consolidates +setup instructions currently scattered across the README, +[contributing guide](../../home/contributing.md), and setup docs. + +**When to use**: First-time contributor setup, or when verifying +your development environment after a major upgrade. + +--- + +## Step 1: Clone the Repository + +```bash +git clone https://github.com/ActiveMemory/ctx.git +cd ctx +``` + +Or fork first on GitHub, then clone your fork. + +## Step 2: Initialize Context + +```bash +ctx init +``` + +This creates the `.context/` directory with knowledge files and +the `.claude/` directory with agent configuration. If `ctx` is +not yet installed, proceed to Step 3 first, then come back. + +## Step 3: Build and Install + +```bash +make build +sudo make install +``` + +Verify: + +```bash +ctx --version +``` + +## Step 4: Install the Plugin (Claude Code Users) + +If you use Claude Code, install the plugin from your local clone +so skills and hooks reflect your working tree: + +1. Launch `claude` +2. Type `/plugin` and press Enter +3. Select **Marketplaces** -> **Add Marketplace** +4. Enter the absolute path to your clone (e.g., `~/WORKSPACE/ctx`) +5. Back in `/plugin`, select **Install** and choose `ctx` + +Verify: + +```bash +claude /plugin list # should show ctx +``` + +See [Contributing: Install the Plugin](../../home/contributing.md#3-install-the-plugin-from-your-local-clone) +for details on cache clearing. + +## Step 5: Switch to Dev Profile + +```bash +ctx config switch dev +``` + +This enables verbose logging and notify events (useful during +development). + +## Step 6: Verify Hooks + +Start a Claude Code session and check that hooks fire: + +```bash +claude +``` + +You should see ctx session hooks (ceremonies reminder, context +loading) on session start. If not, check that the plugin is +installed correctly (Step 4). + +## Step 7: Run Your First Session + +In Claude Code: + +``` +/ctx-status +``` + +This should show context file health, active tasks, and recent +decisions. 
If it works, your setup is complete. + +## Step 8: Verify Context Persistence + +End the session and start a new one: + +``` +/ctx-remember +``` + +The agent should recall what happened in the previous session. +This confirms that context persistence is working end-to-end. + +## Step 9: Run Tests + +```bash +make test # unit tests +make audit # full check: fmt + vet + lint + drift + docs + test +``` + +All tests should pass with a clean clone. + +--- + +## Quick Reference + +| Task | Command | +|------|---------| +| Build | `make build` | +| Install | `sudo make install` | +| Test | `make test` | +| Full audit | `make audit` | +| Rebuild docs site | `make site` | +| Serve docs locally | `make site-serve` | +| Clear plugin cache | `make plugin-reload` | +| Switch config profile | `ctx config switch dev` | + +## Next Steps + +- Read the [contributing guide](../../home/contributing.md) + for project layout, code style, and PR process +- Check [TASKS.md](https://github.com/ActiveMemory/ctx/blob/main/.context/TASKS.md) + for open work items +- Ask `/ctx-next` for suggested work diff --git a/docs/operations/runbooks/plugin-release.md b/docs/operations/runbooks/plugin-release.md new file mode 100644 index 000000000..46b1d8f75 --- /dev/null +++ b/docs/operations/runbooks/plugin-release.md @@ -0,0 +1,153 @@ +--- +# / ctx: https://ctx.ist +# ,'`./ do you remember? +# `.,'\ +# \ Copyright 2026-present Context contributors. +# SPDX-License-Identifier: Apache-2.0 + +title: Plugin Release +icon: lucide/puzzle +--- + +![ctx](../../images/ctx-banner.png) + +# Plugin Release + +Plugin-specific release procedure. The general +[release checklist](release-checklist.md) covers the full `ctx` +release; this runbook covers the plugin-specific steps that are +not part of that flow. + +**When to use**: When releasing plugin changes (new skills, hook +updates, permission changes) independently of a `ctx` binary +release, or as a sub-procedure within the full release. 
+ +--- + +## What Ships in the Plugin + +The plugin lives at `internal/assets/claude/` and includes: + +| Component | Path | What it does | +|-----------|------|-------------| +| Skills | `internal/assets/claude/skills/` | User-facing `/ctx-*` slash commands | +| Hooks | `internal/assets/claude/hooks/` | Pre/post tool-use hooks | +| Plugin manifest | `internal/assets/claude/.claude-plugin/plugin.json` | Declares skills, hooks, version | +| Marketplace | `.claude-plugin/marketplace.json` | Points Claude Code to the plugin | + +## Step 1: Update hooks.json (If Hooks Changed) + +If you added, removed, or modified hooks: + +```bash +# Verify hook definitions match implementations +make audit +``` + +Check that `plugin.json` lists all hooks correctly. Missing +hooks silently fail to fire. + +## Step 2: Bump Version + +Update the version in three places: + +- `internal/assets/claude/.claude-plugin/plugin.json` +- `.claude-plugin/marketplace.json` (two fields) +- `editors/vscode/package.json` + `package-lock.json` + (if VS Code extension is affected) + +!!! tip "The Release Script Does This" + If you're running `make release`, the script bumps these + automatically from `VERSION`. Only bump manually if you're + releasing the plugin independently. + +## Step 3: Test Against a Fresh Install + +```bash +# Clear cached plugin +make plugin-reload + +# Restart Claude Code, then: +claude /plugin list # verify version +``` + +Test the critical paths: + +- [ ] `/ctx-status` works +- [ ] Session hooks fire (ceremonies, context loading) +- [ ] At least one user-facing skill works end-to-end +- [ ] Pre-tool-use hooks block when they should + +## Step 4: Test Against a Clean Project + +Create a temporary project to verify the plugin works outside +the ctx repo: + +```bash +mkdir /tmp/test-ctx-plugin && cd /tmp/test-ctx-plugin +git init +ctx init +claude # start a session, verify hooks fire +``` + +## Step 5: Verify Skill Count + +The plugin manifest declares all user-invocable skills. 
Verify +the count matches: + +```bash +# Count skills in plugin.json +jq '.skills | length' internal/assets/claude/.claude-plugin/plugin.json + +# Count skill directories +ls -d internal/assets/claude/skills/ctx-*/ | wc -l +``` + +These numbers should match (some skills are not user-invocable +and won't appear in both counts). + +## Step 6: Commit and Tag + +If releasing independently of a binary release: + +```bash +git add internal/assets/claude/ .claude-plugin/ +git commit -m "chore: release plugin v0.X.Y" +git tag plugin-v0.X.Y +git push origin main --tags +``` + +If part of a full release, the +[release checklist](release-checklist.md) handles this. + +--- + +## Troubleshooting + +### Skills Don't Appear After Update + +Claude Code caches plugin files aggressively: + +```bash +make plugin-reload # clears cache +# restart Claude Code +``` + +### Hooks Don't Fire + +Check that the hook is registered in `plugin.json` and that +the command it calls exists: + +```bash +jq '.hooks' internal/assets/claude/.claude-plugin/plugin.json +``` + +### Version Mismatch + +If `claude /plugin list` shows an old version after updating: + +```bash +make plugin-reload +# restart Claude Code +claude /plugin list # should show new version +``` diff --git a/docs/operations/runbooks/release-checklist.md b/docs/operations/runbooks/release-checklist.md new file mode 100644 index 000000000..389d055c4 --- /dev/null +++ b/docs/operations/runbooks/release-checklist.md @@ -0,0 +1,134 @@ +--- +# / ctx: https://ctx.ist +# ,'`./ do you remember? +# `.,'\ +# \ Copyright 2026-present Context contributors. +# SPDX-License-Identifier: Apache-2.0 + +title: Release Checklist +icon: lucide/clipboard-check +--- + +![ctx](../../images/ctx-banner.png) + +# Release Checklist + +The canonical pre-release sequence. This runbook ties together +the audits, tests, and release steps that are otherwise scattered +across docs and the operator's head. + +**When to run**: Before every release. No exceptions. 
+ +**Companion**: The [`/_ctx-release`](../release.md) skill +automates the tag-and-push portion; this checklist covers +everything *before* and *after* that automation. + +--- + +## Pre-Release + +### 1. Run the Codebase Audit + +Use the [codebase audit runbook](codebase-audit.md) prompt with +your agent. Focus on analyses 1-4 (extractable patterns, +documentation drift, maintainability, security). Triage findings +into TASKS.md; anything blocking ships before the release. + +### 2. Run the Docs Semantic Audit + +Use the [docs semantic audit runbook](docs-semantic-audit.md) +prompt. Fix high-severity findings (weak pages, broken narrative +arcs). Medium-severity items can be deferred. + +### 3. Sanitize Permissions + +Follow the [sanitize permissions runbook](sanitize-permissions.md). +Clean up `.claude/settings.local.json` before it gets committed +as part of the release. + +### 4. Run the Full Test Suite + +```bash +make audit # fmt + vet + lint + drift + docs + test +make smoke # integration smoke tests +``` + +All tests must pass. No exceptions. + +### 5. Check Context Health + +```bash +ctx drift # broken references, stale patterns +ctx status # context file health +/ctx-link-check # dead links in docs +``` + +Fix anything flagged. + +### 6. Review TASKS.md + +Scan for incomplete tasks tagged as release-blocking. Either +finish them or explicitly defer with a reason in the task note. + +--- + +## Release + +### 7. Bump Version + +```bash +echo "0.X.0" > VERSION +git add VERSION +git commit -m "chore: bump version to 0.X.0" +``` + +### 8. Generate Release Notes + +In Claude Code: + +``` +/_ctx-release-notes +``` + +Review `dist/RELEASE_NOTES.md`. Ensure it captures all +user-visible changes. + +### 9. Cut the Release + +```bash +make release +``` + +Or in Claude Code: `/_ctx-release`. See +[Cutting a Release](../release.md) for the full step-by-step. + +--- + +## Post-Release + +### 10. 
Verify the GitHub Release + +- [ ] [GitHub Releases](https://github.com/ActiveMemory/ctx/releases) shows the new version +- [ ] All 6 binaries are attached +- [ ] SHA256 checksums are attached +- [ ] Release notes render correctly + +### 11. Update the Plugin Marketplace + +If the plugin version changed, verify the marketplace entry: + +```bash +claude /plugin list # shows updated version +``` + +### 12. Announce + +Post in the project's communication channels. Reference the +release notes. + +### 13. Clean Up + +```bash +rm dist/RELEASE_NOTES.md # consumed by the release script +git stash pop # if you stashed earlier +``` diff --git a/hack/runbooks/sanitize-permissions.md b/docs/operations/runbooks/sanitize-permissions.md similarity index 80% rename from hack/runbooks/sanitize-permissions.md rename to docs/operations/runbooks/sanitize-permissions.md index c47f9a072..46867201a 100644 --- a/hack/runbooks/sanitize-permissions.md +++ b/docs/operations/runbooks/sanitize-permissions.md @@ -1,19 +1,32 @@ -# Sanitize Permissions Runbook +--- +# / ctx: https://ctx.ist +# ,'`./ do you remember? +# `.,'\ +# \ Copyright 2026-present Context contributors. +# SPDX-License-Identifier: Apache-2.0 + +title: Sanitize Permissions +icon: lucide/shield-check +--- + +![ctx](../../images/ctx-banner.png) + +# Sanitize Permissions Manual procedure for cleaning up `.claude/settings.local.json`. The agent may analyze and recommend, but **you** make every edit. -## Why a Runbook, Not a Skill +## Why Manual, Not Automated `settings.local.json` controls what the agent can do without asking. An agent that can edit its own permission file is a self-escalation -vector — especially if the skill is auto-accepted. Keep this manual. +vector, especially if the skill is auto-accepted. Keep this manual. -## When to Run +**When to run**: After busy sessions where you clicked "Allow" many +times, weekly hygiene (pair with `ctx drift`), or before committing +`.claude/settings.local.json`. 
-- After busy sessions where you clicked "Allow" many times -- Weekly hygiene (pair with `ctx drift`) -- Before committing `.claude/settings.local.json` +--- ## Step 1: Snapshot @@ -53,6 +66,7 @@ Bash(/home/jose/WORKSPACE/ctx/ctx add decision "Use PostgreSQL" --context ...) ``` Signs of a one-off: + - Full absolute paths to specific files - Literal string arguments (not wildcards) - Very specific flag combinations @@ -103,7 +117,7 @@ While you're in here, also flag: | `Bash(curl:*)`, `Bash(wget:*)` | Arbitrary network access | | Any write to `.claude/` paths | Agent self-modification | -See the `ctx-permission-sanitize` skill SKILL.md for the full threat matrix. +See the `/ctx-permission-sanitize` skill for the full threat matrix. ## Step 5: Edit @@ -139,6 +153,13 @@ You can safely ask the agent to *analyze* the file: The agent can read and report. **You** do the edits. Do **not** add these to your allow list: + - `Skill(ctx-permission-sanitize)` - `Edit(.claude/settings.local.json)` - Any `Bash(...)` pattern that writes to `.claude/` + +## History + +- 2026-02-15: Created as manual-only procedure after deciding + against a self-modifying skill. +- 2026-04-16: Moved from `hack/runbooks/` to `docs/operations/runbooks/`. 
diff --git a/docs/operations/upgrading.md b/docs/operations/upgrading.md index 607a79068..69dbb0415 100644 --- a/docs/operations/upgrading.md +++ b/docs/operations/upgrading.md @@ -39,7 +39,7 @@ ctx init --force --merge # /plugin → select ctx → Update now (if using Claude Code) ``` -## What Changes Between Versions +## What Changes between Versions `ctx init` generates two categories of files: diff --git a/docs/recipes/activating-context.md b/docs/recipes/activating-context.md new file mode 100644 index 000000000..55bff28a8 --- /dev/null +++ b/docs/recipes/activating-context.md @@ -0,0 +1,216 @@ +--- +title: "Activating a Context Directory" +icon: lucide/plug-zap +--- + +![ctx](../images/ctx-banner.png) + +## The Problem + +You ran a `ctx` command and got: + +``` +Error: no context directory specified for this project +``` + +This means ctx doesn't know which `.context/` directory to operate +on. It will not guess, and it will not walk up from your current +working directory looking for one; that behavior was removed +deliberately, because silent inference was the source of several +bugs (stray agent-created directories, cross-project bleed-through, +webhook-route misrouting, sub-agent fragmentation). Every `ctx` +command requires you to declare the target directory explicitly. + +This page shows you the three ways to do that and when to use each. + +## TL;DR + +If the project has already been initialized and you just need to +bind it for your shell: + +```bash +eval "$(ctx activate)" +``` + +That's 95% of the time. Add it to `.zshrc` / `.bashrc` per project +with direnv, or run it once per terminal. 
+ +## When You See the Error + +The exact error message depends on how many `.context/` directories +are visible from the current directory: + +### Zero Candidates + +``` +Error: no context directory specified for this project +``` + +Either you haven't initialized this project yet (run `ctx init`) +or you're in a directory that doesn't belong to a ctx-tracked +project. If you know the project lives elsewhere, use one of the +declaration methods below with its absolute path. + +### One Candidate + +``` +Error: no context directory specified; a likely candidate is at + /Users/you/repos/myproject/.context +``` + +ctx found a single `.context/` on the way up from here but won't +bind to it automatically. Run `eval "$(ctx activate)"` and ctx +will emit the `export` for the candidate. Or set `CTX_DIR` by hand. + +### Multiple Candidates + +``` +Error: no context directory specified; multiple candidates visible: + /Users/you/repos/myproject/.context + /Users/you/repos/myproject/packages/web/.context +``` + +You're inside nested projects. Pick the one you mean: + +```bash +ctx activate /Users/you/repos/myproject/.context +# …copy and paste the `export` line it prints, or wrap in eval: +eval "$(ctx activate /Users/you/repos/myproject/.context)" +``` + +## Three Ways to Declare + +### 1. `ctx activate` (Recommended for Shells) + +`ctx activate` emits a shell-native `export CTX_DIR=...` line to +stdout. Wrap it in `eval` and the binding takes effect for the +current shell: + +```bash +# Walk up from current dir and bind the single visible candidate: +eval "$(ctx activate)" + +# Bind a specific path explicitly: +eval "$(ctx activate /abs/path/to/.context)" + +# Clear the binding: +eval "$(ctx deactivate)" +``` + +`ctx activate` validates paths strictly: the target must exist, be +a directory, and contain at least one canonical context file +(`CONSTITUTION.md` or `TASKS.md`). It refuses to emit for multiple +upward candidates; pick one explicitly in that case. 
+ +Under the hood, the emitted line is just: + +```bash +export CTX_DIR='/abs/path/to/.context' +``` + +So you can copy it into your `.zshrc` / `.bashrc` if you want the +binding permanent for a given shell setup. Better: use +[direnv](https://direnv.net/) with a per-project `.envrc`. + +### 2. `CTX_DIR` Env Var + +If you already know the path, export it directly: + +```bash +export CTX_DIR=/abs/path/to/.context +ctx status +``` + +`CTX_DIR` is the same variable `ctx activate` writes; `activate` +is just a convenience that figures out the path for you. + +### 3. Inline One-Shot + +For one-shot commands (CI jobs, scripts, debugging a specific +project without changing your shell state), prefix the binding +inline: + +```bash +CTX_DIR=/abs/path/to/.context ctx status +``` + +This binds `CTX_DIR` for that invocation only. + +`CTX_DIR` must be an absolute path with `.context` as its basename. +Relative paths and other names are rejected on first use; the +basename guard catches the common footgun +(`export CTX_DIR=$(pwd)`) before stray writes can leak to the +project root. + +## For CI and Scripts + +Do not rely on shell activation in automated flows. Set `CTX_DIR` +explicitly at the top of the script: + +```bash +#!/usr/bin/env bash +set -euo pipefail + +export CTX_DIR="$GITHUB_WORKSPACE/.context" +ctx status +ctx drift +``` + +## For Claude Code Users + +The ctx plugin's hooks are generated with +`CTX_DIR="$CLAUDE_PROJECT_DIR/.context"` prefixed to each command, +so hook-driven ctx invocations resolve correctly without any +per-session setup. You only need to activate manually when running +`ctx` yourself in a terminal. + +## One Project, One `.context/` + +The context directory is not a free-floating bag of files. It is +pinned to a project by contract: **`filepath.Dir(ContextDir())` is +the project root.** That parent directory is what `ctx sync`, +`ctx drift`, and the memory-drift hook scan for code, secret files, +and `MEMORY.md` respectively. 
+ +The practical consequences: + +- **Don't share one `.context/` across multiple projects.** It holds + per-project journals, per-session state, and per-project secrets. + Pointing two codebases at the same directory corrupts all three. +- **If you want to share knowledge** (CONSTITUTION, CONVENTIONS, + ARCHITECTURE) across projects, use `ctx hub`. It cherry-picks + entries at the right granularity and keeps the per-project bits + where they belong. +- **The `CTX_DIR` you activate is implicitly a project-root + declaration.** Setting `CTX_DIR=/weird/place/.context` means + you're telling ctx the project root is `/weird/place/`. That's + your call to make; ctx does not police it. + +### Recommended Layout + +``` +~/WORKSPACE/my-to-do-list + ├── .git + ├── .context ← owned by this project; do not share + ├── ideas + │ └── ... + ├── Makefile + ├── Makefile.ctx + └── specs + └── ... +``` + +`.context/` sits at the project root, next to `.git`. `ctx activate` +binds to it; every ctx subsystem reads the project from its parent. + +## Why Not Walk Up Automatically? + +Nested projects, submodules, rogue agent-created `.context/` +directories, and sub-agent sessions all produced silent misrouting +under the old walk-up model. See the +[explicit-context-dir spec](https://github.com/ActiveMemory/ctx/blob/main/specs/explicit-context-dir.md) +and [the analysis doc](https://github.com/ActiveMemory/ctx/blob/main/specs/context-resolution-analysis.md) +for the full reasoning. + +The short version: ctx decided to stop guessing and require the +caller to declare. Every other decision flows from there. 
diff --git a/docs/recipes/architecture-deep-dive.md b/docs/recipes/architecture-deep-dive.md index 258869892..272f92ea5 100644 --- a/docs/recipes/architecture-deep-dive.md +++ b/docs/recipes/architecture-deep-dive.md @@ -8,8 +8,8 @@ title: Architecture Deep Dive icon: lucide/layers --- -![ctx](../images/ctx-banner.png) +![ctx](../images/ctx-banner.png) ## The Problem @@ -32,7 +32,7 @@ where it silently fails. Most teams stop at the first pass. ``` Each pass builds on the previous one. Run them in order. The -output accumulates in `.context/` — each pass reads the prior +output accumulates in `.context/`; each pass reads the prior artifacts and extends them. ## Commands and Skills Used @@ -47,7 +47,7 @@ artifacts and extends them. ## The Workflow -### Pass 1: Map what exists +### Pass 1: Map What Exists ```text /ctx-architecture @@ -55,15 +55,15 @@ artifacts and extends them. Produces: -- **ARCHITECTURE.md** — succinct project map (< 4000 tokens), +- **ARCHITECTURE.md**: succinct project map (< 4000 tokens), loaded at every session start -- **DETAILED_DESIGN*.md** — deep per-module reference with +- **DETAILED_DESIGN*.md**: deep per-module reference with exported API, data flow, danger zones, extension points -- **CHEAT-SHEETS.md** — lifecycle flow diagrams -- **map-tracking.json** — coverage state with confidence scores +- **CHEAT-SHEETS.md**: lifecycle flow diagrams +- **map-tracking.json**: coverage state with confidence scores This pass forces deep code reading. No shortcuts, no code -intelligence tools — the agent reads every module it analyzes. +intelligence tools; the agent reads every module it analyzes. That forced reading is what makes the subsequent passes useful. **When to run**: First time on a codebase, or after significant @@ -77,7 +77,7 @@ dependencies). 
/ctx-architecture principal ``` -### Pass 2: Enrich with code intelligence +### Pass 2: Enrich with Code Intelligence ```text /ctx-architecture-enrich @@ -91,7 +91,7 @@ graph-backed data from GitNexus: - Domain clustering validation - Registration site discovery -This pass does not replace reading — it quantifies what reading +This pass does not replace reading; it quantifies what reading found. If Pass 1 says "module X depends on module Y," Pass 2 says "module X has 47 callers in module Y, and changing function Z would affect 12 downstream consumers." @@ -101,7 +101,7 @@ confidence for refactoring decisions or risk assessment. **Requires**: GitNexus MCP server connected. -### Pass 3: Hunt for failure modes +### Pass 3: Hunt for Failure Modes ```text /ctx-architecture-failure-analysis @@ -127,7 +127,7 @@ challenge phase attempts to disprove each finding before it is accepted. Findings carry a confidence level (High/Medium/Low) and explicit risk score. -Produces **DANGER-ZONES.md** — a ranked inventory of findings +Produces **DANGER-ZONES.md**, a ranked inventory of findings split into Critical and Elevated tiers. **When to run**: Before releases, after major refactors, when @@ -152,7 +152,7 @@ new files. ## Tips - **Run Pass 1 with focus areas** if the codebase is large. - The skill asks what to go deep on — name the modules you're + The skill asks what to go deep on, so name the modules you're about to change. - **You don't need all three passes every time.** Pass 1 is the foundation. Pass 2 and 3 are for when you need @@ -167,7 +167,7 @@ new files. 
## See Also *See also: [Detecting and Fixing Context Drift](context-health.md) -— keep architecture artifacts fresh between deep-dive sessions.* +to keep architecture artifacts fresh between deep-dive sessions.* *See also: [Detecting and Fixing Context Drift](context-health.md) -— structural checks that complement architecture analysis.* +for structural checks that complement architecture analysis.* diff --git a/docs/recipes/autonomous-loops.md b/docs/recipes/autonomous-loops.md index 78281dc99..d3dd5d8d8 100644 --- a/docs/recipes/autonomous-loops.md +++ b/docs/recipes/autonomous-loops.md @@ -375,7 +375,7 @@ The self-check becomes the trigger: after meaningful work, the agent evaluates whether the context files reflect reality and updates them immediately if they do not. -### What the Agent Does Proactively Between Iterations +### What the Agent Does Proactively between Iterations At milestones within an iteration, the agent persists without waiting for instructions: diff --git a/docs/recipes/claude-code-permissions.md b/docs/recipes/claude-code-permissions.md index 5634e5d46..1ac9825be 100644 --- a/docs/recipes/claude-code-permissions.md +++ b/docs/recipes/claude-code-permissions.md @@ -214,7 +214,7 @@ Project-local hooks (not part of the plugin) catch regex edge cases: ## The Maintenance Workflow -### After busy sessions +### After Busy Sessions Permissions accumulate fastest during debugging and exploration sessions. After a session where you clicked "*Allow*" many times: @@ -226,8 +226,8 @@ After a session where you clicked "*Allow*" many times: * Commands with literal string arguments, * Entries that duplicate an existing wildcard. -See [`hack/runbooks/sanitize-permissions.md`](https://github.com/ActiveMemory/ctx/blob/main/hack/runbooks/sanitize-permissions.md) -for a step-by-step runbook. +See the [Sanitize Permissions runbook](../operations/runbooks/sanitize-permissions.md) +for a step-by-step procedure. 
### Periodically @@ -245,14 +245,14 @@ Run `/ctx-permission-sanitize` to catch security issues: * Overly broad permissions * Injection vectors -### When adding new skills +### When Adding New Skills If you create a custom `ctx-*` skill, add its `Skill()` entry to the allowlist manually. `ctx init` only populates the default permissions: It won't pick up custom skills. -### Golden image snapshots +### Golden Image Snapshots If manual cleanup is too tedious, use a **golden image** to automate it: @@ -304,5 +304,5 @@ permission baselines for reproducible setups. * [Setting Up ctx Across AI Tools](multi-tool-setup.md): full setup recipe including `settings.local.json` creation * [Context Health](context-health.md): keeping `.context/` files accurate -* [`hack/runbooks/sanitize-permissions.md`](https://github.com/ActiveMemory/ctx/blob/main/hack/runbooks/sanitize-permissions.md): - manual cleanup runbook +* [Sanitize Permissions runbook](../operations/runbooks/sanitize-permissions.md): + manual cleanup procedure diff --git a/docs/recipes/configuration-profiles.md b/docs/recipes/configuration-profiles.md index e885a69ad..7d0630c6d 100644 --- a/docs/recipes/configuration-profiles.md +++ b/docs/recipes/configuration-profiles.md @@ -22,7 +22,7 @@ normal sessions. --- -## How it works +## How It Works The `ctx` repo ships two source profiles committed to git: @@ -37,7 +37,7 @@ is always a clean snapshot of one of the two sources. --- -## Switching profiles +## Switching Profiles ```bash # Switch to dev (verbose logging, notifications) @@ -58,7 +58,7 @@ in `.ctxrc`: present means dev, absent means base. --- -## Checking the active profile +## Checking the Active Profile ```bash ctx config status @@ -74,7 +74,7 @@ active: none (.ctxrc does not exist) --- -## Typical workflow +## Typical Workflow 1. 
**Start of a debugging session**: switch to dev for verbose logging and webhook notifications so you can trace hook @@ -95,27 +95,27 @@ active: none (.ctxrc does not exist) --- -## Customizing profiles +## Customizing Profiles Edit the source files directly: -- **`.ctxrc.dev`** -- add any `.ctxrc` keys you want active during +- **`.ctxrc.dev`**: add any `.ctxrc` keys you want active during development (e.g., `log_level: debug`, `notify.events`, `notify.webhook_url`). -- **`.ctxrc.base`** -- keep this minimal. It represents your +- **`.ctxrc.base`**: keep this minimal. It represents your "production" defaults. After editing a source file, re-run `ctx config switch ` to apply the changes to the working copy. -!!! tip "Commit your profiles" +!!! tip "Commit Your Profiles" Both `.ctxrc.base` and `.ctxrc.dev` should be committed to git so team members share the same profile definitions. The working copy `.ctxrc` stays gitignored. --- -## Using the skill +## Using the Skill In a Claude Code session, say any of: diff --git a/docs/recipes/context-health.md b/docs/recipes/context-health.md index c29c0c01b..5b9f2ba38 100644 --- a/docs/recipes/context-health.md +++ b/docs/recipes/context-health.md @@ -143,7 +143,7 @@ How's the context looking? This turns maintenance from a scheduled chore into a conversation that happens **when** it matters. -### Step 3: Real-Time Detection During Work +### Step 3: Real-Time Detection during Work Agents *can* notice drift while working: When a mismatch is directly in the path of their current task. If an agent reads `ARCHITECTURE.md` to find where @@ -219,7 +219,7 @@ ctx doctor # everything in one pass ctx doctor --json # machine-readable for scripting ``` -!!! tip "Use `/ctx-doctor` Too" +!!! tip "Use `/ctx-doctor` Too" For agent-driven diagnosis that adds semantic analysis on top of the structural checks, use `/ctx-doctor`. 
diff --git a/docs/recipes/customizing-hook-messages.md b/docs/recipes/customizing-hook-messages.md index 2bcced119..f708f14a0 100644 --- a/docs/recipes/customizing-hook-messages.md +++ b/docs/recipes/customizing-hook-messages.md @@ -176,14 +176,13 @@ manually. Not all messages are equal. The `list` command shows each message's category: -### Customizable (17 messages) +### Customizable (17 Messages) Messages that are **opinions**: project-specific wording that benefits from customization. These are the primary targets for override. | Hook | Variant | Description | |---------------------|------------|------------------------------------------| -| check-backup-age | warning | Backup staleness warning | | check-freshness | stale | Technology constant freshness warning | | check-ceremonies | both | Both ceremonies missing | | check-ceremonies | remember | Start-of-session ceremony | @@ -200,7 +199,7 @@ from customization. These are the primary targets for override. | post-commit | nudge | Post-commit context capture | | qa-reminder | gate | Pre-commit QA gate | -### ctx-specific (10 messages) +### ctx-Specific (10 Messages) Messages specific to ctx's own development workflow. You *can* customize them, but `edit` will warn you first. @@ -225,7 +224,6 @@ them, but `edit` will warn you first. | Hook | Variant | Variables | |--------------------------|------------------------|------------------------------------------------| -| check-backup-age | warning | `{{.Warnings}}` | | check-freshness | stale | `{{.StaleFiles}}` | | check-context-size | checkpoint | *(none)* | | check-context-size | oversize | `{{.TokenCount}}` | diff --git a/docs/recipes/design-before-coding.md b/docs/recipes/design-before-coding.md index ec73940df..9826a348d 100644 --- a/docs/recipes/design-before-coding.md +++ b/docs/recipes/design-before-coding.md @@ -109,7 +109,7 @@ and won't accept "none" without a challenge. Sections that don't apply can be skipped. 
The result is a complete spec at `specs/{feature-name}.md`. -### Step 3: Break Into Tasks +### Step 3: Break into Tasks After the spec is written, the skill offers to create tasks: diff --git a/docs/recipes/external-context.md b/docs/recipes/external-context.md index dacb877f6..9a5bf7f23 100644 --- a/docs/recipes/external-context.md +++ b/docs/recipes/external-context.md @@ -7,8 +7,8 @@ icon: lucide/folder-symlink ## The Problem -`ctx` files contain project-specific **decisions**, **learnings**, -**conventions**, and **tasks**. By default, they live in +`ctx` files contain project-specific **decisions**, **learnings**, +**conventions**, and **tasks**. By default, they live in `.context/` inside the project tree, and that works well when the context can be public. @@ -19,42 +19,95 @@ But sometimes you need the context *outside* the project: repo. * **Compliance or IP concerns**: Context files reference sensitive design rationale that belongs in a separate access-controlled repository. -* **Personal preference**: You want a single context repo that covers - multiple projects, or you just prefer keeping notes separate from code. +* **Personal preference**: You want to keep notes separate from code. -`ctx` supports this through three configuration methods. This recipe shows how -to set them up and how to tell your AI assistant where to find the context. +`ctx` supports this by letting you point `CTX_DIR` anywhere. This recipe +shows how to set that up and how to tell your AI assistant where to find the +context. + +!!! warning "One `.context/` per project" + The parent of the context directory is the project root by contract. + `ctx sync`, `ctx drift`, and the memory-drift hook all read the + codebase at `filepath.Dir(ContextDir())`. Pointing two projects at + the same directory corrupts their journals, state, and secrets. To + share knowledge (CONSTITUTION / CONVENTIONS / ARCHITECTURE) across + projects, use [`ctx hub`](hub-overview.md), not a shared `.context/`. 
## TL;DR -First `--allow-outside-cwd` in your project: +Create the external context directory, initialize it, and bind it: ```bash -mkdir ~/repos/myproject-context && cd ~/repos/myproject-context && git init +mkdir -p ~/repos/myproject-context && cd ~/repos/myproject-context && git init cd ~/repos/myproject -ctx --context-dir ~/repos/myproject-context --allow-outside-cwd init + +# Bind CTX_DIR to the external location, then init creates files there. +export CTX_DIR=~/repos/myproject-context/.context +ctx init ``` -Then, [create a `.ctxrc`](../home/configuration.md) in your **project root** -to specify the new `.context` folder location: +All `ctx` commands now use the external directory. If you share the +setup across shells, add the `export CTX_DIR=...` line to your +shell rc, or source a per-project `.envrc` with direnv. -```yaml -context_dir: ~/repos/myproject-context -allow_outside_cwd: true +## What Works, What Quietly Degrades + +The single-source-anchor contract states that +`filepath.Dir(CTX_DIR)` is the project root. When the context +lives outside the project tree, ctx still resolves correctly for +every operation that reads or writes inside `.context/`. But any +operation that scans the **codebase** scans the wrong tree, and +does so silently: + +| Operation | Behavior with external `.context/` | +|---------------------------------|---------------------------------------------------| +| `ctx status`, `agent`, `add` | ✅ Works. Operates on files inside `CTX_DIR`. | +| Journal, scratchpad, hub | ✅ Works. Same reason. | +| `ctx sync` | ⚠️ Scans the *context repo*, not the code repo. | +| `ctx drift` | ⚠️ Same. Reports nothing useful. | +| Memory-drift hook (`MEMORY.md`) | ⚠️ Looks for `MEMORY.md` next to the external `.context/`, not the code. | + +Nothing errors. The code-aware operations just find an empty or +unrelated tree where the project root should be. 
+
+### Workaround: Symlink the `.context/` into the Code Tree
+
+If you want both the privacy of an external git repo *and* working
+`ctx sync` / `drift` / memory-drift, symlink the external
+`.context/` into the code repo and point `CTX_DIR` at the symlink:
+
+```bash
+# External repo holds the real files
+mkdir -p ~/repos/myproject-context && cd ~/repos/myproject-context && git init
+
+# Symlink it into the code repo
+ln -s ~/repos/myproject-context/.context ~/repos/myproject/.context
+
+# Bind CTX_DIR to the symlink path; ctx init will follow it
+export CTX_DIR=~/repos/myproject/.context
+ctx init
+```
+
+Now `filepath.Dir(CTX_DIR)` is the **code repo**, so code-aware
+operations scan the right tree. The actual files still live in
+the external repo and commit there. Add `.context` to the code
+repo's `.gitignore` (or `.git/info/exclude`) so the symlink itself
+isn't tracked by the code repo.
+
+The basename guard is permissive about symlinks: it checks the
+declared name, not the resolved target, so a `.context` symlink
+pointing anywhere is accepted as long as the declared basename is
+`.context`.
## Commands and Skills Used -| Tool | Type | Purpose | -|-----------------------|--------------|-----------------------------------------| -| `ctx init` | CLI command | Initialize context directory | -| `--context-dir` | Global flag | Point ctx at a non-default directory | -| `--allow-outside-cwd` | Global flag | Permit context outside the project root | -| `.ctxrc` | Config file | Persist the context directory setting | -| `CTX_DIR` | Env variable | Override context directory per-session | -| `/ctx-status` | Skill | Verify context is loading correctly | +| Tool | Type | Purpose | +|-----------------|--------------|-----------------------------------------| +| `ctx init` | CLI command | Initialize context directory | +| `ctx activate` | CLI command | Emit `export CTX_DIR=...` for the shell | +| `CTX_DIR` | Env variable | Declare context directory per-session | +| `.ctxrc` | Config file | Per-project configuration | +| `/ctx-status` | Skill | Verify context is loading correctly | ## The Workflow @@ -65,100 +118,79 @@ a private GitHub repo, a shared drive, a sibling directory: ```bash # Create the context repo -mkdir ~/repos/myproject-context +mkdir -p ~/repos/myproject-context cd ~/repos/myproject-context git init ``` ### Step 2: Initialize ctx Pointing at It -From your project root, initialize ctx with `--context-dir` pointing to the -external location. Because the directory is outside your project tree, you also -need `--allow-outside-cwd`: +From your project root, declare `CTX_DIR` pointing to the external +location, then initialize: ```bash cd ~/repos/myproject -ctx --context-dir ~/repos/myproject-context \ - --allow-outside-cwd \ - init +CTX_DIR=~/repos/myproject-context/.context ctx init ``` -This creates the full `.context/`-style file set inside +This creates the canonical `.context/` file set inside `~/repos/myproject-context/` instead of `~/repos/myproject/.context/`. -!!! 
warning "Boundary Validation" - `ctx` validates that the `.context` directory is within the current working - directory. - - If your external directory is truly outside the project root: - - * Either every `ctx` command needs `--allow-outside-cwd`, - * or you can persist the setting in `.ctxrc` (*next step*). - ### Step 3: Make It Stick -Typing `--context-dir` and `--allow-outside-cwd` on every command is tedious. -Pick one of these methods to make the configuration permanent. - -#### Option A: `.ctxrc` (*Recommended*) - -Create a `.ctxrc` file in your project root: +Declaring `CTX_DIR` on every command is tedious. Pick one of these +methods to make the configuration permanent. The context directory +itself must be declared via `CTX_DIR`; `.ctxrc` does not carry the +path. -```yaml -# .ctxrc: committed to the project repo -context_dir: ~/repos/myproject-context -allow_outside_cwd: true -``` - -ctx reads `.ctxrc` automatically. Every command now uses the external -directory without extra flags: +#### Option A: `CTX_DIR` Environment Variable (*Recommended*) ```bash -ctx status # reads from ~/repos/myproject-context -ctx add learning "Redis MULTI doesn't roll back on error" \ - --session-id abc12345 --branch main --commit 68fbc00a -``` - -!!! tip "Commit `.ctxrc`" - `.ctxrc` belongs in the project repo. It contains no secrets: It's just a - path and a boundary override. +# Direct path. Works for ctx status / agent / add but degrades +# code-aware operations. See "What Works, What Quietly Degrades". +export CTX_DIR=~/repos/myproject-context/.context - `.ctxrc` lets teammates share the same configuration. - -#### Option B: `CTX_DIR` Environment Variable - -Good for CI pipelines, temporary overrides, or when you don't want to commit -a `.ctxrc`: - -```bash -# In your shell profile (~/.bashrc, ~/.zshrc) -export CTX_DIR=~/repos/myproject-context +# Or, with the symlink approach above, point at the symlink path +# inside the code repo so code-aware operations stay healthy. 
+export CTX_DIR=~/repos/myproject/.context ``` -Or for a single session: +Put either form in your shell profile (`~/.bashrc`, `~/.zshrc`) +or a direnv `.envrc`. -```bash -CTX_DIR=~/repos/myproject-context ctx status -``` +For a single session, run `eval "$(ctx activate)"` from any +directory inside the project where exactly one `.context/` +candidate is visible (the symlink counts). `activate` does not +accept a path argument; bind a specific path by exporting +`CTX_DIR` directly instead. -#### Option C: Shell Alias +#### Option B: `.ctxrc` for Other Settings -If you prefer a shell alias over `.ctxrc`: +Put any settings (token budget, priority order, freshness files) in a +`.ctxrc` at the project root (`dirname(CTX_DIR)`), which here is the +parent of the external `.context/`: -```bash -# ~/.bashrc or ~/.zshrc -alias ctx='ctx --context-dir ~/repos/myproject-context --allow-outside-cwd' +```yaml +# ~/repos/myproject-context/.ctxrc +token_budget: 16000 ``` -#### Priority Order +`.ctxrc` is always read from the parent of `CTX_DIR`, so this file is +picked up whenever `CTX_DIR` points at +`~/repos/myproject-context/.context`. + +#### Resolution -When multiple methods are set, `ctx` resolves the context directory in this -order (*highest priority first*): +`ctx` reads the context directory from a single channel: the +`CTX_DIR` environment variable. When `CTX_DIR` is unset, `ctx` +errors with a "no context directory specified" hint pointing at +`ctx activate` and this recipe. When set, the value must be an +absolute path with `.context` as its basename; relative paths and +other names are rejected on first use. -1. `--context-dir` flag -2. `CTX_DIR` environment variable -3. `context_dir` in `.ctxrc` -4. Default: `.context/` +See +[Activating a Context Directory](activating-context.md) for the full +recipe. 
### Step 4: Agent Auto-Discovery via Bootstrap @@ -171,40 +203,40 @@ $ ctx system bootstrap ctx system bootstrap ==================== -context_dir: /home/user/repos/myproject-context +context_dir: /home/user/repos/myproject-context/.context Files: CONSTITUTION.md, TASKS.md, DECISIONS.md, ... ``` The `CLAUDE.md` template generated by `ctx init` already instructs the agent to -run `ctx system bootstrap` at session start. Because `.ctxrc` is in the -project root, your agent inherits the external path automatically via -the `ctx system bootstrap` call instruction. +run `ctx system bootstrap` at session start. Because `CTX_DIR` is inherited +by child processes, your agent picks up the external path automatically. Here is the relevant section from `CLAUDE.md` for reference: ```markdown 1. **Run `ctx system bootstrap`**: CRITICAL, not optional. - This tells you where the context directory is. If it fails or returns - no context_dir, STOP and warn the user. + This tells you where the context directory is. If it returns any + error, relay the error output to the user verbatim, point them at + https://ctx.ist/recipes/activating-context/ for setup, and STOP. + Do not try to recover; the user decides. ``` -Moreover, every nudge (*context checkpoint, persistence reminder, etc.*) also -includes a `Context: /home/user/repos/myproject-context` footer, so the agent -remains anchored to the correct directory even in long sessions. +Moreover, every nudge (*context checkpoint, persistence reminder, etc.*) also +includes a `Context: /home/user/repos/myproject-context/.context` footer, so +the agent remains anchored to the correct directory even in long sessions. 
-If you use `CTX_DIR` instead of `.ctxrc`, export it in your shell -profile so the hook process inherits it: +Export `CTX_DIR` in your shell profile so every hook process inherits it: ```bash -export CTX_DIR=~/repos/myproject-context +export CTX_DIR=~/repos/myproject-context/.context ``` ### Step 5: Share with Teammates -Teammates clone both repos and set up `.ctxrc`: +Teammates clone both repos and export `CTX_DIR`: ```bash # Clone the project @@ -213,16 +245,10 @@ cd myproject # Clone the private context repo git clone git@github.com:org/myproject-context.git ~/repos/myproject-context +export CTX_DIR=~/repos/myproject-context/.context ``` -If `.ctxrc` is already committed to the project, they're done: `ctx` -commands will find the external context automatically. - -If teammates use different paths, each developer sets their own `CTX_DIR`: - -```bash -export CTX_DIR=~/my-own-path/myproject-context -``` +If teammates use different paths, each developer sets their own `CTX_DIR`. For encryption key distribution across the team, see the [Syncing Scratchpad Notes](scratchpad-sync.md) recipe. @@ -230,7 +256,7 @@ For encryption key distribution across the team, see the ### Step 6: Day-to-Day Sync The external context repo has its own git history. Treat it like any other -repo: Commit and push after sessions: +repo: commit and push after sessions: ```bash cd ~/repos/myproject-context @@ -263,9 +289,9 @@ You don't need to remember the flags; simply ask your assistant: ```text You: "Set up ctx to use ~/repos/myproject-context as the context directory." -Agent: "I'll create a .ctxrc in the project root pointing to that path. - I'll also update CLAUDE.md so future sessions know where to find - context. Want me to initialize the context files there too?" +Agent: "I'll set CTX_DIR to that path, run ctx init to materialize + it, and show you the export line to add to your shell + profile. Want me to seed the core context files too?" 
``` ### Configure Separate Repo for `.context` Folder Using Natural Language @@ -273,7 +299,7 @@ Agent: "I'll create a .ctxrc in the project root pointing to that path. ```text You: "My context is in a separate repo. Can you load it?" -Agent: [reads .ctxrc, finds the path, loads context from the external dir] +Agent: [reads CTX_DIR, loads context from the external dir] "Loaded. You have 3 pending tasks, last session was about the auth refactor." ``` @@ -286,12 +312,10 @@ Agent: [reads .ctxrc, finds the path, loads context from the external dir] The default `.context/` in-tree is the easiest path. Move to an external repo when you have a concrete reason. * **One context repo per project**. Sharing a single context directory across - multiple projects creates confusion. Keep the mapping 1:1. -* **Use `.ctxrc` over env vars** when the path is stable. It's committed, - documented, and works for the whole team without per-developer shell setup. -* **Don't forget the boundary flag**. The most common error is - `Error: context directory is outside the project root`. Set - `allow_outside_cwd: true` in `.ctxrc` or pass `--allow-outside-cwd`. + multiple projects corrupts journals, state, and secrets. Use `ctx hub` for + cross-project knowledge sharing. +* **Export `CTX_DIR` in your shell profile** so hooks and tools inherit the + path without per-command flags. * **Commit both repos at session boundaries**. Context without code history (*or code without context history*) loses half the value. @@ -307,5 +331,4 @@ full ctx session from start to finish. 
* [Setting Up ctx Across AI Tools](multi-tool-setup.md): initial setup recipe * [Syncing Scratchpad Notes Across Machines](scratchpad-sync.md): distribute encryption keys when context is shared -* [CLI Reference](../cli/index.md): all global flags including - `--context-dir` and `--allow-outside-cwd` +* [CLI Reference](../cli/index.md): full command list and global options diff --git a/docs/recipes/hook-output-patterns.md b/docs/recipes/hook-output-patterns.md index 01a1ab621..9431c1372 100644 --- a/docs/recipes/hook-output-patterns.md +++ b/docs/recipes/hook-output-patterns.md @@ -99,7 +99,6 @@ what they asked: Stale backups, unimported sessions, resource warnings. * `ctx system check-context-size`: Context capacity warning * `ctx system check-resources`: Resource pressure (memory, swap, disk, load): `DANGER` only * `ctx system check-freshness`: Technology constant staleness warning -* `check-backup-age.sh`: Stale backup warning (*project-local*) **Trade-off**: Noisy if overused. Every VERBATIM relay adds a preamble before the agent's actual answer. Throttle with once-per-day markers or diff --git a/docs/recipes/hook-sequence-diagrams.md b/docs/recipes/hook-sequence-diagrams.md index 2ca1b7b6f..2dde1896a 100644 --- a/docs/recipes/hook-sequence-diagrams.md +++ b/docs/recipes/hook-sequence-diagrams.md @@ -12,23 +12,23 @@ title: Hook Sequence Diagrams ## Hook Lifecycle -This page documents the **ctx system hooks** — the built-in +This page documents the **ctx system hooks**: the built-in `ctx system *` subcommands that Claude Code invokes via `.claude/hooks.json` at lifecycle events. These are owned by ctx itself, not authored by users. -!!! info "Not to be confused with `ctx trigger`" +!!! 
info "Not to Be Confused with `ctx trigger`" `ctx` has **three distinct hook-like layers**: - - **`ctx system` hooks** (this page) — built-in, owned + - **`ctx system` hooks** (this page): built-in, owned by ctx, wired into Claude Code via `internal/assets/claude/hooks/hooks.json`. - - **`ctx trigger`** — user-authored shell scripts in + - **`ctx trigger`**: user-authored shell scripts in `.context/hooks//*.sh`. See [`ctx trigger` reference](../cli/trigger.md) and the [trigger authoring recipe](triggers.md). - **Claude Code hooks** configured directly in - `.claude/settings.local.json` — tool-specific, not + `.claude/settings.local.json`, tool-specific, not portable across AI tools. This page is *only* about the first category. @@ -49,7 +49,7 @@ on stdout. These fire **before** a tool executes. They can block, gate, or inject context. -### context-load-gate +### Context-Load-Gate Matcher: `.*` (all tools) @@ -96,7 +96,7 @@ sequenceDiagram Hook->>State: Write oversize flag if tokens > threshold ``` -### block-non-path-ctx +### Block-Non-Path-ctx Matcher: `Bash` @@ -126,7 +126,7 @@ sequenceDiagram Hook->>Hook: NudgeAndRelay(message) ``` -### qa-reminder +### Qa-Reminder Matcher: `Bash` @@ -153,7 +153,7 @@ sequenceDiagram Hook->>Hook: Relay(message) ``` -### specs-nudge +### Specs-Nudge Matcher: `EnterPlanMode` @@ -185,7 +185,7 @@ sequenceDiagram These fire **after** a tool completes. They observe, nudge, and track state. -### post-commit +### Post-Commit Matcher: `Bash` @@ -218,7 +218,7 @@ sequenceDiagram Hook->>Hook: CheckVersionDrift() ``` -### check-task-completion +### Check-Task-Completion Matcher: `Edit`, `Write` @@ -263,7 +263,7 @@ sequenceDiagram These fire **on every user prompt**, before any tools run. They perform health checks, track state, and nudge for housekeeping. -### check-context-size +### Check-Context-Size Adaptive context window monitoring. Fires checkpoints, window warnings, and billing alerts based on prompt count and token usage. 
@@ -325,7 +325,7 @@ sequenceDiagram Hook->>State: Write session stats ``` -### check-ceremonies +### Check-Ceremonies Daily check for `/ctx-remember` and `/ctx-wrap-up` usage in recent journal entries. @@ -362,7 +362,7 @@ sequenceDiagram Hook->>State: Touch throttle marker ``` -### check-freshness +### Check-Freshness Daily check for technology-dependent constants that may need review. @@ -393,7 +393,7 @@ sequenceDiagram Hook->>State: Touch throttle marker ``` -### check-journal +### Check-Journal Daily check for unimported sessions and unenriched journal entries. @@ -433,7 +433,7 @@ sequenceDiagram Hook->>State: Touch throttle marker ``` -### check-knowledge +### Check-Knowledge Daily check for knowledge file entry/line counts exceeding configured thresholds. @@ -473,7 +473,7 @@ sequenceDiagram Hook->>State: Touch throttle marker ``` -### check-map-staleness +### Check-Map-Staleness Daily check for architecture map age and relevant code changes. @@ -513,7 +513,7 @@ sequenceDiagram Hook->>State: Touch throttle marker ``` -### check-memory-drift +### Check-Memory-Drift Per-session check for MEMORY.md changes since last sync. @@ -548,7 +548,7 @@ sequenceDiagram Hook->>State: Touch session tombstone ``` -### check-persistence +### Check-Persistence Tracks context file modification and nudges when edits happen without persisting context. Adaptive threshold based on prompt count. @@ -589,7 +589,7 @@ sequenceDiagram Hook->>State: Update LastNudge = Count, write state ``` -### check-reminders +### Check-Reminders Per-prompt check for due reminders. No throttle. @@ -618,7 +618,7 @@ sequenceDiagram Hook->>Hook: NudgeAndRelay(message) ``` -### check-resources +### Check-Resources Checks system resources (memory, swap, disk, load). Fires on every prompt. No initialization required. @@ -647,7 +647,7 @@ sequenceDiagram Hook->>Hook: NudgeAndRelay(message) ``` -### check-version +### Check-Version Daily binary-vs-plugin version comparison with piggybacked key rotation check. 
@@ -691,7 +691,7 @@ sequenceDiagram Hook->>Hook: CheckKeyAge() (piggybacked) ``` -### heartbeat +### Heartbeat Silent per-prompt pulse. Tracks prompt count, context modification, and token usage. The agent never sees this hook's output. @@ -727,7 +727,7 @@ sequenceDiagram These hooks are configured in `settings.local.json` and are **not** shipped with ctx. They are specific to individual developer setups. -### block-dangerous-commands +### Block-Dangerous-Commands Lifecycle: PreToolUse. Matcher: `Bash` @@ -770,40 +770,6 @@ sequenceDiagram Hook->>Hook: NudgeAndRelay(message) ``` -### check-backup-age - -Lifecycle: UserPromptSubmit. - -Daily check for SMB mount and backup freshness. - -```mermaid -sequenceDiagram - participant CC as Claude Code - participant Hook as check-backup-age - participant State as .context/state/ - participant FS as Filesystem - participant Tpl as Message Template - - CC->>Hook: stdin {session_id} - Hook->>Hook: Check initialized + HookPreamble - alt not initialized or paused - Hook-->>CC: (silent exit) - end - Hook->>State: Check daily throttle marker - alt throttled - Hook-->>CC: (silent exit) - end - Hook->>FS: Check SMB mount (if env var set) - Hook->>FS: Check backup marker file age - alt no warnings - Hook-->>CC: (silent exit) - end - Hook->>Tpl: LoadMessage(hook, warning, {Warnings}) - Hook-->>CC: Nudge box (warnings) - Hook->>Hook: NudgeAndRelay(message) - Hook->>State: Touch throttle marker -``` - --- ## Throttling Summary @@ -829,7 +795,6 @@ sequenceDiagram | check-version | UserPromptSubmit | Daily marker | Once per day | | heartbeat | UserPromptSubmit | None | Every prompt | | block-dangerous-commands | PreToolUse * | None | Every match | -| check-backup-age | UserPromptSubmit * | Daily marker | Once per day | \* Project-local hook (settings.local.json), not shipped with ctx. @@ -843,7 +808,6 @@ All state files live in `.context/state/`. 
| `ctx-paused-{session}` | (all) | Session pause marker | | `ctx-wrapped-up` | check-context-size | Suppress nudges after wrap-up (2h expiry) | | `freshness-checked` | check-freshness | Daily throttle | -| `backup-reminded` | check-backup-age | Daily throttle | | `ceremony-reminded` | check-ceremonies | Daily throttle | | `journal-reminded` | check-journal | Daily throttle | | `knowledge-reminded` | check-knowledge | Daily throttle | diff --git a/docs/recipes/hub-cluster.md b/docs/recipes/hub-cluster.md index 9f4c41b90..1ed5ca930 100644 --- a/docs/recipes/hub-cluster.md +++ b/docs/recipes/hub-cluster.md @@ -5,13 +5,13 @@ # \ Copyright 2026-present Context contributors. # SPDX-License-Identifier: Apache-2.0 -title: HA cluster +title: HA Cluster icon: lucide/layers --- ![ctx](../images/ctx-banner.png) -# `ctx` Hub: High-availability cluster +# `ctx` Hub: High-Availability Cluster Run **multiple** hub nodes with Raft-based leader election for redundancy. Any follower can take over if the leader dies. @@ -19,11 +19,11 @@ redundancy. Any follower can take over if the leader dies. This recipe assumes you've read the [`ctx` Hub overview](hub-overview.md) and the [Multi-machine setup](hub-multi-machine.md). HA only makes -sense in the "small trusted team" story — a personal +sense in the "small trusted team" story; a personal cross-project brain on one workstation does not need three Raft peers. -!!! warning "Raft-lite" +!!! warning "Raft-Lite" ctx uses Raft **only for leader election**, not for data consensus. Entry replication happens via sequence-based gRPC sync on the append-only JSONL store. This is simpler than full @@ -36,7 +36,7 @@ peers. ## Topology -A minimum HA cluster is **three** nodes. Two is worse than one — +A minimum HA cluster is **three** nodes. Two is worse than one: it doubles failure probability without providing quorum. ``` @@ -56,7 +56,7 @@ it doubles failure probability without providing quorum. 
gRPC (data sync) ``` -## Step 1 — Bootstrap the first node +## Step 1: Bootstrap the First Node ```bash ctx hub start --daemon \ @@ -66,7 +66,7 @@ ctx hub start --daemon \ The node starts a Raft election as soon as it sees its peers. -## Step 2 — Start the other nodes +## Step 2: Start the Other Nodes On `hub-b.lan`: @@ -87,7 +87,7 @@ ctx hub start --daemon \ After a few seconds, one node wins the election and becomes the **leader**. The other two are followers. -## Step 3 — Verify cluster state +## Step 3: Verify Cluster State From any node: @@ -106,7 +106,7 @@ entries: 1248 uptime: 3h42m ``` -## Step 4 — Register clients with failover peers +## Step 4: Register Clients with Failover Peers When registering a client, give it the **full peer list**: @@ -120,7 +120,7 @@ If the leader becomes unreachable, the client reconnects to the next peer. Followers redirect to the current leader, so writes always land on the right node. -## Runtime membership changes +## Runtime Membership Changes Add a new peer without downtime: @@ -134,7 +134,7 @@ Remove a decommissioned peer: ctx hub peer remove hub-c.lan:9900 ``` -## Planned maintenance +## Planned Maintenance Before taking a leader offline, hand off leadership: @@ -146,7 +146,7 @@ ssh hub-a.lan 'ctx hub stepdown' before the leader goes offline. In-flight clients briefly pause, then reconnect to the new leader. -## Failure modes at a glance +## Failure Modes at a Glance | Event | What happens | |-----------------------------|----------------------------------------------| @@ -159,10 +159,10 @@ then reconnect to the new leader. For the full list, see [Hub failure modes](../operations/hub-failure-modes.md). 
-## See also +## See Also -- [Multi-machine recipe](hub-multi-machine.md) — single-node +- [Multi-machine recipe](hub-multi-machine.md): single-node deployment -- [Hub operations](../operations/hub.md) — backup and +- [Hub operations](../operations/hub.md): backup and maintenance -- [Hub security model](../security/hub.md) — TLS, tokens +- [Hub security model](../security/hub.md): TLS, tokens diff --git a/docs/recipes/hub-getting-started.md b/docs/recipes/hub-getting-started.md index 7e53dac45..b99d3282c 100644 --- a/docs/recipes/hub-getting-started.md +++ b/docs/recipes/hub-getting-started.md @@ -15,16 +15,16 @@ icon: lucide/share-2 Stand up a **single-node** `ctx` Hub on localhost, register two projects, publish a decision from one, and see it appear in the -other — all in under five minutes. +other, all in under five minutes. -!!! tip "Read this first" +!!! tip "Read This First" If you haven't already, skim the [`ctx` Hub overview](hub-overview.md). It explains the mental model, names the two user stories (personal vs small - team), and — importantly — lists what the hub **does not do**. + team), and (importantly) lists what the hub **does not do**. This recipe assumes you already know you want the feature. -## What you'll get out of this recipe +## What You'll Get out of This Recipe By the end, you will have: @@ -36,19 +36,19 @@ By the end, you will have: Concretely, the payoff this unlocks: a lesson you record in one project becomes visible to your agent the next time you open -another project — **without** touching local files in the second +another project, **without** touching local files in the second project or opening another editor window. -## What this recipe does *not* cover +## What This Recipe Does *Not* Cover - Sharing `.context/journal/`, `.context/pad`, or any other local state. The hub only fans out `decision`, `learning`, `convention`, and `task` entries. Everything else stays local. - Multi-user attribution. 
The hub identifies **projects**, not people. -- Running over a LAN — see +- Running over a LAN; see [Multi-machine setup](hub-multi-machine.md). -- Redundancy — see [HA cluster](hub-cluster.md). +- Redundancy; see [HA cluster](hub-cluster.md). ## Prerequisites @@ -56,7 +56,7 @@ project or opening another editor window. - Two project directories, each already initialized with `ctx init` -## Step 1 — Start the hub +## Step 1: Start the Hub In a dedicated terminal: @@ -65,7 +65,7 @@ ctx hub start ``` On first run, the hub generates an **admin token** and prints it to -stdout. Copy it — you'll need it for each project registration: +stdout. Copy it; you'll need it for each project registration: ``` ctx hub listening on :9900 @@ -76,7 +76,7 @@ data dir: ~/.ctx/hub-data/ The admin token is written to `~/.ctx/hub-data/admin.token` so you can recover it later. Treat it like a password. -## Step 2 — Register the first project +## Step 2: Register the First Project ```bash cd ~/projects/alpha @@ -88,7 +88,7 @@ This stores an **encrypted** connection config in per-project client token; the admin token itself is never persisted in the project. -## Step 3 — Choose what to receive +## Step 3: Choose What to Receive ```bash ctx connection subscribe decision learning convention @@ -97,7 +97,7 @@ ctx connection subscribe decision learning convention Only the entry types you subscribe to will be delivered by `sync` and `listen`. -## Step 4 — Publish a decision +## Step 4: Publish a Decision Either use `ctx add --share` to write locally *and* push to the ctx Hub: @@ -114,7 +114,7 @@ Or publish an existing entry directly: ctx connection publish decision "Use UTC timestamps everywhere" ``` -## Step 5 — Register a second project and sync +## Step 5: Register a Second Project and Sync ```bash cd ~/projects/beta @@ -127,7 +127,7 @@ The decision from `alpha` now appears in `~/projects/beta/.context/hub/decisions.md` with an origin tag and timestamp. 
-## Step 6 — Watch entries arrive live +## Step 6: Watch Entries Arrive Live Instead of re-running `sync`, stream new entries as they land: @@ -138,7 +138,7 @@ ctx connection listen Leave this running in a terminal; every `--share` publish from any registered project will appear in `.context/hub/` immediately. -## Step 7 — Feed shared knowledge into the agent +## Step 7: Feed Shared Knowledge into the Agent Once entries exist in `.context/hub/`, include them in the agent context packet: @@ -150,13 +150,13 @@ ctx agent --include-hub Shared entries are added as a dedicated tier in the budget-aware assembly, scored by recency and type relevance. -## Auto-sync on session start +## Auto-Sync on Session Start After `register`, the `check-hub-sync` hook pulls new entries at the start of each session (daily throttled). Most users never need to call `ctx connection sync` manually. -## Where to go next +## Where to Go Next - **[Multi-machine hub](hub-multi-machine.md)**: run the hub on a LAN host and connect from other workstations. diff --git a/docs/recipes/hub-multi-machine.md b/docs/recipes/hub-multi-machine.md index 6b0ed4a10..ef72a3067 100644 --- a/docs/recipes/hub-multi-machine.md +++ b/docs/recipes/hub-multi-machine.md @@ -5,24 +5,24 @@ # \ Copyright 2026-present Context contributors. # SPDX-License-Identifier: Apache-2.0 -title: Multi-machine +title: Multi-Machine icon: lucide/network --- ![ctx](../images/ctx-banner.png) -# `ctx` Hub: Multi-machine +# `ctx` Hub: Multi-Machine Run the hub on a **LAN host** and connect from project directories on other workstations. This recipe is the **Story 2 ("small trusted team")** shape described in the -[`ctx` Hub overview](hub-overview.md) — read that first if +[`ctx` Hub overview](hub-overview.md); read that first if you haven't, especially the trust-model warnings. 
This recipe assumes you've already walked through [Getting Started](hub-getting-started.md) and understand what flows through the hub (decisions, learnings, conventions, -tasks — **not** journals, scratchpad, or raw context files). +tasks, **not** journals, scratchpad, or raw context files). ## Topology @@ -43,7 +43,7 @@ tasks — **not** journals, scratchpad, or raw context files). +-------------------+ ``` -## Step 1 — Start the daemon on the LAN host +## Step 1: Start the Daemon on the LAN Host On the machine that will hold the hub (call it `nexus`): @@ -58,9 +58,9 @@ later with: ctx hub stop ``` -## Step 2 — Firewall and port +## Step 2: Firewall and Port -Open port `9900/tcp` on `nexus` to the LAN only — **never** expose +Open port `9900/tcp` on `nexus` to the LAN only. **Never** expose the hub to the public internet without a reverse proxy and TLS in front of it (see [Hub security model](../security/hub.md)). @@ -87,7 +87,7 @@ Typical LAN allowlist rules: tcp dport 9900 accept ``` -## Step 3 — Retrieve the admin token +## Step 3: Retrieve the Admin Token The daemon prints the admin token to stdout on first run. Running as a daemon, that output goes to the log instead: @@ -99,7 +99,7 @@ cat ~/.ctx/hub-data/admin.token Copy the token over a trusted channel (SSH, password manager, or an encrypted note). **Do not email it or put it in chat.** -## Step 4 — Register projects from each workstation +## Step 4: Register Projects from Each Workstation On workstation `A`: @@ -122,7 +122,7 @@ client token**. Only the client token is persisted in `.context/.connect.enc`, encrypted with the same AES-256-GCM scheme ctx uses for notification credentials. -## Step 5 — Verify +## Step 5: Verify From either workstation: @@ -133,7 +133,7 @@ ctx connection status You should see the ctx Hub address, role (`leader` for single-node), subscription filters, and the sequence number you're synced to. 
-## TLS (recommended) +## TLS (Recommended) For anything beyond a trusted home LAN, terminate TLS in front of the hub. The hub speaks gRPC, so the reverse proxy must speak @@ -155,16 +155,16 @@ server { Point `ctx connection register` at the public hostname and port 443. -## Handling daemon restarts +## Handling Daemon Restarts -The hub is **append-only JSONL** — restarts are safe. Clients keep +The hub is **append-only JSONL**, so restarts are safe. Clients keep their last-seen sequence in `.context/hub/.sync-state.json` and pick up exactly where they left off on the next `sync` or `listen` reconnect. -## See also +## See Also -- [HA cluster recipe](hub-cluster.md) — for redundancy -- [Hub operations](../operations/hub.md) — backup, rotation +- [HA cluster recipe](hub-cluster.md): for redundancy +- [Hub operations](../operations/hub.md): backup, rotation - [Hub failure modes](../operations/hub-failure-modes.md) - [Hub security model](../security/hub.md) diff --git a/docs/recipes/hub-overview.md b/docs/recipes/hub-overview.md index 1a23643f0..cbc93e4ef 100644 --- a/docs/recipes/hub-overview.md +++ b/docs/recipes/hub-overview.md @@ -14,10 +14,10 @@ icon: lucide/compass # `ctx` Hub: Overview Start here before the other hub recipes. This page answers *what* -the hub is, *who* it's for, *why* you'd run one, and — -equally important — *what it is not*. +the hub is, *who* it's for, *why* you'd run one, and, +equally important, *what it is not*. -## Mental model in one paragraph +## Mental Model in One Paragraph The hub is a **fan-out channel for structured knowledge entries across projects**. When you publish a decision, learning, @@ -32,7 +32,7 @@ knowledge bus** for a small, curated set of entry types. It is **not** a shared memory, a shared journal, or a multi-user database. -## What flows through the hub +## What Flows through the Hub Only four entry types: @@ -48,7 +48,7 @@ publishing project's name as `Origin`, a timestamp, and a hub-assigned sequence number. 
Once published, entries are never rewritten. -## What does *not* flow through the hub +## What Does *Not* Flow through the Hub This is the part new users get wrong most often: @@ -56,27 +56,27 @@ This is the part new users get wrong most often: stay local. The hub does **not** sync your AI session history. - **Scratchpad** (`.context/pad`) stays local. Encrypted notes never leave the machine they were written on. -- **Local context files** as a whole — `TASKS.md`, - `DECISIONS.md`, `LEARNINGS.md`, `CONVENTIONS.md` — are **not** +- **Local context files** as a whole (`TASKS.md`, + `DECISIONS.md`, `LEARNINGS.md`, `CONVENTIONS.md`) are **not** mirrored wholesale. Only entries you explicitly `--share`, or publish later with `ctx connection publish`, cross the boundary. - **Anything under `.context/` that isn't one of the four entry types above.** Configuration, state, logs, memory, journal - metadata — all local. + metadata: all local. If you were expecting "now my agent in project B can see everything my agent did in project A," that's not this feature. Local session density still lives on the local machine. -## Two user stories +## Two User Stories The hub makes sense in two different shapes. Pick the one that -matches your situation — the mechanics are identical but the +matches your situation; the mechanics are identical but the trust model and threat surface are very different. -### Story 1: Personal cross-project brain +### Story 1: Personal Cross-Project Brain -**One developer, many projects, one hub — usually on localhost.** +**One developer, many projects, one hub, usually on localhost.** You're working across several projects on the same machine (or a handful of machines you own). You want a lesson learned @@ -94,7 +94,7 @@ codified in one project to be visible as-you-type in another. - Cross-project conventions (e.g., "use UTC timestamps everywhere") live in one place and propagate. 
-**Trust model:** high — you trust every participant because every +**Trust model:** high, because you trust every participant since every participant is *you*. Run the hub on localhost or on your own LAN, use the default single-node setup, don't worry about TLS. @@ -103,7 +103,7 @@ LAN, use the default single-node setup, don't worry about TLS. setup, then [Personal cross-project brain](hub-personal.md) for the day-to-day workflow. -### Story 2: Small trusted team +### Story 2: Small Trusted Team **A few teammates, projects they each own, one hub on a LAN host they all trust.** @@ -143,7 +143,7 @@ deployment, [Team knowledge bus](hub-team.md) for the day-to-day team workflow, then [HA cluster](hub-cluster.md) if you need redundancy. -## Identity: projects, not users +## Identity: Projects, Not Users The hub has **no concept of users.** Its unit of identity is the *project*. `ctx connection register` binds a hub token to a project @@ -159,7 +159,7 @@ project share either: Either works; neither gives you per-human attribution. If you need "who wrote this," the hub is the wrong tool. -## When *not* to use it +## When *Not* to Use It - **Solo, single-project work.** Local `.context/` files are enough. The hub adds operational surface for no payoff. @@ -174,18 +174,18 @@ need "who wrote this," the hub is the wrong tool. anything you wouldn't paste in a team chat. - **Wholesale journal sharing.** See "what does not flow" above. If that's what you want, this feature won't provide - it — talk to us in the issue tracker about what *would*. + it. Talk to us in the issue tracker about what *would*. -## How entries reach your agent +## How Entries Reach Your Agent Once a project is registered and subscribed, entries arrive by three mechanisms: -1. **`ctx connection sync`** — an on-demand pull, replays +1. **`ctx connection sync`**: an on-demand pull, replays everything new since the last sequence you saw. -2. **`ctx connection listen`** — a long-lived gRPC stream that +2. 
**`ctx connection listen`**: a long-lived gRPC stream that writes new entries to `.context/hub/` as they arrive. -3. **`check-hub-sync` hook** — runs at session start, daily +3. **`check-hub-sync` hook**: runs at session start, daily throttled, so most users never call `sync` manually. Once entries exist in `.context/hub/`, `ctx agent @@ -193,7 +193,7 @@ Once entries exist in `.context/hub/`, `ctx agent context packet, scored by recency and type relevance. That's the end of the pipeline. -## Where to go next +## Where to Go Next | If you're… | Read | |---------------------------------------------------|--------------------------------------------------| diff --git a/docs/recipes/hub-personal.md b/docs/recipes/hub-personal.md index 8cd0863af..dbadb8d04 100644 --- a/docs/recipes/hub-personal.md +++ b/docs/recipes/hub-personal.md @@ -5,16 +5,16 @@ # \ Copyright 2026-present Context contributors. # SPDX-License-Identifier: Apache-2.0 -title: Personal cross-project brain +title: Personal Cross-Project Brain icon: lucide/brain --- ![ctx](../images/ctx-banner.png) -# Personal cross-project brain +# Personal Cross-Project Brain This recipe shows **how one developer uses a `ctx` Hub -across their own projects day-to-day** — the "Story 1" +across their own projects day-to-day**, the "Story 1" shape from the [Hub overview](hub-overview.md). You're not setting up infrastructure for a team; you're making a lesson you learned last Tuesday in project A automatically @@ -22,43 +22,43 @@ surface when you open project B next Thursday. **Prerequisites**: a working `ctx` Hub on localhost (see [Getting Started](hub-getting-started.md) for the -~5-minute setup). This recipe assumes the hub is already +roughly five-minute setup). This recipe assumes the hub is already running and you've registered at least two projects. -## The core loop +## The Core Loop Every day, the same three verbs matter: -1. **Record** — notice a decision, learning, or +1. 
**Record**: notice a decision, learning, or convention and capture it with `ctx add --share`. -2. **Subscribe** — every project you care about is +2. **Subscribe**: every project you care about is subscribed to the types you want delivered (set once with `ctx connection subscribe`). -3. **Load** — your agent picks up shared entries on next +3. **Load**: your agent picks up shared entries on next session start via the auto-sync hook, or explicitly via `ctx agent --include-hub`. That's the whole workflow. The rest of this recipe fills in the concrete moments where each verb matters. -## A realistic day +## A Realistic Day You have three projects on your workstation: -- `~/projects/api` — a Go service you're actively +- `~/projects/api`, a Go service you're actively developing -- `~/projects/cli` — a companion CLI that consumes the +- `~/projects/cli`, a companion CLI that consumes the API -- `~/projects/dotfiles` — your personal conventions and +- `~/projects/dotfiles`, your personal conventions and cross-project learnings All three are registered with a single hub running on `localhost:9900` (started once at boot, or via a systemd -user unit — see [Hub operations](../operations/hub.md)). +user unit; see [Hub operations](../operations/hub.md)). All three subscribe to `decision`, `learning`, and `convention`. -### 09:00 — Start work on `api` +### 09:00 - Start Work on `api` You `cd ~/projects/api` and start a Claude Code session. Behind the scenes, the plugin's `PreToolUse` hook calls @@ -73,7 +73,7 @@ So the "use UTC timestamps everywhere" decision you recorded in `dotfiles` last week is already in Claude's context for this session, without any manual `sync`. 
-### 10:30 — You discover a gotcha +### 10:30 - You Discover a Gotcha While debugging, you find that the API's retry loop silently drops the last error when the transport times @@ -101,7 +101,7 @@ Within seconds, `cli/.context/hub/learnings.md` and of this learning (the `ctx connection listen` daemon picks it up from the ctx Hub's Listen stream). -### 12:00 — You switch to `cli` +### 12:00 - You Switch to `cli` `cd ~/projects/cli`, open a new session. The agent packet for `cli` now includes **the learning you just @@ -112,12 +112,12 @@ recorded in `api`**, because `cli` is subscribed to You don't have to re-explain the retry-loop gotcha. Claude already sees it. -### 14:00 — You codify a convention +### 14:00 - You Codify a Convention You've been writing error messages in `api` and decided you want a consistent pattern: lowercase start, no trailing period, single-sentence. This is a convention, -not a decision — it applies to every Go project you +not a decision; it applies to every Go project you touch. Record it in `dotfiles` (since that's your "personal standards" project), and share it: @@ -133,14 +133,14 @@ Claude Code session in either project gets the convention injected into the steering-adjacent slot of the agent packet. -### 16:30 — End of day +### 16:30 - End of Day You didn't run `ctx connection sync` once. You didn't `git push` anything between projects. You didn't remember to tell your agent about the retry-loop gotcha in the new project. The hub did all of it for you. -## What the workflow actually looks like +## What the Workflow Actually Looks Like Stripped of prose, the day's commands were: @@ -158,10 +158,10 @@ ctx add convention --share "..." ``` The hub is passive infrastructure. You never talk **to** -it directly — you talk **through** it by using `--share` +it directly; you talk **through** it by using `--share` on commands you were already running. 
-## Tips for solo use +## Tips for Solo Use **Pick a "standards" project.** One of your projects should play the role of "canonical source for rules you @@ -208,11 +208,11 @@ context packets get noisy. **Local storage is fine; no TLS needed.** The hub runs on localhost. No one else is on the network. Skip the TLS setup from the -[Multi-machine recipe](hub-multi-machine.md) — it's +[Multi-machine recipe](hub-multi-machine.md); it's relevant when the hub is on a LAN host serving multiple workstations, not when it's a personal daemon. -## What this recipe is *not* +## What This Recipe Is *Not* **Not a setup guide.** For the one-time hub install and project registration, use @@ -220,7 +220,7 @@ project registration, use **Not a team guide.** If you're sharing across humans, not just across your own projects, read -[Team knowledge bus](hub-team.md) instead — the trust +[Team knowledge bus](hub-team.md) instead; the trust model and operational concerns are different. **Not production operations.** For backup, log @@ -228,16 +228,16 @@ rotation, failure recovery, and HA, see [Hub operations](../operations/hub.md) and [Hub failure modes](../operations/hub-failure-modes.md). -## See also +## See Also -- [Hub overview](hub-overview.md) — when to use the Hub +- [Hub overview](hub-overview.md): when to use the Hub and when not to. -- [Team knowledge bus](hub-team.md) — the multi-human +- [Team knowledge bus](hub-team.md): the multi-human companion recipe. -- [`ctx connect`](../cli/connection.md) — the client-side +- [`ctx connect`](../cli/connection.md): the client-side commands used above (`subscribe`, `publish`, `sync`, `listen`, `status`). -- [`ctx add`](../cli/context.md) — the `--share` flag +- [`ctx add`](../cli/context.md): the `--share` flag reference. -- [`ctx hub`](../cli/hub.md) — operator commands for +- [`ctx hub`](../cli/hub.md): operator commands for starting, stopping, and inspecting the hub. 
diff --git a/docs/recipes/hub-team.md b/docs/recipes/hub-team.md index 224a6235c..25a89584b 100644 --- a/docs/recipes/hub-team.md +++ b/docs/recipes/hub-team.md @@ -5,16 +5,16 @@ # \ Copyright 2026-present Context contributors. # SPDX-License-Identifier: Apache-2.0 -title: Team knowledge bus +title: Team Knowledge Bus icon: lucide/users --- ![ctx](../images/ctx-banner.png) -# Team knowledge bus +# Team Knowledge Bus This recipe shows **how a small trusted team uses a `ctx` -Hub as a shared knowledge bus** — the "Story 2" shape +Hub as a shared knowledge bus**, the "Story 2" shape from the [Hub overview](hub-overview.md). You're not building a wiki, you're not replacing your issue tracker, and you're not running a multi-tenant service. You're @@ -32,7 +32,7 @@ without ceremony. `ctx connection register`-ed their working projects with the hub. -## Trust model — read this first +## Trust Model: Read This First The hub assumes **everyone holding a client token is friendly**. There's no per-user attribution you can rely @@ -61,17 +61,17 @@ If your team is: not** support today. Use a wiki or a dedicated knowledge platform instead. -## The team's three verbs +## The Team's Three Verbs Everyone on the team does three things, same as in the [personal recipe](hub-personal.md), but with different social expectations: -1. **Record** — when you learn something that would save +1. **Record**: when you learn something that would save a teammate time, capture it with `ctx add --share`. -2. **Subscribe** — every engineer's project directories +2. **Subscribe**: every engineer's project directories subscribe to the types the team cares about. -3. **Load** — agents pick up shared entries automatically +3. **Load**: agents pick up shared entries automatically via the auto-sync hook and the `--include-hub` flag in the PreToolUse hook pipeline. @@ -80,7 +80,7 @@ different is the *culture* around publishing: when do you `--share`, and what belongs on the hub vs. 
in your local `.context/`. -## What goes on the hub (team rules of thumb) +## What Goes on the Hub (Team Rules of Thumb) **Share it if it's true for more than one person.** The central question: "would the next teammate who hits this @@ -101,9 +101,9 @@ problem save time if they already knew this?" If yes, **Learnings**: - ✅ Gotchas, surprising behavior, flaky infrastructure - quirks — anything you'd tell a teammate over coffee + quirks, anything you'd tell a teammate over coffee with "watch out for X". -- ✅ Lessons from incidents — right after the postmortem +- ✅ Lessons from incidents, right after the postmortem is the highest-value time to share. - ❌ Internal debugging notes that only make sense with context from your current branch. @@ -120,13 +120,13 @@ problem save time if they already knew this?" If yes, to `task` unless the team has a specific reason (e.g., a cross-cutting migration you want visible everywhere). -## A realistic week +## A Realistic Week -**Monday — 3 AM incident, shared learning** +**Monday, 3 AM incident, shared learning** On-call engineer Alice gets paged: the payment service starts returning 500s after a dependency update. After -an hour she finds the culprit — a breaking change in a +an hour she finds the culprit: a breaking change in a transitive gRPC dep that only manifests under high concurrency. Postmortem on Tuesday, but right now she records the learning: @@ -134,8 +134,12 @@ records the learning: ```bash ctx add learning --share \ --context "Payment service 3 AM incident, 2026-04-03" \ - --lesson "grpc-go v1.62+ changes DialContext behavior under high concurrency: connections from a single channel can deadlock if the server emits GOAWAY mid-stream. Symptom: 500 errors cluster in 30s bursts, no error in grpc client logs." \ - --application "Any service on grpc-go. Pin to v1.61 or patch with keepalive: https://github.com/grpc/grpc-go/issues/..." 
+ --lesson "grpc-go v1.62+ changes DialContext behavior under high \ + concurrency: connections from a single channel can deadlock if the \ + server emits GOAWAY mid-stream. Symptom: 500 errors cluster in \ + 30s bursts, no error in grpc client logs." \ + --application "Any service on grpc-go. Pin to v1.61 or patch with \ + keepalive: https://github.com/grpc/grpc-go/issues/..." ``` By Tuesday morning, every other engineer's agent @@ -144,17 +148,21 @@ work on the `ledger` service (which also uses grpc-go), his Claude Code session already knows about the gotcha without Bob having to read the incident channel. -**Wednesday — cross-service decision** +**Wednesday, cross-service decision** -The team agrees on a new pattern for API versioning — +The team agrees on a new pattern for API versioning: header-based instead of URL-based. Platform lead Carol records the decision: ```bash ctx add decision --share \ - --context "Need consistent API versioning across all 6 services. Current URL-based /v1/ isn't working for gradual rollouts." \ - --rationale "Header-based versioning lets us route by header at the edge, which makes canary rollouts trivial. URL-based versioning forces clients to update their paths." \ - --consequence "All new endpoints use X-API-Version header. Existing /v1/ endpoints stay. Deprecation schedule in q3." \ + --context "Need consistent API versioning across all 6 services. \ + Current URL-based /v1/ isn't working for gradual rollouts." \ + --rationale "Header-based versioning lets us route by header at the \ + edge, which makes canary rollouts trivial. URL-based versioning \ + forces clients to update their paths." \ + --consequence "All new endpoints use X-API-Version header. \ + Existing /v1/ endpoints stay. Deprecation schedule in q3." \ "Use header-based API versioning for new endpoints" ``` @@ -164,7 +172,7 @@ automatically. When Dave starts adding endpoints to the him for the header pattern instead of defaulting to `/v1/`. 
-**Friday — convention drift caught at review** +**Friday, convention drift caught at review** Dave notices that his PR auto-formatted some error messages to end with periods. He recalls the team @@ -181,7 +189,7 @@ Lowercase start, no trailing period, single sentence. He fixes the PR. No lookup on the wiki, no question in chat, no context-switch penalty. -## Workflow tips for teams +## Workflow Tips for Teams **Designate a "champion" for decisions.** The team lead or platform engineer should be the person who explicitly @@ -213,10 +221,10 @@ might be enough. **Run a single hub, not one per team.** If two teams need to share knowledge, they should share a hub. -Splitting hubs by team creates silos — which is often +Splitting hubs by team creates silos, which is often exactly the thing you were trying to solve. -## Operational concerns +## Operational Concerns The team recipe assumes someone owns the hub host. That person (or a small group) is responsible for: @@ -239,7 +247,7 @@ so the hub survives individual node failures. See [HA cluster](hub-cluster.md). For teams under 10 people, a single-node hub with daily backups is usually fine. -## Token management +## Token Management Every team member has a client token stored in their `.context/.connect.enc`. Rules of thumb: @@ -254,9 +262,9 @@ Every team member has a client token stored in their revocation steps. - **No checked-in tokens.** `.context/.connect.enc` is encrypted with the local machine key, but don't push - it to shared repos — it's per-workstation. + it to shared repos; it's per-workstation. -## What this recipe is *not* +## What This Recipe Is *Not* **Not a wiki replacement.** The hub is for structured entries, not prose. Put your architecture overviews, @@ -268,7 +276,7 @@ hub is the wrong tool. **Not a ticket system.** Task sharing works, but mature teams already have Jira/Linear/Github Issues. 
-Don't try to replace those with hub tasks — use the +Don't try to replace those with hub tasks; use the hub for lightweight cross-project todos that your existing tracker doesn't capture well. @@ -276,17 +284,17 @@ existing tracker doesn't capture well. internal team infrastructure. Do not expose the hub to customers, partners, or the open internet. -## See also +## See Also -- [Hub overview](hub-overview.md) — when to use the +- [Hub overview](hub-overview.md): when to use the hub and when not to. -- [Personal cross-project brain](hub-personal.md) — +- [Personal cross-project brain](hub-personal.md): the single-developer companion recipe. -- [Multi-machine setup](hub-multi-machine.md) — +- [Multi-machine setup](hub-multi-machine.md): standing up the hub on a LAN host. -- [HA cluster](hub-cluster.md) — optional redundancy +- [HA cluster](hub-cluster.md): optional redundancy for larger teams. -- [Hub operations](../operations/hub.md) — backup, +- [Hub operations](../operations/hub.md): backup, rotation, monitoring. -- [Hub security](../security/hub.md) — threat model +- [Hub security](../security/hub.md): threat model and hardening checklist. diff --git a/docs/recipes/index.md b/docs/recipes/index.md index a926f7e42..791318b95 100644 --- a/docs/recipes/index.md +++ b/docs/recipes/index.md @@ -19,7 +19,7 @@ Train your agent to be proactive through **ask, guide, reinforce**. --- -### [Setup Across AI Tools](multi-tool-setup.md) +### [Setup across AI Tools](multi-tool-setup.md) Initialize `ctx` and configure hooks for Claude Code, Cursor, Aider, Copilot, or Windsurf. Includes **shell completion**, @@ -30,14 +30,24 @@ Aider, Copilot, or Windsurf. Includes **shell completion**, --- +### [Multilingual Session Parsing](multilingual-sessions.md) + +Parse session journal entries written in **other languages**. +Configure recognized session-header prefixes so the journal +pipeline works for Turkish, Japanese, and any other locale. 
+ +**Uses**: `ctx journal source`, `ctx journal import`, +`session_prefixes` in `.ctxrc` + +--- + ### [Keeping Context in a Separate Repo](external-context.md) Store context files **outside** the project tree: in a private repo, shared directory, or anywhere else. Useful for open source projects with private context or **multi-repo** setups. -**Uses**: `ctx init`, `--context-dir`, `--allow-outside-cwd`, -`.ctxrc`, `/ctx-status` +**Uses**: `ctx init`, `CTX_DIR`, `.ctxrc`, `/ctx-status` --- @@ -110,7 +120,7 @@ hooks still fire. --- -## Knowledge & Tasks +## Knowledge and Tasks ### [Persisting Decisions, Learnings, and Conventions](knowledge-capture.md) @@ -125,7 +135,7 @@ survive across sessions and team members. --- -### [Tracking Work Across Sessions](task-management.md) +### [Tracking Work across Sessions](task-management.md) **Add**, **prioritize**, **complete**, **snapshot**, and **archive** tasks. Keep `TASKS.md` focused as your project evolves across dozens of @@ -146,7 +156,7 @@ storage out. --- -### [Syncing Scratchpad Notes Across Machines](scratchpad-sync.md) +### [Syncing Scratchpad Notes across Machines](scratchpad-sync.md) Distribute your **scratchpad** encryption key, push and pull encrypted notes via git, and resolve merge conflicts when two machines edit @@ -167,7 +177,7 @@ entries into structured context files with heuristic classification. --- -## Hooks & Notifications +## Hooks and Notifications ### [Hook Output Patterns](hook-output-patterns.md) @@ -323,7 +333,20 @@ Each step produces an artifact that feeds the next. --- -## Agents & Automation +### [Scrutinizing a Plan](scrutinizing-a-plan.md) + +Once a plan exists, run an **adversarial interview** to surface what's +weak, missing, or unexamined before you commit. Walks the plan +depth-first: assumptions, failure modes, alternatives, sequencing, +reversibility. The complement to brainstorm: brainstorm produces +plans, this attacks them. 
+ +**Uses**: `/ctx-plan`, `/ctx-spec`, `/ctx-decision-add`, +`/ctx-learning-add` + +--- + +## Agents and Automation ### [Building Project Skills](building-skills.md) @@ -387,7 +410,7 @@ quantified dependency data, and ranked failure hypotheses. ### [Writing Steering Files](steering.md) -Tell your AI assistant **how to behave** — rule-based prompt +Tell your AI assistant **how to behave** with rule-based prompt injection that fires automatically when prompts match a description. Walks through scaffolding a steering file, previewing matches, and syncing to each AI tool's native @@ -411,7 +434,17 @@ disabled, test with mock input, enable only after review. --- -## `ctx` Hub +## Hub + +### [Hub Overview](hub-overview.md) + +Mental model and three user stories for the `ctx` Hub. What flows, +what doesn't, and when not to use it. Read this before any of the +other Hub recipes. + +**Uses**: `ctx hub`, `ctx connection`, `ctx add --share` + +--- ### [`ctx` Hub: Getting Started](hub-getting-started.md) @@ -425,21 +458,21 @@ End-to-end in under five minutes. --- -### [Personal cross-project brain](hub-personal.md) +### [Personal Cross-Project Brain](hub-personal.md) **Story 1** day-to-day workflow: one developer, many projects, one hub on localhost. Records a learning in project A, watches it show up automatically in project B. Walks through a realistic day of using the hub as passive -infrastructure — no manual `sync`, no `git push`, no -ceremony. +infrastructure (no manual `sync`, no `git push`, no +ceremony). **Uses**: `ctx add --share`, `ctx connection subscribe`, `ctx agent --include-hub` --- -### [Team knowledge bus](hub-team.md) +### [Team Knowledge Bus](hub-team.md) **Story 2** day-to-day workflow: a small trusted team sharing decisions, learnings, and conventions via a hub on @@ -453,7 +486,7 @@ stay signal-rich. 
--- -### [`ctx` Hub: Multi-machine](hub-multi-machine.md) +### [`ctx` Hub: Multi-Machine](hub-multi-machine.md) Run the hub on a **LAN host** as a daemon and connect from project directories on other workstations. Firewall guidance, TLS via a @@ -464,7 +497,7 @@ reverse proxy, and safe daemon restart semantics. --- -### [`ctx` Hub: HA cluster](hub-cluster.md) +### [`ctx` Hub: HA Cluster](hub-cluster.md) Raft-based leader election across three or more nodes for redundancy. Covers bootstrap, runtime peer management, graceful diff --git a/docs/recipes/knowledge-capture.md b/docs/recipes/knowledge-capture.md index 0012ef72a..7da5dad51 100644 --- a/docs/recipes/knowledge-capture.md +++ b/docs/recipes/knowledge-capture.md @@ -19,10 +19,10 @@ rejected. **How do you make sure important context survives across sessions?** -!!! tip "Prefer skills over raw commands" +!!! tip "Prefer Skills to Raw Commands" Use `/ctx-decision-add` and `/ctx-learning-add` instead of raw `ctx add` commands. The agent automatically picks up session ID, - branch, and commit hash from its context — no manual flags needed. + branch, and commit hash from its context, so no manual flags are needed. ## TL;DR @@ -208,7 +208,7 @@ individually with `ctx decision reindex` or `ctx learning reindex`. Run reindex after any manual edit. The index lets AI tools scan all entries without reading the full file, which matters when token budgets are tight. -### Step 6: Use /ctx-reflect to Surface What to Capture +### Step 6: Use `/ctx-reflect` to Surface What to Capture !!! tip "Keep It Conversational" `/ctx-reflect` is not the only way to trigger reflection. @@ -308,7 +308,7 @@ rather than waiting for explicit instructions. 
## Putting It All Together -### Command-line Approach (*Scripting and Automation*) +### Command-Line Approach (*Scripting and Automation*) ```bash # Decision: record the trade-off diff --git a/docs/recipes/memory-bridge.md b/docs/recipes/memory-bridge.md index 8fbd8f8b2..245237828 100644 --- a/docs/recipes/memory-bridge.md +++ b/docs/recipes/memory-bridge.md @@ -182,7 +182,7 @@ ctx memory import --dry-run # 2. Preview what would be imported ctx memory import # 3. Promote entries to .context/ files ``` -## Publishing Context to MEMORY.md +## Publishing Context to `MEMORY.md` Push curated `.context/` content back into MEMORY.md so Claude Code sees structured project context on session start - without needing hooks. diff --git a/docs/recipes/multi-tool-setup.md b/docs/recipes/multi-tool-setup.md index 3e064955a..aa3299de9 100644 --- a/docs/recipes/multi-tool-setup.md +++ b/docs/recipes/multi-tool-setup.md @@ -81,15 +81,31 @@ This produces the following structure: AGENT_PLAYBOOK.md # How AI tools should use this system ``` -!!! tip "Using a Different `.context` Directory" - The `.context/` directory doesn't have to live inside your project. You can - point `ctx` to an external folder via `.ctxrc`, the `CTX_DIR` environment - variable, or the `--context-dir` CLI flag. - - This is useful for monorepos or shared context across repositories. - - See [Configuration](../home/configuration.md#environment-variables) for - details and [External Context](external-context.md) for a full recipe. +!!! note "Using a Different `.context` Directory" + The `.context/` directory doesn't have to live inside your project. Point + `ctx` to an external folder by exporting `CTX_DIR` (the only + declaration channel). + + Useful when context must stay private while the code is public, or + when you want to commit notes to a separate repo. 
+ + **Caveats** (the recipe covers both with workarounds): + + * **Code-aware operations degrade silently.** `ctx sync`, `ctx drift`, + and the memory-drift hook read the codebase from + `dirname(CTX_DIR)`. With an external `.context/`, that's the + context repo, not your code repo. They scan the wrong tree without + erroring. The recipe shows a symlink workaround that keeps both + healthy. + * **One `.context/` per project, always.** Sharing one directory + across multiple projects corrupts journals, state, and secrets. + For cross-project knowledge sharing (CONSTITUTION, CONVENTIONS, + ARCHITECTURE, etc.) use [`ctx hub`](hub-overview.md), not a + shared `.context/`. + + See [External Context](external-context.md) for the full recipe + and [Configuration](../home/configuration.md#environment-variables) + for the resolver details. For Claude Code, install the **ctx plugin** to get hooks and skills: @@ -134,13 +150,13 @@ tool. No action needed. Just install `ctx` from the Marketplace as `ActiveMemory/ctx`. -!!! tip "Claude Code is a First-Class Citizen" +!!! tip "Claude Code Is a First-Class Citizen" With the `ctx` plugin installed, Claude Code gets hooks and skills automatically. The `PreToolUse` hook runs `ctx agent --budget 4000` on every tool call (*with a 10-minute cooldown so it only fires once per window*). -#### Cursor +#### Cursor Add the system prompt snippet to `.cursor/settings.json`: @@ -352,7 +368,7 @@ ctx setup aider ctx skills can leverage external MCP servers for web search and code intelligence. ctx works without them, but they significantly improve -agent behavior across sessions — the investment is small and the +agent behavior across sessions. The investment is small and the benefits compound. Skills like `/ctx-code-review`, `/ctx-explain`, and `/ctx-refactor` all become noticeably better with these tools connected. 
diff --git a/docs/recipes/multilingual-sessions.md b/docs/recipes/multilingual-sessions.md index ab7e6060b..934f8f1da 100644 --- a/docs/recipes/multilingual-sessions.md +++ b/docs/recipes/multilingual-sessions.md @@ -55,7 +55,7 @@ regardless of prefix configuration. ## Configuration -### Adding a language +### Adding a Language Add the prefix with a trailing colon to your `.ctxrc`: @@ -65,18 +65,18 @@ session_prefixes: - "Sesión:" # Spanish ``` -!!! warning "Include Session: explicitly" +!!! warning "Include Session: Explicitly" When you override `session_prefixes`, **the default is replaced**, not extended. If you still want English headers recognized, include `"Session:"` in your list. -### Team setup +### Team Setup Commit `.ctxrc` to the repo so all team members share the same prefix list. This ensures `ctx journal import` and journal generation pick up sessions from all team members regardless of language. -### Common prefixes +### Common Prefixes | Language | Prefix | |------------|------------| diff --git a/docs/recipes/parallel-worktrees.md b/docs/recipes/parallel-worktrees.md index 7f1aa7811..2678aced2 100644 --- a/docs/recipes/parallel-worktrees.md +++ b/docs/recipes/parallel-worktrees.md @@ -107,7 +107,7 @@ claude Each agent sees the full project, including `.context/`, and can work independently. -!!! warning "Do Not Initialize Context in Worktrees" +!!! warning "Do Not Initialize Context in Worktrees" **Do not** run `ctx init` in worktrees: The `.context` directory is already tracked in `git`. diff --git a/docs/recipes/publishing.md b/docs/recipes/publishing.md index 76ab865cf..6fa08f4b1 100644 --- a/docs/recipes/publishing.md +++ b/docs/recipes/publishing.md @@ -195,7 +195,7 @@ it to `docs/blog/YYYY-MM-DD-slug.md`. Posts are written in first person with code snippets, commit references, and an honest discussion of what went wrong. -!!! info "The Output is `zensical`-Flavored Markdown" +!!! 
info "The Output Is `zensical`-Flavored Markdown" The blog skills produce Markdown tuned for a [zensical](https://pypi.org/project/zensical/) site: `topics:` frontmatter (zensical's tag field), a `docs/blog/` output path, diff --git a/docs/recipes/scratchpad-sync.md b/docs/recipes/scratchpad-sync.md index a9dfbd8f9..26e3fdb91 100644 --- a/docs/recipes/scratchpad-sync.md +++ b/docs/recipes/scratchpad-sync.md @@ -30,7 +30,7 @@ chmod 600 ~/.ctx/.ctx.key # 3. secure it !!! tip "Finding Your Key File" The key is always at `~/.ctx/.ctx.key` - one key, one machine. -!!! danger "Treat the Key Like a Password" +!!! danger "Treat the Key like a Password" The scratchpad key is the only thing protecting your **encrypted** entries. Store a backup in a secure enclave such as a password manager, and treat diff --git a/docs/recipes/scratchpad-with-claude.md b/docs/recipes/scratchpad-with-claude.md index b598cf737..d503488d1 100644 --- a/docs/recipes/scratchpad-with-claude.md +++ b/docs/recipes/scratchpad-with-claude.md @@ -122,7 +122,7 @@ You: "remove entry 2, it's done" Agent: [runs ctx pad rm 2] "Removed entry 2. 3 entries remaining. - (IDs are stable — remaining entries keep their IDs.)" + (IDs are stable; remaining entries keep their IDs.)" ``` ### Step 7: Store a File as a Blob @@ -214,7 +214,7 @@ Agent: [runs ctx pad --tag later] 3. review PR feedback #later #ci" ``` -Entry IDs are stable — they don't shift when other entries are deleted, +Entry IDs are stable; they don't shift when other entries are deleted, so `ctx pad rm 3` always targets the same entry regardless of deletions or active filters. Use `ctx pad normalize` to reassign IDs as 1..N. 
@@ -236,7 +236,7 @@ Agent: [runs ctx pad tags] urgent 1" ``` -Tags work on blob entries too — they're extracted from the label: +Tags work on blob entries too; they're extracted from the label: ```bash ctx pad add "deploy config #prod" --file ./deploy.yaml diff --git a/docs/recipes/scrutinizing-a-plan.md b/docs/recipes/scrutinizing-a-plan.md new file mode 100644 index 000000000..c5a555819 --- /dev/null +++ b/docs/recipes/scrutinizing-a-plan.md @@ -0,0 +1,99 @@ +--- +title: Scrutinizing a Plan +--- + +# Scrutinizing a Plan + +When you have a plan and want it attacked, not validated, the +`/ctx-plan` skill runs an adversarial interview. It surfaces what's +weak, missing, or unexamined *before* you commit. + +## When to Use It + +- Before starting a multi-day implementation. +- After writing a spec but before opening the first PR. +- When the team aligned suspiciously fast on a complex change. +- When you've drafted something and the obvious next step is to + hand it to a senior reviewer. + +`/ctx-plan` is not a brainstorming tool. Use +[`/ctx-brainstorm`](design-before-coding.md) for that. `/ctx-plan` +assumes the plan exists and asks: **does it survive contact with +reality?** + +## Stop Conditions + +The skill runs depth-first through assumptions, tradeoffs, and +failure modes. Stop when one of these is true: + +- **Plan converges.** The bet is clear, the sequencing is justified, + and you can articulate what would falsify it. +- **Plan dissolves.** The interview surfaces a load-bearing + assumption you can't defend; the plan needs rework, not more + scrutiny. +- **You hit information you don't have.** Pause, gather it, + resume. + +If the agent loops on a single angle (asking variations of the same +question), say so. The skill is designed to walk depth-first, not +exhaust one branch. + +## Worked Example + +```text +You: /ctx-plan + +I want to migrate our queue from RabbitMQ to NATS. 
I've drafted a +two-week plan: spike NATS in staging, port one consumer, port the +rest, decommission RabbitMQ. + +Agent: One question at a time. What breaks if your message ordering + guarantees aren't equivalent across the two systems? + +You: NATS gives us per-subject ordering by default; we don't actually + need cross-subject ordering, so I think we're fine. + +Agent: How would you notice if you were wrong? What's the loudest + symptom of a stray out-of-order message in production today? + +You: ...probably a duplicate notification. Which we already see + occasionally. So we wouldn't notice immediately. + +Agent: Then your migration's correctness check needs to be stronger + than "no incidents." Before you port consumer two, what + evidence would convince you that consumer one's ordering + behavior is correct under load? + +You: Right. Add a dedicated assertion test in staging that replays + a known-out-of-order sequence and verifies the consumer + dedupes correctly. Otherwise we're just hoping. + +Agent: Good. Next: what would change your mind about NATS itself? + What rejected alternative is closest to coming back? +``` + +The interview keeps going until the bet is articulated, the +falsifiable conditions are written down, and the rejected +alternatives have a recall trigger. + +## Output + +`/ctx-plan` produces a clearer plan, not a document. Persist the +deltas via: + +- **`/ctx-spec`** if the conclusions belong in a feature spec. +- **`/ctx-decision-add`** if a tradeoff resolved into an + architectural decision. +- **`/ctx-learning-add`** if you discovered a project-specific + gotcha during the interview. + +The skill itself is in +`internal/assets/claude/skills/ctx-plan/SKILL.md`; the working +contract lives there, the recipe is the on-ramp. + +## See Also + +- [Design Before Coding](design-before-coding.md): the + brainstorming counterpart, used *before* a plan exists. 
+- [`ctx-spec`](../cli/index.md): scaffolds a feature spec from + the project template. diff --git a/docs/recipes/session-archaeology.md b/docs/recipes/session-archaeology.md index d89ce7fc3..ec1137262 100644 --- a/docs/recipes/session-archaeology.md +++ b/docs/recipes/session-archaeology.md @@ -172,7 +172,7 @@ ctx journal import gleaming-wobbling-sutherland ctx journal import --all --all-projects ``` -!!! warning "--keep-frontmatter=false Discards Enrichments" +!!! warning "`--keep-frontmatter=false` Discards Enrichments" `--keep-frontmatter=false` discards enriched YAML frontmatter during regeneration. @@ -190,7 +190,7 @@ To re-import existing files (*e.g., after a format improvement*), use YAML frontmatter you or the **enrichment** skill has added. You'll be prompted before any files are overwritten. -!!! danger "--regenerate Replaces the Markdown Body" +!!! danger "`--regenerate` Replaces the Markdown Body" `--regenerate` preserves YAML frontmatter but **replaces the entire Markdown body** with freshly generated content from the source JSONL. @@ -389,7 +389,7 @@ Claude Code exposes a `cleanupPeriodDays` setting in its configuration | `60`, `90`, etc. | Extends the retention window | | `0` | **Disables writing new transcripts entirely** - not "keep forever" | -!!! warning "Setting `cleanupPeriodDays` to 0" +!!! warning "Setting `cleanupPeriodDays` to 0" Setting this to `0` does **not** mean "never delete." It disables transcript creation altogether. No new JSONL files are written, which means `ctx journal` sees nothing new. This is rarely what you want. diff --git a/docs/recipes/session-ceremonies.md b/docs/recipes/session-ceremonies.md index 80b4c47df..eb5e6207d 100644 --- a/docs/recipes/session-ceremonies.md +++ b/docs/recipes/session-ceremonies.md @@ -71,7 +71,7 @@ braces. 
| `ctx journal source` | CLI | List recent sessions | | `ctx add` | CLI | Persist learnings, decisions, conventions, tasks | -## Session Start: /ctx-remember +## Session Start: `/ctx-remember` Invoke at the beginning of every session: @@ -96,7 +96,7 @@ The readback should feel like recall, not a file system tour. If the agent says "Let me check if there are files..." instead of a confident summary, the skill is not working correctly. -!!! note "What About '*do you remember?*'" +!!! note "What about '*do you remember?*'" The conversational trigger still works. But `/ctx-remember` guarantees the full ceremony runs: @@ -107,7 +107,7 @@ summary, the skill is not working correctly. The conversational version *may* cut corners. -## Session End: /ctx-wrap-up +## Session End: `/ctx-wrap-up` Invoke before ending a session where meaningful work happened: diff --git a/docs/recipes/session-changes.md b/docs/recipes/session-changes.md index dcaea1a68..4bf574843 100644 --- a/docs/recipes/session-changes.md +++ b/docs/recipes/session-changes.md @@ -7,8 +7,8 @@ title: Reviewing Session Changes --- -![ctx](../images/ctx-banner.png) +![ctx](../images/ctx-banner.png) ## What Changed While You Were Away? @@ -48,7 +48,7 @@ everything after that. 
## What It Reports -### Context file changes +### Context File Changes Any `.md` file in `.context/` modified after the reference time: @@ -58,7 +58,7 @@ Any `.md` file in `.context/` modified after the reference time: - `DECISIONS.md` - modified 2026-03-11 09:15 ``` -### Code changes +### Code Changes Git activity since the reference time: @@ -70,7 +70,7 @@ Git activity since the reference time: - **Authors**: jose, claude ``` -## Integrating Into Session Start +## Integrating into Session Start Pair `ctx change` with the `/ctx-remember` ceremony for a complete session-start picture: diff --git a/docs/recipes/session-lifecycle.md b/docs/recipes/session-lifecycle.md index 9245350fc..cd7bb5e4c 100644 --- a/docs/recipes/session-lifecycle.md +++ b/docs/recipes/session-lifecycle.md @@ -29,7 +29,22 @@ persisting context before you close it, so you can see how each piece connects. Read on for the full walkthrough with examples. -!!! note "What is a Readback?" +!!! note "Before You Start: Activate the Project" + ctx commands (and the skills that call them) require `CTX_DIR` to be + declared for the shell you're working in; `ctx` does not walk the + filesystem to find `.context/`. Once per shell (or via your shell + rc / direnv): + + ```bash + eval "$(ctx activate)" + ``` + + If you skip this, every skill below will surface an error naming + the fix. See + [Activating a Context Directory](activating-context.md) for the + full recipe. + +!!! note "What Is a Readback?" A **readback** is a **structured summary** where the agent plays back what it knows: @@ -437,7 +452,7 @@ Conversational equivalents: you can drive the same lifecycle with plain language | Load | `/ctx-remember` | "Do you remember?" / "What were we working on?" | | Orient | `/ctx-status` | "How's our context looking?" | | Pick | `/ctx-next` | "What should we work on?" 
/ "Let's do the caching task" | -| Work | -- | "Only change files in internal/cache/" | +| Work | *(none)* | "Only change files in internal/cache/" | | Commit | `/ctx-commit` | "Commit this" / "Ship it" | | Reflect | `/ctx-reflect` | "What did we learn?" / *(agent offers at milestones)* | | Wrap up | `/ctx-wrap-up` | *(use the slash command for completeness)* | diff --git a/docs/recipes/steering.md b/docs/recipes/steering.md index 7ab90744c..0c1a1ccbc 100644 --- a/docs/recipes/steering.md +++ b/docs/recipes/steering.md @@ -19,7 +19,7 @@ walks through writing a steering file from scratch, validating which prompts will trigger it, and syncing it out to your configured AI tools. -!!! tip "Before you start" +!!! tip "Before You Start" If you're unsure whether a rule belongs in `steering/`, `DECISIONS.md`, or `CONVENTIONS.md`, read the "Steering vs decisions vs conventions" admonition on the @@ -28,7 +28,7 @@ configured AI tools. when asked about Y," that's steering. Otherwise it's probably a decision or convention. -## Start here — customize the foundation files +## Start Here: Customize the Foundation Files **`ctx init` scaffolds four foundation steering files** for you the first time you initialize a project: @@ -46,8 +46,8 @@ and the `tools` scope. The comment is invisible in rendered markdown but visible when you edit the file. Delete it once the file is yours. -All four default to `inclusion: always` and `priority: 10` -— they fire on **every** AI tool call until you customize +All four default to `inclusion: always` and `priority: 10`, +so they fire on **every** AI tool call until you customize them. If you're reading this recipe and haven't touched them yet, **open each one now and replace the placeholder bullet list with actual rules for your project**. 
That's @@ -56,14 +56,14 @@ the highest-leverage five minutes you can spend in a new What to fill in, by file: -**`product.md`** — The elevator pitch plus hard scope: +**`product.md`**: The elevator pitch plus hard scope: - One-sentence product description. - Primary users and their top job-to-be-done. - Two or three "this is explicitly out of scope" items so the AI doesn't wander. -**`tech.md`** — Technology and constraints: +**`tech.md`**: Technology and constraints: - Languages and versions (`Go 1.22`, `Node 20`, etc.). - Frameworks and key libraries. @@ -72,13 +72,13 @@ What to fill in, by file: "no external DB for unit tests". These are the things that burn agents when they don't know them. -**`structure.md`** — Layout and naming: +**`structure.md`**: Layout and naming: - Top-level directories and their purpose. - Where new files should go (and where they should NOT). - Naming conventions for packages, files, types. -**`workflow.md`** — Process rules: +**`workflow.md`**: Process rules: - Branch strategy (main-only, trunk-based, feature branches). @@ -88,11 +88,11 @@ What to fill in, by file: After editing, the next AI tool call in Claude Code will pick up the new rules automatically via the plugin's -`PreToolUse` hook — no sync step, no restart. Other tools +`PreToolUse` hook, with no sync step and no restart. Other tools (Cursor, Cline, Kiro) need `ctx steering sync` to export into their native format. -!!! note "Prefer a bare `.context/steering/` directory?" +!!! note "Prefer a Bare `.context/steering/` Directory?" Re-run `ctx init --no-steering-init` and delete the scaffolded files. `ctx init` leaves existing files alone, so the flag is only needed if you want to opt @@ -110,7 +110,7 @@ before touching the database. You want the AI to flag this concern automatically whenever it's asked to write an HTTP handler, without you having to remind it every session. -!!! warning "Claude Code users: pick `always`, not `auto`" +!!! 
warning "Claude Code Users: Pick `always`, Not `auto`" This walkthrough uses `inclusion: auto` because the scenario is a scoped rule that matches a specific kind of prompt. That works natively on **Cursor, Cline, and @@ -118,7 +118,7 @@ handler, without you having to remind it every session. themselves). On **Claude Code**, `auto` does **not** fire through - the plugin's `PreToolUse` hook — the hook passes an + the plugin's `PreToolUse` hook. The hook passes an empty prompt to `ctx agent`, so only `always` files match. Claude can still reach an `auto` file by calling the `ctx_steering_get` MCP tool, but that @@ -135,7 +135,7 @@ handler, without you having to remind it every session. "Prefer `inclusion: always` for Claude Code" section for the full trade-off. -## Step 1 — scaffold the file +## Step 1: Scaffold the File ```bash ctx steering add api-validation @@ -158,7 +158,7 @@ The defaults are deliberately conservative: `inclusion: manual` means the file won't be applied until you opt in, which keeps the rules out of the prompt until you've reviewed them. -## Step 2 — fill in the rule +## Step 2: Fill in the Rule Open the file and write the rule body plus a focused description. The description is what `inclusion: auto` matches @@ -188,16 +188,16 @@ rather than inline checks. Notes on the choices: -- **`inclusion: auto`** — this rule should fire automatically +- **`inclusion: auto`**: this rule should fire automatically on HTTP-handler-shaped prompts, not always. -- **`priority: 20`** — lower than the default, so this rule +- **`priority: 20`**: lower than the default, so this rule appears near the top of the prompt alongside other high-priority rules. -- **Description** is keyword-rich: "HTTP handler input - validation and request parsing" — the `auto` matcher scores +- **Description** is keyword-rich ("HTTP handler input + validation and request parsing"); the `auto` matcher scores prompts against these words. 
-## Step 3 — preview which prompts match +## Step 3: Preview Which Prompts Match Before committing the file, validate your description catches the prompts you care about: @@ -213,7 +213,7 @@ Steering files matching prompt "add an endpoint for updating user email": api-validation inclusion=auto priority=20 tools=all ``` -Good — the prompt matches. Try a negative case: +Good, the prompt matches. Try a negative case: ```bash ctx steering preview "fix a bug in the JSON renderer" @@ -224,7 +224,7 @@ If `api-validation` incorrectly fires for unrelated prompts, tighten the description. If it misses prompts it should catch, add more keywords. -## Step 4 — list to confirm metadata +## Step 4: List to Confirm Metadata ```bash ctx steering list @@ -234,13 +234,13 @@ Should show `api-validation` alongside any other files, with its inclusion mode and priority. If the list is wrong, check the frontmatter for typos. -## Step 5 — get the rules in front of the AI +## Step 5: Get the Rules in Front of the AI **Steering files are authored once in `.context/steering/`, but how they reach the AI depends on which tool you use.** There are two delivery mechanisms: -### Path A — native-rules tools (Cursor, Cline, Kiro) +### Path A: Native-Rules Tools (Cursor, Cline, Kiro) These tools read a specific directory for rules. `ctx steering sync` exports your files into that directory with @@ -258,13 +258,13 @@ Depending on the active tool in `.ctxrc` or `--tool`: | Cline | `.clinerules/` | | Kiro | `.kiro/steering/` | -The sync is idempotent — unchanged files are skipped. Run +The sync is idempotent; unchanged files are skipped. Run it whenever you edit a steering file. -### Path B — Claude Code and Codex (hook + MCP) +### Path B: Claude Code and Codex (Hook + MCP) Claude Code and Codex have **no native rules primitive**, -so `ctx steering sync` is a **no-op** for them — it +so `ctx steering sync` is a **no-op** for them; it deliberately skips both. 
Instead, steering reaches these tools through two non-sync channels: @@ -289,17 +289,17 @@ ctx setup claude-code --write That installs the plugin, wires the hook, and registers the MCP server. After that, steering files you edit are picked -up on the next tool call — no sync step needed. +up on the next tool call, with no sync step needed. !!! tip "Running `ctx steering sync` with Claude Code" - It won't error — it will simply report that Claude and + It won't error; it will simply report that Claude and Codex aren't sync targets and skip them. If Claude Code is your only tool, you never need to run `sync`. If you use both Claude Code **and** (say) Cursor, run `sync` to keep Cursor up to date; the Claude pipeline takes care of itself via the hook. -## Step 6 — verify the AI sees it +## Step 6: Verify the AI Sees It Open your AI tool and ask it something the rule should fire on: @@ -308,22 +308,22 @@ on: If the rule is working, the AI's first response should mention input validation, typed structs, and the -`internal/validate/` package — because that's what the +`internal/validate/` package, because that's what the steering file told it to do. If nothing happens, the fix depends on which path you're on: -**Path A — Cursor/Cline/Kiro**: +**Path A (Cursor/Cline/Kiro)**: 1. Re-run `ctx steering preview` with the literal prompt to confirm the match. 2. Run `ctx steering list` and verify `inclusion` is `auto`, not `manual`. 3. Check the tool's own config directory (e.g. - `.cursor/rules/`) — the file should be there after + `.cursor/rules/`); the file should be there after `ctx steering sync`. -**Path B — Claude Code**: +**Path B (Claude Code)**: 1. Re-run `ctx steering preview` with the literal prompt to confirm the match. @@ -338,9 +338,9 @@ If nothing happens, the fix depends on which path you're on: `ctx_steering_get` MCP tool with my prompt and show me the result." 
If the MCP tool returns your rule, Claude has access but isn't pulling it into the initial - context packet — tighten the description keywords. + context packet; tighten the description keywords. -## Common mistakes +## Common Mistakes **Too-generic descriptions.** `description: general coding` will match almost every prompt and flood the context window. @@ -362,11 +362,11 @@ permanently. Only use `always` for true invariants (security, safety, licensing). Everything else should be `auto` or `manual`. -## See also +## See Also -- [`ctx steering` reference](../cli/steering.md) — full +- [`ctx steering` reference](../cli/steering.md): full command, flag, and frontmatter reference. -- [`ctx setup`](../cli/setup.md) — configure which tools the +- [`ctx setup`](../cli/setup.md): configure which tools the steering sync writes to. -- [Authoring triggers](triggers.md) — if you want +- [Authoring triggers](triggers.md): if you want script-based automation, not rule-based prompt injection. diff --git a/docs/recipes/task-management.md b/docs/recipes/task-management.md index 5dc23a7fb..40c9f854c 100644 --- a/docs/recipes/task-management.md +++ b/docs/recipes/task-management.md @@ -16,10 +16,10 @@ grows cluttered with completed checkboxes that obscure the remaining work. How do you manage work items that span multiple sessions without losing context? -!!! tip "Prefer skills over raw commands" +!!! tip "Prefer Skills over Raw Commands" When working with an AI agent, use `/ctx-task-add` instead of raw `ctx add task`. The agent automatically picks up session ID, branch, - and commit hash from its context — no manual flags needed. + and commit hash from its context, so no manual flags are needed. ## TL;DR diff --git a/docs/recipes/triggers.md b/docs/recipes/triggers.md index 66587a427..7ef4179e9 100644 --- a/docs/recipes/triggers.md +++ b/docs/recipes/triggers.md @@ -19,7 +19,7 @@ specific events during an AI session. They're how you express this path, check Y first." 
This recipe walks through writing your first trigger, testing it, and enabling it safely. -!!! danger "Triggers execute arbitrary code" +!!! danger "Triggers Execute Arbitrary Code" A trigger is a shell script with the executable bit set. It runs with the same privileges as your AI tool and receives JSON input on stdin. **Treat triggers like @@ -42,9 +42,9 @@ your first trigger, testing it, and enabling it safely. You want a `pre-tool-use` trigger that blocks the AI from editing anything in `internal/crypto/` without explicit confirmation. Cryptographic code is sensitive, and accidental -edits have caused outages before — you want a hard gate. +edits have caused outages before, and you want a hard gate. -## Step 1 — scaffold the script +## Step 1: Scaffold the Script ```bash ctx trigger add pre-tool-use protect-crypto @@ -70,12 +70,12 @@ path=$(echo "$payload" | jq -r '.path // empty') echo '{"action": "allow"}' ``` -Note: the directory is `.context/hooks/pre-tool-use/` — the +Note: the directory is `.context/hooks/pre-tool-use/`; the on-disk layout still uses `hooks/` even though the command is `ctx trigger`. If you `ls .context/hooks/`, that's where your triggers live. -## Step 2 — write the logic +## Step 2: Write the Logic Open the file and replace the template body: @@ -113,17 +113,17 @@ echo '{"action": "allow"}' A few things to note: -- **`set -euo pipefail`** — any unhandled error aborts the +- **`set -euo pipefail`**: any unhandled error aborts the script. Critical for a security-relevant trigger. -- **Quote everything from `jq`** — the `path` field comes from +- **Quote everything from `jq`**: the `path` field comes from the AI tool; treat it as untrusted input. -- **Explicit `allow` case** — the default is allow. An +- **Explicit `allow` case**: the default is allow. An empty or missing response is a risky default. 
-- **Use `jq -n --arg`** for output construction — safer than +- **Use `jq -n --arg`** for output construction, as it is safer than string concatenation when the message may contain special characters. -## Step 3 — test with a mock payload +## Step 3: Test with a Mock Payload Before enabling the trigger, test it with a realistic mock input using `ctx trigger test`. This runs the script against @@ -155,7 +155,7 @@ If any of these cases misbehave, **fix the trigger before enabling it.** The trigger is disabled at this point, so misbehavior doesn't affect real AI sessions. -## Step 4 — enable it +## Step 4: Enable It Once the test cases pass, enable the trigger: @@ -175,7 +175,7 @@ ctx trigger list Should show `protect-crypto` under `pre-tool-use` with an enabled indicator. -## Step 5 — iterate safely +## Step 5: Iterate Safely If you discover a bug after enabling, **disable first, fix second**: @@ -187,13 +187,13 @@ ctx trigger test pre-tool-use --tool write_file --path internal/crypto/aes.go ctx trigger enable protect-crypto ``` -Disabling simply clears the executable bit — the script stays +Disabling simply clears the executable bit; the script stays on disk, and `ctx trigger enable` re-enables it without rewriting anything. -## Patterns worth copying +## Patterns Worth Copying -### Logging, not blocking +### Logging, Not Blocking For auditing or analytics, return `{"action":"allow"}` always and append to a log as a side effect: @@ -206,20 +206,20 @@ echo "$payload" >> .context/logs/tool-use.jsonl echo '{"action":"allow"}' ``` -### Context injection at session start +### Context Injection at Session Start A `session-start` trigger can prepend text to the agent's initial prompt by emitting `{"action":"inject", "content": "..."}` -— useful for injecting daily standup notes, open PRs, or +. This is useful for injecting daily standup notes, open PRs, or rotating TODOs without storing them in a steering file. 
-### Chaining triggers of the same type +### Chaining Triggers of the Same Type Multiple scripts in the same type directory all run. If any returns `action: block`, the block wins. Keep individual triggers single-purpose and rely on composition. -## Common mistakes +## Common Mistakes **Forgetting the shebang.** Without `#!/usr/bin/env bash`, the trigger won't execute even with the executable bit set. @@ -240,15 +240,15 @@ strings. **Mixing `hook` and `trigger` vocabulary.** The command is `ctx trigger` but the on-disk directory is `.context/hooks/`. The feature was renamed; the directory name lags behind. -Don't let this confuse you — they refer to the same thing. +Don't let this confuse you; they refer to the same thing. -## See also +## See Also -- [`ctx trigger` reference](../cli/trigger.md) — full +- [`ctx trigger` reference](../cli/trigger.md): full command, flag, and event-type reference. -- [`ctx steering`](../cli/steering.md) — persistent rules, +- [`ctx steering`](../cli/steering.md): persistent rules, not scripts. Use steering when the thing you want is "tell the AI to always do X" rather than "run a script when Y happens." -- [Writing steering files](steering.md) — the rule-based +- [Writing steering files](steering.md): the rule-based equivalent of this recipe. diff --git a/docs/recipes/troubleshooting.md b/docs/recipes/troubleshooting.md index c8d267caf..c6d86650b 100644 --- a/docs/recipes/troubleshooting.md +++ b/docs/recipes/troubleshooting.md @@ -137,14 +137,34 @@ QA reminder events from that specific session. ## Common Problems -### "ctx: not initialized" +### "No context directory specified for this project" **Symptoms**: Any `ctx` command fails with +`Error: no context directory specified for this project` (*possibly +with a likely-candidate hint or a candidate list depending on what's +visible from your CWD*). + +**Cause**: `ctx` does not walk the filesystem. 
It requires the target +`.context/` directory to be declared explicitly before any non-exempt +command runs. + +**Fix**: bind `CTX_DIR` for the current shell: + +```bash +eval "$(ctx activate)" +``` + +See [Activating a Context Directory](activating-context.md) for the +full recipe (one-shot `CTX_DIR=...` inline form, CI patterns, direnv +setup). + +### "ctx: not initialized" + +**Symptoms**: After declaring `CTX_DIR`, the command fails with `ctx: not initialized - run "ctx init" first`. -**Cause**: You're running ctx in a directory without an initialized -`.context/` directory. This guard runs on all user-facing commands to -prevent confusing downstream errors. +**Cause**: The declared directory exists but hasn't been initialized +with template files. **Fix**: @@ -153,10 +173,33 @@ ctx init # create .context/ with template files ctx init --minimal # or just the essentials (CONSTITUTION, TASKS, DECISIONS) ``` -**Commands that work without initialization**: `ctx init`, `ctx setup`, -`ctx doctor`, and help-only grouping commands (`ctx`, `ctx system`). +**Commands that work without CTX_DIR or initialization**: `ctx init`, +`ctx activate`, `ctx deactivate`, `ctx setup`, `ctx doctor`, +`ctx guide`, `ctx why`, `ctx config switch/status`, `ctx hub *`, and +help-only grouping commands. + +### "My CLI and My Claude Code Session Disagree on the Project" + +**Symptoms**: A `!`-pragma or interactive `ctx` call writes to the +wrong `.context/`; or you ran `ctx remind add` in shell A and the +reminder shows up in project B's notifications. 
+ +**Cause**: `CTX_DIR` is sourced from three different surfaces, and +they can drift apart: + +| Surface | Source of `CTX_DIR` | Bound when | +|------------------------------------|---------------------------------------------|-----------------------------------------| +| Claude Code hooks | `${CLAUDE_PROJECT_DIR}/.context` (injected) | Every hook line; the project Claude is in | +| `!`-pragma in chat / interactive shell | Whatever the parent shell exported | When you ran `eval "$(ctx activate)"` | +| New shell tab opened mid-session | Whatever your shellrc exports | Login | + +When these drift, the per-prompt `check-anchor-drift` hook fires a +verbatim warning naming both values. To fix: re-run +`eval "$(ctx activate)"` from inside the project the Claude Code +session is editing, or close the shell tab and reopen it from the +right working directory. -### "My hook isn't firing" +### "My Hook Isn't Firing" **Symptoms**: No nudges appearing, webhook silent, event log shows no entries for the expected hook. @@ -186,7 +229,7 @@ ctx hook event --hook check-persistence * **Hook silenced**: a custom message override may be an empty file: check `ctx hook message list` for overrides -### "*Too many nudges*" +### "*Too Many Nudges*" **Symptoms**: The agent is overwhelmed with hook output. Context checkpoints, persistence reminders, and QA gates fire constantly. @@ -217,7 +260,7 @@ ctx hook event --json | jq -r '.detail.hook // "unknown"' \ * **`ctx` version mismatch**: Build (*or download*) and install the latest `ctx` vesion. -### "*Context seems stale*" +### "*Context Seems Stale*" **Symptoms**: The agent references outdated information, paths that don't exist, or decisions that were reversed. @@ -247,7 +290,7 @@ ctx status --verbose being used, context doesn't get refreshed. See [Session Ceremonies](session-ceremonies.md). 
-### "*The agent isn't following instructions*" +### "*The Agent Isn't Following Instructions*" **Symptoms**: The agent ignores conventions, forgets decisions, or acts contrary to `CONSTITUTION.md` rules. diff --git a/docs/recipes/when-to-use-agent-teams.md b/docs/recipes/when-to-use-agent-teams.md index 261c67748..f12297fef 100644 --- a/docs/recipes/when-to-use-agent-teams.md +++ b/docs/recipes/when-to-use-agent-teams.md @@ -170,7 +170,7 @@ before anyone does any work. For small tasks, that overhead dominates. Four practical team compositions for common workflows. -### Feature Development (3 agents) +### Feature Development (3 Agents) | Role | Responsibility | |-------------|-----------------------------------------------------------| @@ -184,7 +184,7 @@ implementer starts. Reviewer runs after each implementer commit. **Anti-pattern**: All three agents editing the same file simultaneously. Sequence the work so only one agent touches a file at a time. -### Consolidation Sprint (3-4 agents) +### Consolidation Sprint (3-4 Agents) | Role | Responsibility | |------------|----------------------------------------------------------| @@ -199,7 +199,7 @@ Each agent claims a subset of issues by adding `#in-progress` labels. **Anti-pattern**: Fixer and doc writer both editing ARCHITECTURE.md. Assign file ownership explicitly. -### Release Prep (2 agents) +### Release Prep (2 Agents) | Role | Responsibility | |---------------|--------------------------------------------------------| @@ -212,7 +212,7 @@ notes agent works from `git log`; validation agent works from `make audit`. **Anti-pattern**: Release notes agent running tests "to verify." Each agent stays in its lane. 
-### Documentation Sprint (3 agents) +### Documentation Sprint (3 Agents) | Role | Responsibility | |---------------|------------------------------------------------------------| diff --git a/docs/reference/audit-conventions.md b/docs/reference/audit-conventions.md index 4ab3a42f3..a398ff9d6 100644 --- a/docs/reference/audit-conventions.md +++ b/docs/reference/audit-conventions.md @@ -8,8 +8,8 @@ title: Code Conventions icon: lucide/scroll-text --- -![ctx](../images/ctx-banner.png) +![ctx](../images/ctx-banner.png) # Code Conventions: Common Patterns and Fixes @@ -22,7 +22,7 @@ code under `internal/`. --- -## Variable Shadowing (bare `err :=` reuse) +## Variable Shadowing (Bare `err :=` Reuse) **Test:** `TestNoVariableShadowing` @@ -137,7 +137,7 @@ func loadContext() { } ``` -**Before (format verbs — also caught):** +**Before (format verbs, also caught):** ```go func EntryHash(text string) string { @@ -155,7 +155,7 @@ func EntryHash(text string) string { } ``` -**Before (URL schemes — also caught):** +**Before (URL schemes, also caught):** ```go if strings.HasPrefix(target, "https://") || @@ -354,7 +354,7 @@ Each `.dead` file includes a header: **Rule:** If a test-only allowlist entry is needed (the export exists only for test use), add the fully qualified symbol to `testOnlyExports` -in `dead_exports_test.go`. Keep this list small — prefer eliminating +in `dead_exports_test.go`. Keep this list small; prefer eliminating the export. --- @@ -371,7 +371,7 @@ This prevents `core/` from becoming a god package. 
``` internal/cli/dep/core/ - go.go # violation — logic at core/ level + go.go # violation: logic at core/ level python.go # violation node.go # violation types.go # violation @@ -659,7 +659,7 @@ if len(entries) > config.MaxEntries { } ``` -**Exempt:** `0`, `1`, `-1`, `2`–`10`, strconv radix/bitsize args +**Exempt:** `0`, `1`, `-1`, `2`-`10`, strconv radix/bitsize args (`10`, `32`, `64` in `strconv.Parse*`/`Format*`), octal permissions (caught separately by `TestNoRawPermissions`), and `const`/`var` definition sites. @@ -715,7 +715,7 @@ func Journal(cmd *cobra.Command, ...) { } ``` --- -## Predicate Naming (no `Is`/`Has`/`Can` prefix) +## Predicate Naming (No `Is`/`Has`/`Can` Prefix) **Test:** None (manual review convention) @@ -744,7 +744,7 @@ reads more naturally (`isValid` in a local context is fine). This convention applies to exported methods and package-level functions. See CONVENTIONS.md "Predicates" section. -This is not yet enforced by an AST test — it requires semantic +This is not yet enforced by an AST test; it requires semantic understanding of return types and naming intent that makes automated detection fragile. Apply during code review. @@ -762,7 +762,7 @@ functions. Public API and private helpers live in separate files. ``` load.go func Load() { ... } // exported - func parseHeader() { ... } // unexported — violation + func parseHeader() { ... } // unexported, violation ``` **After:** @@ -778,7 +778,7 @@ parse.go --- -## Stray err.go Files +## Stray `err.go` Files **Test:** `TestNoStrayErrFiles` @@ -806,7 +806,7 @@ Each `cmd/$sub/` directory under `internal/cli/` may contain only internal/cli/doctor/cmd/root/ cmd.go run.go - format.go # violation — helper in cmd dir + format.go # violation: helper in cmd dir ``` **After:** @@ -831,10 +831,10 @@ internal/cli/doctor/core/format/ Three tests enforce DescKey/Use constant discipline: 1. `Use*` constants appear only in cobra `Use:` struct field - assignments — never as arguments to `desc.Text()` or elsewhere.
+ assignments, never as arguments to `desc.Text()` or elsewhere. 2. `DescKey*` constants are passed only to `assets.CommandDesc()`, - `assets.FlagDesc()`, or `desc.Text()` — never to cobra `Use:`. -3. No cross-namespace lookups — `TextDescKey` must not be passed to + `assets.FlagDesc()`, or `desc.Text()`, never to cobra `Use:`. +3. No cross-namespace lookups: `TextDescKey` must not be passed to `CommandDesc()`, `FlagDescKey` must not be passed to `Text()`, etc. --- @@ -853,7 +853,7 @@ constant in `config/entry/`. ## Other Enforced Patterns -These tests follow the same fix approach — extract the operation to +These tests follow the same fix approach: extract the operation to its designated package: | Test | Violation | Fix | diff --git a/docs/reference/comparison.md b/docs/reference/comparison.md index 6837b80bd..f6f529720 100644 --- a/docs/reference/comparison.md +++ b/docs/reference/comparison.md @@ -206,22 +206,22 @@ These tools are designed for: `ctx` is intentionally: -* **local-first** — context lives next to your code, not +* **local-first**: context lives next to your code, not behind a service boundary. -* **file-based** — everything important is a markdown +* **file-based**: everything important is a markdown file you can read, diff, grep, and version-control. -* **single-binary core** — the context persistence path +* **single-binary core**: the context persistence path (`init`, `add`, `agent`, `status`, `drift`, `load`, `sync`, `compact`, `task`, `decision`, `learning`, and their siblings) is a single Go binary with no required - runtime dependencies. Optional integrations — `ctx + runtime dependencies. Optional integrations (`ctx trace` (needs `git`), `ctx serve` (needs `zensical`), the `ctx` Hub (needs a running hub), Claude Code - plugin (needs `claude`) — are opt-in and each declares + plugin (needs `claude`)) are opt-in and each declares its dependency explicitly. 
-* **CLI-driven** — every feature is reachable from the +* **CLI-driven**: every feature is reachable from the command line and scriptable. -* **developer-controlled** — no auto-updating cloud +* **developer-controlled**: no auto-updating cloud service, no telemetry, no account to sign up for. The core `ctx` binary does not require: @@ -259,7 +259,7 @@ The two are complementary. `ctx` can absorb auto-memory as an input source (importing what the model remembered into structured context files) while providing the durable, inspectable layer that auto-memory lacks. -### .cursorrules / .claude/rules +### `.cursorrules` / `.claude/rules` Static rule files (`.cursorrules`, `.claude/rules/`) declare conventions: coding style, forbidden patterns, preferred libraries. They are effective @@ -287,7 +287,7 @@ ordering across file types, and no structured format for decisions or learnings. `ctx` provides the full lifecycle: load, accumulate, persist, and budget. -### Copilot @workspace +### Copilot `@workspace` GitHub Copilot's `@workspace` performs workspace-wide code search. It answers **"what code exists?"** - finding function definitions, usages, diff --git a/docs/reference/design-invariants.md b/docs/reference/design-invariants.md index c8a2d4cd4..9c23691aa 100644 --- a/docs/reference/design-invariants.md +++ b/docs/reference/design-invariants.md @@ -162,7 +162,7 @@ permanent assets. --- -## 11. Policies Are Encoded, not Remembered +## 11. Policies Are Encoded, Not Remembered Alignment **must not** depend on recall or goodwill.
diff --git a/docs/reference/scratchpad.md b/docs/reference/scratchpad.md index 0d6cc0d11..e2cb8202e 100644 --- a/docs/reference/scratchpad.md +++ b/docs/reference/scratchpad.md @@ -93,7 +93,7 @@ ctx pad edit 1 --append "$(ctx pad show 2)" # Reorder ctx pad mv 2 1 -# Clean up (IDs are stable — they don't shift when entries are deleted) +# Clean up (IDs are stable; they don't shift when entries are deleted) ctx pad rm 2 ``` @@ -101,7 +101,7 @@ ctx pad rm 2 Entries can contain `#word` tags for lightweight categorization. Tags are convention-based: any `#word` token in an entry's text is a tag. No special -syntax to add or remove them — use the existing `add` and `edit` commands. +syntax to add or remove them; use the existing `add` and `edit` commands. ```bash # Add tagged entries @@ -142,7 +142,7 @@ ctx pad edit 1 --append "checked" --tag done ctx pad edit 1 "check DNS propagation" ``` -Entry IDs are stable — they don't shift when other entries are deleted, so +Entry IDs are stable; they don't shift when other entries are deleted, so `ctx pad rm 3` always targets the same entry. Use `ctx pad normalize` to reassign IDs as 1..N if gaps bother you. Tags are case-sensitive and support letters, digits, hyphens, and underscores (`#high-priority`, `#v2`, `#my_tag`). diff --git a/docs/reference/session-journal.md b/docs/reference/session-journal.md index 2fb347517..0d95e8bf1 100644 --- a/docs/reference/session-journal.md +++ b/docs/reference/session-journal.md @@ -375,7 +375,7 @@ To use: open the output directory in Obsidian ("Open folder as vault"). ctx journal obsidian --output ~/vaults/ctx-journal ``` -!!! tip "Static site vs Obsidian Vault" +!!! tip "Static Site vs Obsidian Vault" Use `ctx journal site` when you want a **web-browsable** archive with search and dark mode. Use `ctx journal obsidian` when you want **graph view**, **backlinks**, and **tag-based navigation** inside Obsidian. 
Both use the @@ -394,10 +394,10 @@ import → enrich → rebuild |--------------|----------------------------|-----------------------------------------|------------------------------------| | **Import** | `ctx journal import --all` | Converts session JSONL to Markdown | File already exists (safe default) | | **Enrich** | `/ctx-journal-enrich` | Adds frontmatter, summaries, topics | Frontmatter already present | -| **Rebuild** | `ctx journal site --build` | Generates static HTML site | -- | -| **Obsidian** | `ctx journal obsidian` | Generates Obsidian vault with wikilinks | -- | +| **Rebuild** | `ctx journal site --build` | Generates static HTML site | *(never)* | +| **Obsidian** | `ctx journal obsidian` | Generates Obsidian vault with wikilinks | *(never)* | -!!! tip "One-command pipeline" +!!! tip "One-Command Pipeline" `/ctx-journal-enrich-all` handles import automatically - it detects unimported sessions and imports them before enriching. You only need to run `ctx journal site --build` afterward. diff --git a/docs/security/agent-security.md b/docs/security/agent-security.md index 69d013706..4fb86b1f4 100644 --- a/docs/security/agent-security.md +++ b/docs/security/agent-security.md @@ -78,7 +78,7 @@ rules stated early. Edge cases where instructions are ambiguous. **Verdict**: Necessary but not sufficient. Good for the common case. **Do not** rely on it for security boundaries. -### Layer 2: Application Controls (*Deterministic at Runtime, Mutable Across Iterations*) +### Layer 2: Application Controls (*Deterministic at Runtime, Mutable across Iterations*) AI tool runtimes (*Claude Code, Cursor, etc.*) provide permission systems: tool allowlists, command restrictions, confirmation prompts. @@ -204,7 +204,7 @@ docker run --rm \ ./loop.sh ``` -!!! danger "Docker Socket is sudo Access" +!!! danger "Docker Socket Is `sudo` Access"
An agent with socket access can spawn sibling containers with full host diff --git a/docs/security/design.md b/docs/security/design.md new file mode 100644 index 000000000..e4f56fa34 --- /dev/null +++ b/docs/security/design.md @@ -0,0 +1,183 @@ +--- +# / ctx: https://ctx.ist +# ,'`./ do you remember? +# `.,'\ +# \ Copyright 2026-present Context contributors. +# SPDX-License-Identifier: Apache-2.0 + +title: Security Design +icon: lucide/shield-half +--- + +![ctx](../images/ctx-banner.png) + +How `ctx` thinks about security: trust boundaries, what the system +does and does not do for you, the engineering principle behind the +audit trail, and the permission hygiene workflow. + +For vulnerability disclosure, see +[Reporting Vulnerabilities](reporting.md). + +## Trust Model + +`ctx` operates within a single trust boundary: **the local +filesystem**. + +The person who authors `.context/` files is the same person who runs +the agent that reads them. There is no remote input, no shared state, +and no server component. + +This means: + +* **`ctx` does not sanitize context files for prompt injection.** This + is a deliberate design choice, not an oversight. The files are + authored by the developer who owns the machine: sanitizing their + own instructions back to them would be counterproductive. +* **If you place adversarial instructions in your own `.context/` + files, your agent will follow them.** This is expected behavior. + You control the context; the agent trusts it. + +!!! warning "Shared Repositories" + In shared repositories, `.context/` files should be reviewed in + code review (*the same way you would review CI/CD config or + Makefiles*). A malicious contributor could add harmful + instructions to `CONSTITUTION.md` or `TASKS.md`. + +## What `ctx` Does for Security + +`ctx` is designed with security in mind: + +* **No secrets in context**: The constitution explicitly forbids + storing secrets, tokens, API keys, or credentials in `.context/` + files. 
+* **Local only**: `ctx` runs entirely locally with no external + network calls. +* **No code execution**: `ctx` reads and writes Markdown files only; + it does not execute arbitrary code. +* **Git-tracked**: Core context files are meant to be committed, so + they should never contain sensitive data. Exception: `sessions/` + and `journal/` contain raw conversation data and should be + gitignored. + +## Permission Hygiene + +Claude Code evaluates permissions in deny → ask → allow order. +`ctx init` automatically populates `permissions.deny` with rules +that block dangerous operations before the allow list is ever +consulted. + +**Default deny rules block:** + +* `sudo`, `git push`, `rm -rf /`, `rm -rf ~`, `curl`, `wget`, + `chmod 777` +* `Read` / `Edit` of `.env`, credentials, secrets, `.pem`, `.key` + files + +Even with deny rules in place, the allow list accumulates one-off +permissions over time. Periodically review for: + +* **Destructive commands**: `git reset --hard`, `git clean -f`, etc. +* **Config injection vectors**: permissions that allow modifying + files controlling agent behavior (`CLAUDE.md`, + `settings.local.json`). +* **Broad wildcards**: overly permissive patterns that pre-approve + more than intended. + +For the full hygiene workflow, see the +[Claude Code Permission Hygiene](../recipes/claude-code-permissions.md) +recipe. + +## State File Management + +Hook state files (throttle markers, prompt counters, pause markers) +are stored in `.context/state/`, which is project-scoped and +gitignored. State files are automatically managed by the hooks that +create them; no manual cleanup is needed. + +## Log-First Audit Trail + +The event log (`.context/state/events.jsonl`) is the authoritative +record of what `ctx` hooks did during a session. Several +audit-adjacent features depend on that log being trustworthy, not +merely best-effort: + +* `ctx event` / `ctx system view-events` replays session history + from the log. 
+* Webhook notifications give operators a real-time signal that + assumes every notification corresponds to a logged event. +* Drift, freshness, and map-staleness checks count events over + time and surface regressions. + +A log that silently drops entries while the rest of the system +claims success is worse than no log at all: operators see a green +TUI and a webhook notification and conclude "it happened," even +when the audit trail never landed. The codebase treats this as a +correctness problem, not a UX polish problem. + +### The Rule + +> Any code path that emits an observable side effect (webhook, +> stdout marker, throttle-file touch, state mutation) must append +> the corresponding event-log entry **first** and gate the side +> effect on the append succeeding. If the log write fails, the +> side effect must not fire. + +In code, this shape: + +```go +if appendErr := event.Append(channel, msg, sessionID, ref); appendErr != nil { + return appendErr // do NOT send the webhook or touch the marker +} +if sendErr := notify.Send(channel, msg, sessionID, ref); sendErr != nil { + return sendErr +} +// downstream side effects (marker touch, stdout, etc.) +``` + +The `nudge.Relay` helper in `internal/cli/system/core/nudge` +enforces this for the common "log + webhook" pair. Hook `Run` +functions that compose their own sequence (`session_event`, +`heartbeat`, several `check_*` hooks) follow the same ordering +explicitly. + +### Known Gaps + +* **Nudge webhooks have no log channel.** `nudge.EmitAndRelay` + sends a "nudge" notification before the "relay" event is logged. + The nudge leg is fire-and-forget because no event-log channel + records nudges today. A future refactor may add one; until then + this is the one documented exception. +* **`ctx agent --cooldown` and `ctx doctor` propagate rather than + gate.** They surface real errors to the caller (usually Cobra) + rather than deciding what to do with them locally. 
Editors that + invoke these commands may display errors in an ugly way; the + ugliness is the correct signal (something persisted is broken), + not a defect to smooth over. +* **Verbose hook logs in `core/log.Message` stay best-effort.** + That logger captures per-hook activity (how many prompts, which + percent, etc.) for debugging; it is NOT the event audit trail. + Its failures go to stderr via `log/warn.Warn` rather than + propagating, because losing an operational log line is not a + correctness problem. + +### Background + +The `error` returns on `event.Append`, `io.AppendBytes`, +`nudge.Relay`, and `cooldown.Active` / `cooldown.TouchTombstone` +were introduced as part of the resolver-tightening refactor. +Before that change, most hook paths called these helpers and +silently discarded their errors. The principle above was extracted +from the observation that every user-visible correctness problem +hit during the refactor traced back to some function saying "this +succeeded" when the underlying write never landed. + +## Best Practices + +1. **Review before committing**: Always review `.context/` files + before committing. +2. **Use `.gitignore`**: If you must store sensitive notes locally, + add them to `.gitignore`. +3. **Drift detection**: Run `ctx drift` to check for potential + issues. +4. **Permission audit**: Review `.claude/settings.local.json` after + busy sessions. diff --git a/docs/security/hub.md b/docs/security/hub.md index 15e0bf6cb..21ab8f0b7 100644 --- a/docs/security/hub.md +++ b/docs/security/hub.md @@ -11,12 +11,12 @@ icon: lucide/shield ![ctx](../images/ctx-banner.png) -# `ctx` Hub: Security model +# `ctx` Hub: Security Model What the hub defends against, what it **does not** defend against, and the concrete mechanisms in play. -## Threat model +## Threat Model The hub is designed for **trusted cross-project knowledge sharing** within a team or homelab. It assumes: @@ -27,7 +27,7 @@ within a team or homelab. 
It assumes: **strongly recommended** but not mandatory. - Client machines are trusted enough to hold a per-project client token. Losing a client token is roughly equivalent to losing an - API key — scoped damage, not total compromise. + API key: scoped damage, not total compromise. - Entry content is **not** secret. Decisions, learnings, and conventions may be indexed by AI agents, rendered in docs, shared across projects. Do not push credentials or PII into @@ -39,7 +39,7 @@ use a dedicated tool and keep the hub for knowledge sharing. ## Mechanisms -### Bearer tokens +### Bearer Tokens All RPCs except `Register` require a bearer token in gRPC metadata. Two kinds of tokens exist: @@ -54,7 +54,7 @@ prevent timing oracles, and looked up via an `O(1)` hash map so the comparison cost does not depend on the total number of registered clients. -### Client-side encryption at rest +### Client-Side Encryption at Rest `.context/.connect.enc` stores the client token and hub address, encrypted with **AES-256-GCM** using the same scheme the @@ -65,9 +65,9 @@ An attacker with read access to the project directory cannot learn the client token without also breaking ctx's local keyring. -### Hub-side token storage +### Hub-Side Token Storage -!!! warning "Tokens are stored in plaintext on the hub host" +!!! warning "Tokens Are Stored in Plaintext on the Hub Host" `/clients.json` currently stores client tokens **verbatim**, not hashed. Anyone with read access to the hub's data directory sees every registered client's token @@ -84,7 +84,7 @@ keyring. - Never expose `` over NFS, SMB, or shared filesystems. - Treat `` the same way you'd treat - `/etc/shadow` — back it up encrypted, never check it + `/etc/shadow`: back it up encrypted, never check it into version control. Hashing `clients.json` and moving to keyring-backed storage @@ -92,7 +92,7 @@ keyring. that lands, assume a hub host compromise equals total hub compromise. 
-### Input validation +### Input Validation Every published entry is validated before it touches the log: @@ -101,18 +101,18 @@ Every published entry is validated before it touches the log: - **ID** and **Origin** are required and non-empty. - **Content size** is capped at **1 MB**. Reasonable for text, hostile for attempts to fill the disk. -- **Duplicate project registration** is rejected — a client that +- **Duplicate project registration** is rejected; a client that replays an old `Register` call gets an error, not a second token. -### No script execution +### No Script Execution The hub never interprets entry content. There is no expression language, no template evaluation, no markdown rendering at ingest. Content is stored as bytes and fanned out to clients verbatim. -### Audit trail +### Audit Trail `entries.jsonl` is append-only. Every accepted publish is recorded with the publishing project's origin tag and sequence @@ -120,7 +120,7 @@ number. Nothing is ever deleted by the hub; retention is managed manually by the operator (see [log rotation](../operations/hub.md#log-rotation)). -## What the hub does **not** defend against +## What the Hub Does **Not** Defend Against - **Untrusted entry senders.** A client with a valid token can publish anything (within the 1 MB cap). There is no content @@ -137,7 +137,7 @@ manually by the operator (see decision containing an API key. Sanitize content before publishing. -## Operational hardening checklist +## Operational Hardening Checklist - [ ] Run the hub as an **unprivileged user** with `NoNewPrivileges=true` and `ProtectSystem=strict` (see @@ -155,12 +155,12 @@ manually by the operator (see - [ ] Run NTP on all clients to prevent entry-timestamp skew. - [ ] Do not publish from machines you do not trust. -## Responsible disclosure +## Responsible Disclosure Security issues in the hub follow the same process as the rest -of ctx — see [Reporting](reporting.md). +of ctx; see [Reporting](reporting.md). 
-## See also +## See Also - [`ctx` Hub Operations](../operations/hub.md) - [`ctx` Hub failure modes](../operations/hub-failure-modes.md) diff --git a/docs/security/index.md b/docs/security/index.md index 512cbc751..668c243e4 100644 --- a/docs/security/index.md +++ b/docs/security/index.md @@ -9,6 +9,14 @@ Security model, **agent hardening**, and **vulnerability reporting**. --- +### [Security Design](design.md) + +**Trust model**, what `ctx` does for security, **permission +hygiene**, state file management, and the **log-first audit trail** +principle. Read first to understand the security boundaries. + +--- + ### [Securing AI Agents](agent-security.md) **Defense in depth** for unattended AI agents: five layers of @@ -16,7 +24,8 @@ protection, each with a known bypass, strength in combination. --- -### [Security Policy](reporting.md) +### [Reporting Vulnerabilities](reporting.md) -**Trust model**, vulnerability reporting, permission hygiene, -and **security design principles**. +How to report a security issue: email, **GitHub private reporting**, +PGP-encrypted submissions, what to include, and the response +timeline. diff --git a/docs/security/reporting.md b/docs/security/reporting.md index 04cc63645..b7c4daa65 100644 --- a/docs/security/reporting.md +++ b/docs/security/reporting.md @@ -5,17 +5,22 @@ # \ Copyright 2026-present Context contributors. # SPDX-License-Identifier: Apache-2.0 -title: Security Policy +title: Reporting Vulnerabilities icon: lucide/shield --- ![ctx](../images/ctx-banner.png) +Disclosure process for security issues in `ctx`. For the broader +security model (trust boundaries, audit trail, permission hygiene), +see [Security Design](design.md). + ## Reporting Vulnerabilities At `ctx` we take security very seriously. -If you discover a security vulnerability in `ctx`, please report it responsibly. +If you discover a security vulnerability in `ctx`, please report it +responsibly. 
**Do NOT open a public issue for security vulnerabilities.** @@ -32,8 +37,8 @@ Send details to **security@ctx.ist**. ### Encrypted Reports (*Optional*) If your report contains sensitive details (*proof-of-concept exploits, -credentials, or internal system information*), you can encrypt your message -with our PGP key: +credentials, or internal system information*), you can encrypt your +message with our PGP key: * **In-repo**: [`SECURITY_KEY.asc`](https://github.com/ActiveMemory/ctx/blob/main/SECURITY_KEY.asc) * **Keybase**: [keybase.io/alekhinejose](https://keybase.io/alekhinejose/pgp_keys.asc) @@ -46,8 +51,8 @@ gpg --import SECURITY_KEY.asc gpg --armor --encrypt --recipient security@ctx.ist report.txt ``` -Encryption is optional. Unencrypted reports to **security@ctx.ist** or via -GitHub Private Reporting are perfectly fine. +Encryption is optional. Unencrypted reports to **security@ctx.ist** or +via GitHub Private Reporting are perfectly fine. ### What to Include @@ -58,21 +63,20 @@ GitHub Private Reporting are perfectly fine. ## Attribution -We appreciate responsible disclosure and will acknowledge security researchers -who report valid vulnerabilities (*unless they prefer to remain anonymous*). +We appreciate responsible disclosure and will acknowledge security +researchers who report valid vulnerabilities (*unless they prefer to +remain anonymous*). ----- +## Response Timeline -### Response Timeline +!!! note "Open Source, Best-Effort Timelines" + `ctx` is a volunteer-maintained open source project. -!!! note "Open source, Best-Effort Timelines" - `ctx` is a volunteer-maintained open source project. + The timelines below are **guidelines**, not guarantees, and depend + on contributor availability. - The timelines below are **guidelines**, not guarantees, and depend on - contributor availability. - - We will address security reports on a best-effort basis and prioritize - them by severity. 
+ We will address security reports on a best-effort basis and + prioritize them by severity. | Stage | Timeframe | @@ -80,77 +84,3 @@ who report valid vulnerabilities (*unless they prefer to remain anonymous*). | Acknowledgment | Within 48 hours | | Initial assessment | Within 7 days | | Resolution target | Within 30 days (*depending on severity*) | - ----- - -## Trust Model - -`ctx` operates within a single trust boundary: **the local filesystem**. - -The person who authors `.context/` files is the same person who runs the -agent that reads them. There is no remote input, no shared state, and no -server component. - -This means: - -* **`ctx` does not sanitize context files for prompt injection.** This is a - deliberate design choice, not an oversight. The files are authored by the - developer who owns the machine: Sanitizing their own instructions back - to them would be counterproductive. -* **If you place adversarial instructions in your own `.context/` files, - your agent will follow them.** This is expected behavior. You control the - context; the agent trusts it. - -!!! warning "Shared Repositories" - In shared repositories, `.context/` files should be reviewed in code - review (*the same way you would review CI/CD config or Makefiles*). A - malicious contributor could add harmful instructions to - `CONSTITUTION.md` or `TASKS.md`. - -## Security Design - -`ctx` is designed with security in mind: - -* **No secrets in context**: The constitution explicitly forbids storing - secrets, tokens, API keys, or credentials in `.context/` files -* **Local only**: `ctx` runs entirely locally with no external network calls -* **No code execution**: ctx reads and writes Markdown files only; it does - not execute arbitrary code -* **Git-tracked**: Core context files are meant to be committed, so they should - never contain sensitive data. 
Exception: `sessions/` and `journal/` contain - raw conversation data and should be gitignored - -## Permission Hygiene - -Claude Code evaluates permissions in deny → ask → allow order. `ctx init` -automatically populates `permissions.deny` with rules that block dangerous -operations before the allow list is ever consulted. - -**Default deny rules block:** - -* `sudo`, `git push`, `rm -rf /`, `rm -rf ~`, `curl`, `wget`, `chmod 777` -* `Read`/`Edit` of `.env`, credentials, secrets, `.pem`, `.key` files - -Even with deny rules in place, the allow list accumulates one-off permissions -over time. Periodically review for: - -* **Destructive commands**: `git reset --hard`, `git clean -f`, etc. -* **Config injection vectors**: permissions that allow modifying files - controlling agent behavior (`CLAUDE.md`, `settings.local.json`) -* **Broad wildcards**: overly permissive patterns that pre-approve - more than intended - -## State File Management - -Hook state files (throttle markers, prompt counters, pause markers) are -stored in `.context/state/`, which is project-scoped and gitignored. -State files are automatically managed by the hooks that create them; -no manual cleanup is needed. - -## Best Practices - -1. **Review before committing**: Always review `.context/` files before committing -2. **Use `.gitignore`**: If you must store sensitive notes locally, - add them to `.gitignore` -3. **Drift detection**: Run `ctx drift` to check for potential issues -4. **Permission audit**: Review `.claude/settings.local.json` after busy sessions diff --git a/docs/thesis/index.md b/docs/thesis/index.md index 89d96c974..e6f068262 100644 --- a/docs/thesis/index.md +++ b/docs/thesis/index.md @@ -282,7 +282,7 @@ or time-dependent scoring) destroys the ability to reproduce a context window and therefore to diagnose why a model produced a given output. Determinism in the assembly path is what makes the persistence layer auditable. 
-### Invariant 4: Human Authority Over Persistent State +### Invariant 4: Human Authority over Persistent State The agent may propose changes to context files but must not unilaterally modify them. All persistent changes go through human-reviewable git commits. @@ -471,7 +471,7 @@ automate it away. --- -## 6. Worked Example: Architectural Decision Under Two Models +## 6. Worked Example: Architectural Decision under Two Models We now instantiate the three-tier model in a concrete system (`ctx`) and illustrate the difference between prompt-time retrieval and cognitive state diff --git a/editors/vscode/CHANGELOG.md b/editors/vscode/CHANGELOG.md index cd4586a30..f3129a835 100644 --- a/editors/vscode/CHANGELOG.md +++ b/editors/vscode/CHANGELOG.md @@ -1,30 +1,30 @@ # Changelog -All notable changes to the **ctx — Persistent Context for AI** extension +All notable changes to the **ctx: Persistent Context for AI** extension will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/). 
-## [0.9.0] — 2026-03-19 +## [0.9.0] - 2026-03-19 ### Added - **@ctx chat participant** with 45 slash commands covering context lifecycle, task management, session recall, and discovery -- **Natural language routing** — type plain English after `@ctx` and +- **Natural language routing**: type plain English after `@ctx` and the extension maps it to the correct handler -- **Auto-bootstrap** — downloads the ctx CLI binary if not found on PATH -- **Detection ring** — terminal command watcher and file edit watcher +- **Auto-bootstrap**: downloads the ctx CLI binary if not found on PATH +- **Detection ring**: terminal command watcher and file edit watcher record governance violations for the MCP engine -- **Status bar reminders** — `$(bell) ctx` indicator for pending reminders -- **Automatic hooks** — file save, git commit, dependency change, and +- **Status bar reminders**: `$(bell) ctx` indicator for pending reminders +- **Automatic hooks**: file save, git commit, dependency change, and context file change handlers -- **Follow-up suggestions** — context-aware buttons after each command -- **`/diag` command** — diagnose extension issues with step-by-step timing +- **Follow-up suggestions**: context-aware buttons after each command +- **`/diag` command**: diagnose extension issues with step-by-step timing ### Configuration -- `ctx.executablePath` — path to the ctx CLI binary (default: `ctx`) +- `ctx.executablePath`: path to the ctx CLI binary (default: `ctx`) ## [Unreleased] diff --git a/editors/vscode/README.md b/editors/vscode/README.md index 5c97c7114..c940276f1 100644 --- a/editors/vscode/README.md +++ b/editors/vscode/README.md @@ -8,16 +8,16 @@ ## `ctx`: VS Code Chat Extension -A VS Code Chat Participant that brings [ctx](https://ctx.ist) — persistent -project context for AI coding sessions — directly into GitHub Copilot Chat. 
+A VS Code Chat Participant that brings [ctx](https://ctx.ist) (persistent +project context for AI coding sessions) directly into GitHub Copilot Chat. Type `@ctx` in the Chat view to access 45 slash commands, automatic context -hooks, a reminder status bar, and natural language routing — all powered by +hooks, a reminder status bar, and natural language routing, all powered by the ctx CLI. ## Quick Start -1. Install the extension (or build from source — see [Development](#development)) +1. Install the extension (or build from source; see [Development](#development)) 2. Open a project in VS Code 3. Open Copilot Chat and type `@ctx /init` @@ -79,7 +79,7 @@ The extension auto-downloads the ctx CLI binary if it isn't on your PATH. | `/check-links` | Audit local links in context files | | `/journal` | View or export journal entries | | `/consolidate` | Find duplicate entries across context files | -| `/audit` | Alignment audit — drift + convention check | +| `/audit` | Alignment audit: drift + convention check | | `/worktree` | Git worktree management (list, add) | ### Context Metadata @@ -111,7 +111,7 @@ Sub-routes for `/system`: `resources`, `doctor`, `bootstrap`, `stats`, ## Automatic Hooks The extension registers several VS Code event handlers that mirror -Claude Code's hook system. These run in the background — no user action +Claude Code's hook system. These run in the background; no user action needed. | Trigger | What Happens | @@ -119,7 +119,7 @@ needed. | **File save** | Runs task-completion check on non-`.context/` files | | **Git commit** | Notification prompting to add a Decision, Learning, run Verify, or Skip | | **`.context/` file change** | Refreshes reminders and regenerates `.github/copilot-instructions.md` | -| **Dependency file change** | Notification when `go.mod`, `package.json`, etc. change — offers `/map` | +| **Dependency file change** | Notification when `go.mod`, `package.json`, etc. 
change; offers `/map` | | **Every 5 minutes** | Updates reminder status bar and writes heartbeat timestamp | | **Extension activate** | Fires `session-event --type start` to ctx CLI | | **Extension deactivate** | Fires `session-event --type end` to ctx CLI | @@ -132,7 +132,7 @@ automatically. ## Natural Language -You can also type plain English after `@ctx` — the extension routes +You can also type plain English after `@ctx`: the extension routes common phrases to the correct handler: - "What should I work on next?" → `/next` @@ -169,7 +169,7 @@ For example: - VS Code 1.93+ - [GitHub Copilot Chat](https://marketplace.visualstudio.com/items?itemName=GitHub.copilot-chat) extension -- [ctx](https://ctx.ist) CLI on PATH — or let the extension auto-download it +- [ctx](https://ctx.ist) CLI on PATH, or let the extension auto-download it ## Configuration diff --git a/examples/demo/.context/AGENT_PLAYBOOK.md b/examples/demo/.context/AGENT_PLAYBOOK.md deleted file mode 100644 index 59413d8b4..000000000 --- a/examples/demo/.context/AGENT_PLAYBOOK.md +++ /dev/null @@ -1,179 +0,0 @@ -# Agent Playbook - -## Mental Model - -This system does not persist experience. - -- Each session is a fresh execution in a shared workshop. -- Work continuity comes from artifacts left on the bench. - -### Work → Reflect → Persist - -After completing meaningful work, follow this cycle: - -``` -┌─────────┐ ┌─────────┐ ┌─────────┐ -│ Work │ ──▶ │ Reflect │ ──▶ │ Persist │ ──▶ (continue) -└─────────┘ └─────────┘ └─────────┘ - │ - ▼ - Did I just... - • Complete a task? - • Make a decision? - • Learn something? - • Hit a milestone? -``` - -**Don't wait for session end** — it may never come cleanly. Persist as you go. - -## Invoking ctx - -Always use `ctx` from PATH: -```bash -ctx status # ✓ correct -ctx agent # ✓ correct -./dist/ctx # ✗ avoid hardcoded paths -go run ./cmd/ctx # ✗ avoid unless developing ctx itself -``` - -Check with `which ctx` if unsure whether it's installed. 
- -## Context Readback - -Before starting any work, read the required context files and confirm to the -user: "I have read the required context files and I'm following project -conventions." Do not begin implementation until you have done so. - -## Read Order - -1. CONSTITUTION.md — Hard rules, NEVER violate -2. TASKS.md — What to work on next -3. CONVENTIONS.md — How to write code -4. ARCHITECTURE.md — Where things go -5. DECISIONS.md — Why things are the way they are -6. LEARNINGS.md — Gotchas to avoid -7. GLOSSARY.md — Correct terminology - -## How Your Memory Works - -When asked "Do you remember?" or similar: - -1. **Read silently first**: TASKS.md, DECISIONS.md, LEARNINGS.md, and - run `ctx journal source --limit 5` for recent history. Do this BEFORE composing a response. -2. **Respond with a structured readback**: - - **Last session**: most recent session topic and date - - **Active work**: pending or in-progress tasks - - **Recent context**: 1-2 recent decisions or learnings - - **Next step**: offer to continue or ask what to focus on -3. **Be honest about mechanism only if asked** — the user wants results, - not disclaimers - -**Wrong**: "I don't have memory, but I can check files. Want me to?" -**Right**: *reads files silently* → "Last session (Jan 15, database timeouts): -we investigated connection pool exhaustion. OAuth2 login is in progress -with 3 remaining tasks in Phase 2. Want to continue with that?" - -The context IS your memory. It's stored externally in files, but that's an -implementation detail. Load it and use it — don't lead with caveats. 
- -## When to Update Memory - -| Event | Action | -|-----------------------------|-----------------------| -| Made architectural decision | Add to DECISIONS.md | -| Discovered gotcha/bug | Add to LEARNINGS.md | -| Established new pattern | Add to CONVENTIONS.md | -| Completed task | Mark [x] in TASKS.md | - -## Proactive Context Persistence - -**Don't wait for session end** — persist context at natural milestones. - -### Milestone Triggers - -Offer to persist context when you: - -| Milestone | Action | -|------------------------------------|-------------------------------------------------| -| Complete a task | Mark done in TASKS.md, offer to add learnings | -| Make an architectural decision | `ctx add decision "..."` | -| Discover a gotcha or bug | `ctx add learning "..."` | -| Finish a significant code change | Offer to summarize what was done | -| Encounter unexpected behavior | Document it before moving on | -| Resolve a tricky debugging session | Capture the root cause and fix | - -### Self-Check Prompt - -Periodically ask yourself: - -> "If this session ended right now, would the next session know what happened?" - -If no — persist something before continuing. - -### Task Lifecycle Timestamps - -Track task progress with timestamps for session correlation: - -```markdown -- [ ] Implement feature X #added:2026-01-25-220332 -- [ ] Fix bug Y #added:2026-01-25-220332 #started:2026-01-25-221500 -- [x] Refactor Z #added:2026-01-25-200000 #started:2026-01-25-210000 -``` - -| Tag | When to Add | Format | -|------------|------------------------------------------|----------------------| -| `#added` | Auto-added by `ctx add task` | `YYYY-MM-DD-HHMMSS` | -| `#started` | When you begin working on the task | `YYYY-MM-DD-HHMMSS` | - -## How to Avoid Hallucinating Memory - -Never assume. If you don't see it in files, you don't know it. 
- -- Don't claim "we discussed X" without file evidence -- Don't invent history - check context files and `ctx journal source` for actual discussions -- If uncertain, say "I don't see this documented" -- Trust files over intuition - ---- - -## Context Anti-Patterns - -### Stale Context - -**Problem**: Context files become outdated and misleading. - -**Solution**: Update context as part of completing work, not as a separate task. -Run `ctx drift` periodically to detect staleness. - -### Context Sprawl - -**Problem**: Information scattered across multiple locations. - -**Solution**: Single source of truth for each type of information. -Use the defined file structure; resist creating new document types. - -### Implicit Context - -**Problem**: Relying on knowledge not captured in artifacts. - -**Solution**: If you reference something repeatedly, add it to the appropriate file. -If this session ended now, would the next session know what you know? - ---- - -## Context Validation Checklist - -Before starting significant work, validate context is current: - -### Quick Check (Every Session) -- [ ] TASKS.md reflects current priorities -- [ ] No obvious staleness in files you'll reference -- [ ] Recent history reviewed for relevant context (via `ctx journal source`) - -### Deep Check (Weekly or Before Major Work) -- [ ] CONSTITUTION.md rules still apply -- [ ] ARCHITECTURE.md matches actual structure -- [ ] CONVENTIONS.md patterns match code -- [ ] DECISIONS.md has no superseded entries unmarked -- [ ] LEARNINGS.md gotchas still relevant -- [ ] Run `ctx drift` and address warnings diff --git a/examples/demo/.context/ARCHITECTURE.md b/examples/demo/.context/ARCHITECTURE.md deleted file mode 100644 index 7ff400f2e..000000000 --- a/examples/demo/.context/ARCHITECTURE.md +++ /dev/null @@ -1,100 +0,0 @@ -# Architecture - -System overview and component organization. 
- -## High-Level Architecture - -``` -┌─────────────────────────────────────────────────────────────┐ -│ Clients │ -│ (Web App, Mobile App, CLI) │ -└─────────────────────────┬───────────────────────────────────┘ - │ - ▼ -┌─────────────────────────────────────────────────────────────┐ -│ Load Balancer │ -│ (nginx / AWS ALB) │ -└─────────────────────────┬───────────────────────────────────┘ - │ - ▼ -┌─────────────────────────────────────────────────────────────┐ -│ API Server │ -│ (Go / net/http) │ -│ ┌─────────────┐ ┌─────────────┐ ┌─────────────────────┐ │ -│ │ Handlers │ │ Services │ │ Repositories │ │ -│ └─────────────┘ └─────────────┘ └─────────────────────┘ │ -└─────────────────────────┬───────────────────────────────────┘ - │ - ┌───────────────┼───────────────┐ - ▼ ▼ ▼ - ┌───────────┐ ┌───────────┐ ┌───────────────┐ - │ PostgreSQL│ │ Redis │ │ Object Store │ - │ (primary) │ │ (cache) │ │ (S3) │ - └───────────┘ └───────────┘ └───────────────┘ -``` - -## Directory Structure - -``` -. -├── cmd/ -│ ├── api/ # API server entrypoint -│ └── worker/ # Background worker entrypoint -├── internal/ -│ ├── handler/ # HTTP handlers -│ ├── service/ # Business logic -│ ├── repository/ # Data access -│ └── model/ # Domain types -├── pkg/ # Shared libraries (importable) -├── migrations/ # Database migrations -├── docs/ # Documentation -└── .context/ # AI context files -``` - -## Key Components - -### API Server (`cmd/api`) -- Handles HTTP requests -- Validates input, calls services, returns responses -- Stateless — all state in database or cache - -### Services (`internal/service`) -- Contains business logic -- Orchestrates multiple repositories -- Enforces business rules - -### Repositories (`internal/repository`) -- Data access layer -- One repository per aggregate root -- Handles database queries and caching - -## Key Patterns - -### Repository Pattern -Data access is abstracted through repositories. Business logic never -directly queries the database. 
- -### Dependency Injection -All dependencies are injected through constructors, making testing -easier and components more modular. - -### Event-Driven Updates -The system uses an event bus for decoupled component communication. -Events are published when state changes, and interested components -subscribe to relevant events. - -## Data Flow - -1. Request arrives at handler -2. Handler validates input, extracts user context -3. Handler calls service with validated data -4. Service applies business logic, calls repositories -5. Repository reads/writes to database -6. Response flows back up the stack - -## Scaling Strategy - -- **Horizontal**: Add more API server instances behind load balancer -- **Database**: Read replicas for read-heavy workloads -- **Cache**: Redis for session data and frequently accessed records -- **Background work**: Separate worker processes for async jobs diff --git a/examples/demo/.context/CONSTITUTION.md b/examples/demo/.context/CONSTITUTION.md deleted file mode 100644 index c256e9952..000000000 --- a/examples/demo/.context/CONSTITUTION.md +++ /dev/null @@ -1,88 +0,0 @@ -# Constitution - -These rules are INVIOLABLE. If a task requires violating these, the -task is wrong. - -## Completion Over Motion - -Work is only complete when it is **fully done**, not when progress -has been made. - -- The requested outcome must be delivered end-to-end. -- Partial progress is not completion. -- No half measures. - -Do not: -- Leave broken or inconsistent states -- Deliver work that requires the user to "finish it later" - -If you start something, you own it, you finish it. - ---- - -## No Excuse Generation - -**Never default to deferral.** - -Your goal is to satisfy the user's intent, not to complete a narrow -interpretation of the task. 
- -Do not justify incomplete work with statements like: - -- "Let's continue this later" -- "This is out of scope" -- "I can create a follow-up task" -- "This will take too long" -- "Another system caused this" -- "This part is not mine" -- "We are running out of context window" - -Constraints may exist, but they do not excuse incomplete delivery. - -- External systems, prior code, or other agents are not valid excuses -- Inconsistencies must be resolved, not explained away - ---- - -## No Broken Windows - -Leave the system in a better state than you found it. - -- Fix obvious issues when encountered -- Do not introduce temporary hacks without resolving them -- Do not normalize degraded quality - ---- - -## Security Invariants - -- [ ] Never commit secrets, tokens, API keys, or credentials -- [ ] Never store customer/user data in context files -- [ ] All user input must be validated and sanitized - -## Quality Invariants - -- [ ] All code must pass tests before commit -- [ ] No TODO comments in main branch (move to TASKS.md) -- [ ] Breaking API changes require deprecation period - -## Process Invariants - -- [ ] All architectural changes require a decision record in DECISIONS.md - -## TASKS.md Structure Invariants - -TASKS.md must remain a replayable checklist. Uncheck all items and re-run -the loop = verify/redo all tasks in order. 
- -- [ ] **Never move tasks** — tasks stay in their Phase section permanently -- [ ] **Never remove Phase headers** — Phase labels provide structure and order -- [ ] **Never delete tasks** — mark as `[x]` completed, or `[-]` skipped with reason -- [ ] **Use inline labels for status** — add `#in-progress` to task text, don't move it -- [ ] **No "In Progress" sections** — these encourage moving tasks -- [ ] **Ask before restructuring** — if structure changes seem needed, ask the user first - -## Context Preservation Invariants - -- [ ] **Archival is allowed, deletion is not** — use `ctx task archive` to move completed tasks, never delete context history -- [ ] **Archive preserves structure** — archived tasks keep their Phase headers for traceability diff --git a/examples/demo/.context/CONVENTIONS.md b/examples/demo/.context/CONVENTIONS.md deleted file mode 100644 index 7f742f390..000000000 --- a/examples/demo/.context/CONVENTIONS.md +++ /dev/null @@ -1,83 +0,0 @@ -# Conventions - -Coding standards and patterns used in this project. - -## Naming - -- Use camelCase for variables and functions -- Use PascalCase for types and interfaces -- Use SCREAMING_SNAKE_CASE for constants - -## Code Style - -- Prefer early returns over nested conditionals -- Maximum line length: 100 characters -- One component per file - -## Patterns - -### Error Handling - -Always return errors, never panic in library code: - -```go -// ✓ Correct -func ProcessData(data []byte) (Result, error) { - if len(data) == 0 { - return Result{}, fmt.Errorf("empty data") - } - // ... -} - -// ✗ Wrong -func ProcessData(data []byte) Result { - if len(data) == 0 { - panic("empty data") // Never panic in libraries - } - // ... -} -``` - -Wrap errors with context: - -```go -if err != nil { - return fmt.Errorf("processing user %s: %w", userID, err) -} -``` - -### Configuration - -Load order (highest priority first): -1. Environment variables -2. Config file (config.yaml) -3. 
Default values - -Log config source at startup for debuggability. - -## Testing - -- Test files adjacent to source files (`foo.go` → `foo_test.go`) -- Use table-driven tests for multiple cases -- Mock external dependencies, never call real services in tests - -## Git Practices - -- Commit messages follow Conventional Commits format -- Feature branches: `feature/` -- Bug fixes: `fix/` -- All PRs require at least one approval - -## Documentation - -### Doc-Impact Rule - -When modifying code that affects user-facing behavior, update the corresponding -documentation: - -| Code Change | Doc Update Required | -|--------------------------|------------------------| -| API endpoint changes | `docs/api.md` | -| CLI command changes | `docs/cli.md` | -| Configuration changes | `docs/configuration.md`| -| New features | `README.md` | diff --git a/examples/demo/.context/DECISIONS.md b/examples/demo/.context/DECISIONS.md deleted file mode 100644 index db982f540..000000000 --- a/examples/demo/.context/DECISIONS.md +++ /dev/null @@ -1,87 +0,0 @@ -# Decisions - -Architectural decisions with rationale and consequences. - ---- - -## [2026-01-05-110000] Use PostgreSQL for Primary Database - -**Context**: Needed to choose a database for the application. Options were -PostgreSQL, MySQL, and MongoDB. - -**Decision**: PostgreSQL - -**Rationale**: -- Strong ACID compliance for financial transactions -- Excellent JSON support for flexible schema needs -- Team has existing PostgreSQL expertise -- Rich ecosystem of tools and extensions - -**Consequence**: -- Need to manage schema migrations explicitly -- Requires more upfront schema design than document stores -- Horizontal scaling requires additional tooling (Citus, read replicas) - ---- - -## [2026-01-08-140000] JWT for API Authentication - -**Context**: Needed to choose authentication mechanism for the REST API. -Options were session cookies, JWT tokens, and API keys. 
- -**Decision**: JWT tokens with short expiry + refresh tokens - -**Rationale**: -- Stateless authentication scales horizontally without session storage -- Works well for both web and mobile clients -- Can embed user claims to reduce database lookups -- Industry standard with good library support - -**Consequence**: -- Cannot immediately revoke tokens (must wait for expiry) -- Need secure storage for refresh tokens -- Must implement token refresh flow in all clients -- Larger request payload than session cookies - ---- - -## [2026-01-10-090000] Use Go for API Server - -**Context**: Choosing a backend language for the API. Options were Go, -Node.js, and Python. - -**Decision**: Go - -**Rationale**: -- Excellent performance characteristics -- Strong typing catches bugs at compile time -- Simple deployment with single binary -- Great concurrency primitives for handling many connections - -**Consequence**: -- Smaller talent pool than JavaScript/Python -- Some team members need Go training -- Compile step required (vs interpreted languages) - ---- - -## [2026-01-12-160000] Monorepo Structure - -**Context**: Starting with multiple services (API, worker, CLI). Needed to -decide between monorepo and multi-repo structure. - -**Decision**: Monorepo with shared packages - -**Rationale**: -- Atomic commits across services -- Easier code sharing and refactoring -- Single CI/CD pipeline to maintain -- Better visibility into cross-service changes - -**Consequence**: -- Need tooling to handle partial builds (only changed services) -- Repository will grow large over time -- All developers need access to entire codebase -- Must establish clear package boundaries - ---- diff --git a/examples/demo/.context/GLOSSARY.md b/examples/demo/.context/GLOSSARY.md deleted file mode 100644 index 656254796..000000000 --- a/examples/demo/.context/GLOSSARY.md +++ /dev/null @@ -1,42 +0,0 @@ -# Glossary - -Domain terms, abbreviations, and project-specific vocabulary. 
- ---- - -## Terms - -- **Claim (JWT)**: A key-value pair embedded in a JWT token that - carries user identity or permission data without a database lookup. - -- **Connection pool**: A cache of database connections maintained - so they can be reused for future requests instead of opening new - connections each time. - -- **Handler**: A function that processes an incoming HTTP request - and returns a response. Lives in `internal/api/handlers/`. - -- **Migration**: A versioned SQL script that modifies the database - schema. Applied in order to bring the database to a target state. - -- **Middleware**: A function that wraps handlers to add cross-cutting - concerns (authentication, logging, rate limiting) without modifying - handler logic. - -- **Refresh token**: A long-lived token used to obtain new access - tokens without re-authenticating. Stored server-side in the database. - -- **Repository**: A data access layer that abstracts database - operations behind a Go interface. One repository per domain entity. - -- **Service**: A business logic layer between handlers and - repositories. Enforces domain rules and orchestrates operations - across multiple repositories. 
- -## Abbreviations - -- **API**: Application Programming Interface -- **JWT**: JSON Web Token (RFC 7519) -- **CRUD**: Create, Read, Update, Delete -- **DTO**: Data Transfer Object (request/response structs) -- **ORM**: Object-Relational Mapping (not used; raw SQL preferred) diff --git a/examples/demo/.context/LEARNINGS.md b/examples/demo/.context/LEARNINGS.md deleted file mode 100644 index 3813eae83..000000000 --- a/examples/demo/.context/LEARNINGS.md +++ /dev/null @@ -1,57 +0,0 @@ -# Learnings - - -| Date | Learning | -|------|----------| -| 2026-01-15 | Database connections need explicit timeouts | -| 2026-01-10 | Environment variables override config files | -| 2026-01-05 | Rate limiter must be per-user, not global | - - ---- - -## [2026-01-15-143022] Database connections need explicit timeouts - -**Context**: Production outage caused by database connection pool exhaustion. -Connections were hanging indefinitely waiting for slow queries. - -**Lesson**: Always set explicit timeouts on database connections: connect timeout, -read timeout, and write timeout. Default "no timeout" is never acceptable in production. - -**Application**: Add to connection config: -```go -db.SetConnMaxLifetime(5 * time.Minute) -db.SetConnMaxIdleTime(1 * time.Minute) -ctx, cancel := context.WithTimeout(ctx, 30*time.Second) -``` - ---- - -## [2026-01-10-091500] Environment variables override config files - -**Context**: Debugging why staging had different behavior than local. Config file -was correct, but an old environment variable was overriding it. - -**Lesson**: Document the precedence order clearly: ENV > config file > defaults. -When debugging config issues, always check environment variables first. 
- -**Application**: Add config source logging at startup: -``` -Config loaded: database.host=localhost (source: ENV) -Config loaded: database.port=5432 (source: config.yaml) -``` - ---- - -## [2026-01-05-160030] Rate limiter must be per-user, not global - -**Context**: Implemented global rate limiter (100 req/sec total). One heavy user -could starve all other users. - -**Lesson**: Rate limiting should be per-user (or per-API-key) to ensure fair -resource allocation. Global limits are only useful as a last-resort circuit breaker. - -**Application**: Use user ID or API key as the rate limiter bucket key, not a -single global bucket. - ---- diff --git a/examples/demo/.context/TASKS.md b/examples/demo/.context/TASKS.md deleted file mode 100644 index 000ef7886..000000000 --- a/examples/demo/.context/TASKS.md +++ /dev/null @@ -1,26 +0,0 @@ -# Tasks - -Current work items, organized by phase. Tasks stay in their phase permanently. - -## Phase 1: Foundation - -- [x] Initial project setup #added:2026-01-01-090000 -- [x] Database schema design #added:2026-01-01-090000 -- [x] Core API scaffolding #added:2026-01-01-090000 - -## Phase 2: Authentication - -- [x] Implement user registration #added:2026-01-04-100000 -- [ ] Implement OAuth2 login #added:2026-01-04-100000 #in-progress -- [ ] Add session management #added:2026-01-04-100000 - -## Phase 3: API Features - -- [ ] Add rate limiting to API endpoints #added:2026-01-10-090000 -- [ ] Write integration tests for payment flow #added:2026-01-10-090000 - -## Phase 4: Infrastructure - -- [ ] Add support for WebSocket connections #added:2026-01-15-140000 -- [ ] Implement caching layer #added:2026-01-15-140000 -- [ ] Set up monitoring and alerting #added:2026-01-15-140000 diff --git a/examples/demo/.context/sessions/2026-01-15-143000-database-timeout-investigation.md b/examples/demo/.context/sessions/2026-01-15-143000-database-timeout-investigation.md deleted file mode 100644 index 4ffacfbb9..000000000 --- 
a/examples/demo/.context/sessions/2026-01-15-143000-database-timeout-investigation.md +++ /dev/null @@ -1,64 +0,0 @@ -# Session: Database Timeout Investigation - -**Date**: 2026-01-15 -**start_time**: 2026-01-15-140000 -**end_time**: 2026-01-15-160000 -**Topic**: Investigating production database connection issues -**Type**: bugfix - ---- - -## Summary - -Investigated production outage caused by database connection pool exhaustion. -Found that connections were hanging indefinitely on slow queries. Implemented -explicit timeouts and connection lifecycle management. - -## Problem - -- Production API started returning 503 errors -- Database connection pool was exhausted (all 100 connections in use) -- Connections were stuck waiting for queries that never returned -- No timeout configured on database connections - -## Root Cause - -Default Go database driver has no timeout. When the database is slow or -unresponsive, connections wait forever, eventually exhausting the pool. - -## Fix Applied - -```go -// Before: no timeouts -db, err := sql.Open("postgres", connStr) - -// After: explicit lifecycle management -db, err := sql.Open("postgres", connStr) -db.SetConnMaxLifetime(5 * time.Minute) -db.SetConnMaxIdleTime(1 * time.Minute) -db.SetMaxOpenConns(100) -db.SetMaxIdleConns(10) - -// Query-level timeouts -ctx, cancel := context.WithTimeout(ctx, 30*time.Second) -defer cancel() -rows, err := db.QueryContext(ctx, query) -``` - -## Key Decisions - -- Set connection max lifetime to 5 minutes (prevents stale connections) -- Set query timeout to 30 seconds (fail fast on slow queries) -- Added circuit breaker for database calls - -## Tasks for Next Session - -- Add monitoring for connection pool metrics -- Set up alerting for connection pool utilization > 80% -- Review other services for similar timeout issues - -## Files Changed - -- `internal/repository/db.go` -- `internal/config/database.go` -- `docs/operations.md` diff --git a/examples/demo/PROMPT.md b/examples/demo/PROMPT.md 
deleted file mode 100644 index 26afb63ee..000000000 --- a/examples/demo/PROMPT.md +++ /dev/null @@ -1,94 +0,0 @@ -# PROMPT.md — Demo Project - -## CORE PRINCIPLE - -You have NO conversational memory. Your memory IS the file system. -Your goal: advance the project by exactly ONE task, update context, and exit. - ---- - -## PROJECT CONTEXT - -**Project**: Demo API Server -**Language**: Go 1.22+ -**Current Focus**: Phase 2 — Authentication - ---- - -## PHASE 0: ORIENT - -1. Read `.context/TASKS.md` — Current work items -2. Read `.context/CONSTITUTION.md` — Rules to never violate -3. Read `.context/CONVENTIONS.md` — How to write code -4. Read relevant spec in `specs/` for the current task - ---- - -## PHASE 1: SELECT TASK - -1. Read `.context/TASKS.md` -2. Find the **first unchecked item** (line starting with `- [ ]`) -3. That is your ONE task for this iteration - -**IF NO UNCHECKED ITEMS:** -1. Run validation: `go build ./...`, `go test ./...` -2. If all pass, output `PHASE_COMPLETE` -3. If any fail, add fix task and continue - ---- - -## PHASE 2: EXECUTE - -1. **Read the spec** — Check `specs/` for detailed requirements -2. **Search first** — Don't assume code doesn't exist -3. **Implement ONE task** — Complete it fully. No placeholders. -4. **Follow conventions** — Check `.context/CONVENTIONS.md` - ---- - -## PHASE 3: VALIDATE - -After implementing, run: - -```bash -go build ./... # Must compile -go test ./... # Tests must pass -go vet ./... # No vet errors -``` - ---- - -## PHASE 4: UPDATE CONTEXT - -1. Mark completed task `[x]` in `.context/TASKS.md` -2. If you made an architectural decision → add to `.context/DECISIONS.md` -4. If you learned a gotcha → add to `.context/LEARNINGS.md` - -**EXIT.** Do not continue to next task. The loop will restart you. - ---- - -## CRITICAL CONSTRAINTS - -### ONE TASK ONLY -Complete ONE task, then stop. The loop handles continuation. - -### NO CHAT -Never ask questions. If blocked: -1. Add reason to task in `.context/TASKS.md` -2. 
Move to next task - -### MEMORY IS THE FILESYSTEM -You will not remember this conversation. Write everything important to files. - ---- - -## REFERENCE: SPECS - -| Spec | Description | -|------|-------------| -| `specs/oauth2.md` | OAuth2 authentication implementation | - ---- - -Now read `.context/TASKS.md` and begin. diff --git a/examples/demo/README.md b/examples/demo/README.md deleted file mode 100644 index d5a020a62..000000000 --- a/examples/demo/README.md +++ /dev/null @@ -1,126 +0,0 @@ -![ctx](../../assets/ctx-banner.png) - -# Demo Project - -This is a sample project demonstrating Context (ctx) structure and best practices. - -## Quick Start - -```bash -# View context status -ctx status - -# Get AI-ready context packet -ctx agent - -# Add a new task -ctx add task "Implement feature X" - -# Mark a task complete -ctx tasks complete "feature X" - -# Check for stale context -ctx drift -``` - -## Context Files - -The `.context/` directory contains markdown files that provide persistent -context for AI coding assistants: - -| File | Purpose | -|----------------------|---------------------------------------------------| -| `AGENT_PLAYBOOK.md` | **Read first** — How agents should use this system | -| `CONSTITUTION.md` | Inviolable rules — NEVER violate these | -| `TASKS.md` | Current work items with phases and timestamps | -| `CONVENTIONS.md` | Coding standards and patterns | -| `ARCHITECTURE.md` | System overview and component layout | -| `DECISIONS.md` | Technical decisions with rationale | -| `LEARNINGS.md` | Gotchas, tips, lessons learned | -| `GLOSSARY.md` | Domain terms and abbreviations | - -## Key Concepts - -### Agent Playbook - -`AGENT_PLAYBOOK.md` is the bootstrap file for AI agents. It explains: -- The mental model (memory = files, not conversation) -- Read order for context files -- When and how to persist learnings/decisions -- How to avoid hallucinating memory - -### Phase-Based Tasks - -Tasks in `TASKS.md` stay in their phase permanently. 
Use inline labels -(`#in-progress`) instead of moving tasks between sections: - -```markdown -## Phase 2: Authentication - -- [x] Implement user registration #added:2026-01-04-100000 -- [ ] Implement OAuth2 login #added:2026-01-04-100000 #in-progress -- [ ] Add session management #added:2026-01-04-100000 -``` - -### Structured Entries - -Learnings and decisions follow structured formats with timestamps: - -```markdown -## [2026-01-15-143022] Database connections need explicit timeouts - -**Context**: What situation led to this learning - -**Lesson**: What we learned - -**Application**: How to apply it going forward -``` - -## Adding Context - -```bash -# Add a learning with full structure -ctx add learning "Title" \ - --context "What happened" \ - --lesson "What we learned" \ - --application "How to apply it" - -# Add a decision with rationale -ctx add decision "Title" \ - --context "What prompted this" \ - --rationale "Why this choice" \ - --consequence "What changes" - -# Add a task -ctx add task "Implement feature X" -``` - -## Ralph Loop Integration - -This demo includes Ralph Loop infrastructure for iterative AI development: - -| File | Purpose | -|------|---------| -| `PROMPT.md` | Directive for AI agents — defines the work loop | -| `specs/` | Detailed specifications for features | - -The Ralph Loop pattern: -1. AI reads `PROMPT.md` to understand the workflow -2. Picks ONE task from `.context/TASKS.md` -3. Reads relevant spec from `specs/` for requirements -4. Implements the task -5. Updates context files -6. Exits — the loop restarts with fresh context - -This is separate from but complementary to ctx: -- **ctx** = context persistence (`.context/`) -- **Ralph Loop** = iterative AI workflow (`PROMPT.md` + `specs/`) - -## Session History - -Past sessions can be browsed with `ctx journal source` and inspected with -`ctx journal source --show `. 
Session transcripts are automatically captured by -Claude Code and can be imported to a journal with `ctx journal import`. - -This allows future sessions to understand past context without relying on -conversation memory. diff --git a/examples/demo/specs/oauth2.md b/examples/demo/specs/oauth2.md deleted file mode 100644 index 2349a7010..000000000 --- a/examples/demo/specs/oauth2.md +++ /dev/null @@ -1,94 +0,0 @@ -# OAuth2 Authentication Spec - -## Overview - -Implement OAuth2 authentication supporting Google and GitHub providers. - -## Requirements - -### Functional - -1. **Provider Support** - - Google OAuth2 - - GitHub OAuth2 - - Extensible provider interface for future additions - -2. **Flow** - - User clicks "Sign in with Google/GitHub" - - Redirect to provider's authorization page - - Provider redirects back with authorization code - - Exchange code for access token - - Fetch user profile from provider - - Create or update local user record - - Issue JWT session token - -3. **User Linking** - - If email already exists, link OAuth identity to existing account - - If new email, create new user account - - Store provider ID for future logins - -### Non-Functional - -- Token exchange must complete in < 2 seconds -- Handle provider downtime gracefully (show user-friendly error) -- Log all OAuth events for security auditing - -## API Endpoints - -``` -GET /auth/oauth/{provider} # Initiate OAuth flow -GET /auth/oauth/{provider}/callback # Handle OAuth callback -POST /auth/logout # Revoke session -``` - -## Data Model - -```go -type OAuthIdentity struct { - ID string `json:"id"` - UserID string `json:"user_id"` - Provider string `json:"provider"` // "google", "github" - ProviderID string `json:"provider_id"` - Email string `json:"email"` - CreatedAt time.Time `json:"created_at"` -} -``` - -## Configuration - -```yaml -oauth: - google: - client_id: ${GOOGLE_CLIENT_ID} - client_secret: ${GOOGLE_CLIENT_SECRET} - redirect_url: 
https://example.com/auth/oauth/google/callback - github: - client_id: ${GITHUB_CLIENT_ID} - client_secret: ${GITHUB_CLIENT_SECRET} - redirect_url: https://example.com/auth/oauth/github/callback -``` - -## Security Considerations - -- Use `state` parameter to prevent CSRF attacks -- Validate redirect URLs against allowlist -- Never log access tokens or client secrets -- Store only necessary user data from provider - -## Testing - -- Unit tests for token exchange logic -- Integration tests with mock OAuth provider -- E2E test with real providers in staging environment - -## Tasks - -These map to `.context/TASKS.md` Phase 2: - -1. [ ] Create OAuth provider interface -2. [ ] Implement Google OAuth provider -3. [ ] Implement GitHub OAuth provider -4. [ ] Add OAuth callback handler -5. [ ] Implement user linking logic -6. [ ] Add OAuth configuration loading -7. [ ] Write integration tests diff --git a/go.mod b/go.mod index 85ba021f0..0af61fa74 100644 --- a/go.mod +++ b/go.mod @@ -6,7 +6,7 @@ require ( github.com/hashicorp/raft v1.7.3 github.com/hashicorp/raft-boltdb/v2 v2.3.1 github.com/spf13/cobra v1.10.2 - golang.org/x/tools v0.43.0 + golang.org/x/tools v0.44.0 google.golang.org/grpc v1.80.0 gopkg.in/yaml.v3 v3.0.1 ) @@ -25,11 +25,11 @@ require ( github.com/mattn/go-isatty v0.0.14 // indirect github.com/spf13/pflag v1.0.10 // indirect go.etcd.io/bbolt v1.3.5 // indirect - golang.org/x/mod v0.34.0 // indirect - golang.org/x/net v0.52.0 // indirect + golang.org/x/mod v0.35.0 // indirect + golang.org/x/net v0.53.0 // indirect golang.org/x/sync v0.20.0 // indirect - golang.org/x/sys v0.42.0 // indirect - golang.org/x/text v0.35.0 // indirect + golang.org/x/sys v0.43.0 // indirect + golang.org/x/text v0.36.0 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect google.golang.org/protobuf v1.36.11 // indirect ) diff --git a/go.sum b/go.sum index 9047a3a2b..d84977914 100644 --- a/go.sum +++ b/go.sum @@ -170,16 +170,16 @@ 
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/mod v0.34.0 h1:xIHgNUUnW6sYkcM5Jleh05DvLOtwc6RitGHbDk4akRI= -golang.org/x/mod v0.34.0/go.mod h1:ykgH52iCZe79kzLLMhyCUzhMci+nQj+0XkbXpNYtVjY= +golang.org/x/mod v0.35.0 h1:Ww1D637e6Pg+Zb2KrWfHQUnH2dQRLBQyAtpr/haaJeM= +golang.org/x/mod v0.35.0/go.mod h1:+GwiRhIInF8wPm+4AoT6L0FA1QWAad3OMdTRx4tFYlU= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.52.0 h1:He/TN1l0e4mmR3QqHMT2Xab3Aj3L9qjbhRm78/6jrW0= -golang.org/x/net v0.52.0/go.mod h1:R1MAz7uMZxVMualyPXb+VaqGSa3LIaUqk0eEt3w36Sw= +golang.org/x/net v0.53.0 h1:d+qAbo5L0orcWAr0a9JweQpjXF19LMXJE8Ey7hwOdUA= +golang.org/x/net v0.53.0/go.mod h1:JvMuJH7rrdiCfbeHoo3fCQU24Lf5JJwT9W3sJFulfgs= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= 
@@ -205,15 +205,15 @@ golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.42.0 h1:omrd2nAlyT5ESRdCLYdm3+fMfNFE/+Rf4bDIQImRJeo= -golang.org/x/sys v0.42.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw= +golang.org/x/sys v0.43.0 h1:Rlag2XtaFTxp19wS8MXlJwTvoh8ArU6ezoyFsMyCTNI= +golang.org/x/sys v0.43.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.35.0 h1:JOVx6vVDFokkpaq1AEptVzLTpDe9KGpj5tR4/X+ybL8= -golang.org/x/text v0.35.0/go.mod h1:khi/HExzZJ2pGnjenulevKNX1W67CUy0AsXcNubPGCA= +golang.org/x/text v0.36.0 h1:JfKh3XmcRPqZPKevfXVpI1wXPTqbkE5f7JA92a55Yxg= +golang.org/x/text v0.36.0/go.mod h1:NIdBknypM8iqVmPiuco0Dh6P5Jcdk8lJL0CUebqK164= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.43.0 h1:12BdW9CeB3Z+J/I/wj34VMl8X+fEXBxVR90JeMX5E7s= -golang.org/x/tools v0.43.0/go.mod h1:uHkMso649BX2cZK6+RpuIPXS3ho2hZo4FVwfoy1vIk0= +golang.org/x/tools v0.44.0 h1:UP4ajHPIcuMjT1GqzDWRlalUEoY+uzoZKnhOjbIPD2c= +golang.org/x/tools v0.44.0/go.mod h1:KA0AfVErSdxRZIsOVipbv3rQhVXTnlU6UhKxHd1seDI= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gonum.org/v1/gonum v0.17.0 h1:VbpOemQlsSMrYmn7T2OUvQ4dqxQXU+ouZFQsZOx50z4= gonum.org/v1/gonum v0.17.0/go.mod h1:El3tOrEuMpv2UdMrbNlKEh9vd86bmQ6vqIcDwxEOc1E= diff --git a/hack/detect-ai-typography.sh b/hack/detect-ai-typography.sh index 
be670d9df..9b30619a7 100755 --- a/hack/detect-ai-typography.sh +++ b/hack/detect-ai-typography.sh @@ -79,15 +79,56 @@ fi # ```` = quad backtick. AI wraps code fences in four-backtick # blocks; zensical doesn't support them. Triple is the # project maximum. -PATTERN='\x{2013}|\x{2014}|\x{2018}|\x{2019}|\x{201C}|\x{201D}|(?/dev/null 2>&1; then + GREP_MODE="-P" + PATTERN='\x{2013}|\x{2014}|\x{2018}|\x{2019}|\x{201C}|\x{201D}|(? silent false negatives. + PATTERN="${ENDASH}|${EMDASH}|${LSQ}|${RSQ}|${LDQ}|${RDQ}| -- |\`\`\`\`" +fi + +# Directories pruned before find descends into them. Scanning these +# wastes I/O on files whose typography is not project-authored (git +# metadata, vendored code, project-scoped context that may legitimately +# contain AI-generated journal entries). Add directory basenames here; +# they match at any depth beneath DIR via "*/". +EXCLUDE_DIRS=( + ".context" + "specs" + ".claude" + "ideas" + ".git" + "node_modules" + "vendor" +) -# Files where typographic punctuation is intentional. -# Add glob patterns here to skip specific paths. +# Files where typographic punctuation is intentional. These use shell +# glob (case-statement) matching against the full path as find emits +# it, so patterns typically need a leading "*/" to match at any depth. EXCLUDE_PATTERNS=( "*/config/token/delim.go" # Intentional delimiter constants (EmDash, MetaSeparator) "*_test.go" # Test files may contain intentional typographic literals ) +# Build find's -not -path arguments from EXCLUDE_DIRS. +NOT_PATH_ARGS=() +for d in "${EXCLUDE_DIRS[@]}"; do + NOT_PATH_ARGS+=(-not -path "*/${d}/*") +done + file_count=0 hit_count=0 @@ -104,7 +145,7 @@ while IFS= read -r -d '' file; do # These are legitimate uses, not AI-generated prose. 
CONST_FILTER='^\s*const\s+\w+\s*=\s*"[^"]*"$' - matches=$(grep -P "$PATTERN" "$file" 2>/dev/null | grep -cvP "$CONST_FILTER" 2>/dev/null || true) + matches=$(grep $GREP_MODE "$PATTERN" "$file" 2>/dev/null | grep -cvE "$CONST_FILTER" 2>/dev/null || true) if [[ "$matches" -gt 0 ]]; then file_count=$((file_count + 1)) hit_count=$((hit_count + matches)) @@ -115,12 +156,12 @@ while IFS= read -r -d '' file; do else echo "" echo "--- $rel ($matches matches) ---" - grep -nP "$PATTERN" "$file" 2>/dev/null | grep -vP "$CONST_FILTER" | while IFS= read -r line; do + grep -n $GREP_MODE "$PATTERN" "$file" 2>/dev/null | grep -vE "$CONST_FILTER" | while IFS= read -r line; do echo " $line" done fi fi -done < <(find "$DIR" "${FIND_ARGS[@]}" -print0 | sort -z) +done < <(find "$DIR" "${FIND_ARGS[@]}" "${NOT_PATH_ARGS[@]}" -print0 | sort -z) echo "" if [[ "$file_count" -eq 0 ]]; then diff --git a/hack/fix-smart-quotes.sh b/hack/fix-smart-quotes.sh index deda33534..a1b4bdebc 100755 --- a/hack/fix-smart-quotes.sh +++ b/hack/fix-smart-quotes.sh @@ -1,5 +1,19 @@ #!/usr/bin/env bash -# Replace smart quotes and em-dashes with plain equivalents in all markdown files under docs/ +# Replace smart quotes and em-dashes with plain equivalents in all markdown +# files under docs/ +# +# Note that this is a quick hack, and it's not the best way to do this. +# +# Instead ask the agent to do semantic replacements as such: +# +# "Run ./hack/detect-ai-typography ; read every file fully and do +# semantic, editorial changes. DO NOT BLINDLY CLEAN THE FILES UP. +# Understand the context and do semantic replacements. -- if you are going to +# write a script to blindly sed them STOP RIGHT THERE! -- sometimes you might +# need to replace it with a ":"; sometimes with a parenthesis, sometimes with a +# regular dash, sometimes rephrasing. -- Your constitution forbids you +# from being lazy!" 
+ set -euo pipefail DOCS_DIR="${1:-docs}" diff --git a/hack/title-case-headings.py b/hack/title-case-headings.py new file mode 100755 index 000000000..c58ef181d --- /dev/null +++ b/hack/title-case-headings.py @@ -0,0 +1,259 @@ +#!/usr/bin/env python3 +# / ctx: https://ctx.ist +# ,'`./ do you remember? +# `.,'\ +# \ Copyright 2026-present Context contributors. +# SPDX-License-Identifier: Apache-2.0 +"""Title-case headings (H1-H6) and admonition titles in Markdown files. + +Style: AP-leaning. Lowercase ALL articles, prepositions (any length), and +coordinating conjunctions when they appear MID-phrase. First word, last word, +and the first word after a colon are always capitalized. Subordinating +conjunctions (when, while, after, before, etc.) are capitalized. + +Protected verbatim: + - Backticked code spans `...` + - Markdown link URLs `(...)` immediately following `]` + - Markdown reference-style link labels `][label]` + - All-uppercase tokens of length >= 2 (acronyms) + - Mixed-case tokens like macOS, GitHub, JavaScript, JSONL + - Single uppercase letter labels (A, B, ... in 'Appendix A:') + - Brand 'ctx' always lowercase, including possessive 'ctx's' + - Version-number tokens (v0, v0.8.0) + +Skipped contexts: + - YAML frontmatter at file head + - Fenced code blocks ```...``` + +Usage: + hack/title-case-headings.py # dry-run, prints diffs + hack/title-case-headings.py --apply # write changes in place + + may be a single Markdown file or a directory (recursively scanned for +*.md). Exits non-zero if any changes are needed (in dry-run mode), so it's +safe to wire into CI. +""" +import re +import sys +import pathlib + +ARTICLES = {'a', 'an', 'the'} +PREPOSITIONS = { + # AP-strict; ambiguous words (after/before/since/until/past/near/down/up/off) + # excluded so they cap when they're conjunctions or adj/adv. 
+ 'about','above','across','against','along','among','around','as','at', + 'behind','below','beneath','beside','between','beyond','by','despite', + 'during','except','for','from','in','inside','into','like','of','on', + 'onto','out','outside','over','per','plus','regarding','than','through', + 'throughout','till','to','toward','under','underneath','unto','upon', + 'versus','via','vs','with','within','without', +} +COORD_CONJ = {'and','or','but','nor','so','yet','for'} + +LOWER_MID = ARTICLES | PREPOSITIONS | COORD_CONJ +BRAND_LOWER = {'ctx'} + +WORD_RE = re.compile(r"[A-Za-z][A-Za-z0-9'/]*") + +def title_case_word(word, force_cap=False): + if not word: + return word + # Hyphenated: each segment treated as own word; segments after the first + # are always capitalized (Chicago hyphen rule). + if '-' in word: + segs = word.split('-') + new = [title_case_word(segs[0], force_cap=force_cap)] + for s in segs[1:]: + new.append(title_case_word(s, force_cap=True)) + return '-'.join(new) + lw = word.lower() + # Brand and brand-with-suffix (ctx, ctx's, ctxs) + for brand in BRAND_LOWER: + if lw == brand: + return brand + if lw.startswith(brand) and len(lw) > len(brand): + tail = lw[len(brand):] + # Allow possessive or short plural-like suffix + if tail in ("'s", 's', "'") or (tail.startswith("'") and tail[1:].isalpha() and len(tail) <= 3): + return brand + word[len(brand):] + # Single uppercase letter label (e.g., 'A' in 'Appendix A') + if len(word) == 1 and word.isupper(): + return word + # Acronym already all-caps + if len(word) >= 2 and word.isupper(): + return word + # Version-number token: lowercase 'v' followed by digits (v0, v1, v0.8 etc.) 
+ if re.match(r'^v\d', word): + return word + # Mixed-case (interior caps): preserve (macOS, GitHub, JavaScript) + if any(c.isupper() for c in word[1:]): + return word + # Mid-phrase function word + if not force_cap and lw in LOWER_MID: + return lw + # Default: cap first letter + return word[0].upper() + word[1:] + +PROTECT_RE = re.compile( + r'(`[^`]*`)' # backtick code span + r'|(\][ ]*\([^)]*\))' # markdown inline link URL incl. ']' + r'|(\][ ]*\[[^\]]+\])' # markdown reference-style link label + # Brand tagline — italic lowercase, with or without quotes/punctuation. + r'|(\*do you remember\??\*)' + # CLI long-flag tokens (--keep-frontmatter, --keep-frontmatter=false) + r'|(--[a-z][a-z0-9_-]*(?:=\S+)?)' + # Slash-prefixed commands (/ctx-remember, /ctx-decision-add) + r'|(/[a-z][a-z0-9_-]*)' +) + +def split_protected(text): + pieces = [] + last = 0 + for m in PROTECT_RE.finditer(text): + if m.start() > last: + pieces.append(('plain', text[last:m.start()])) + pieces.append(('protected', m.group(0))) + last = m.end() + if last < len(text): + pieces.append(('plain', text[last:])) + return pieces + +def title_case_text(text): + pieces = split_protected(text) + total = 0 + for kind, t in pieces: + if kind == 'protected': + total += 1 + else: + total += len(WORD_RE.findall(t)) + if total == 0: + return text + overall = 0 + after_colon = False + out = [] + for kind, t in pieces: + if kind == 'protected': + overall += 1 + out.append(t) + continue + last = 0 + buf = [] + for m in WORD_RE.finditer(t): + literal = t[last:m.start()] + if re.search(r':\s*$', literal): + after_colon = True + buf.append(literal) + overall += 1 + is_first = (overall == 1) + is_last = (overall == total) + # Contraction tail: when the preceding literal ends with an + # apostrophe (e.g. after a backtick brand span like `ctx`'s), + # the word is a contraction suffix (s, ll, ve, t, re, d, m). + # Preserve as lowercase, never capitalize. 
+ is_contraction_tail = literal.endswith("'") and len(m.group(0)) <= 3 + # Filename extension: when the preceding literal ends with '.' + # and the word is a short lowercase token (md, sh, py, json, + # yaml, txt, ...), preserve as lowercase. + word_text = m.group(0) + is_ext_tail = ( + literal.endswith('.') + and len(word_text) <= 5 + and word_text.islower() + ) + force = (is_first or is_last or after_colon) and not is_contraction_tail and not is_ext_tail + if (is_contraction_tail or is_ext_tail) and not (is_first or is_last): + buf.append(word_text.lower()) + else: + buf.append(title_case_word(word_text, force_cap=force)) + after_colon = False + last = m.end() + trailing = t[last:] + if re.search(r':\s*$', trailing): + after_colon = True + buf.append(trailing) + out.append(''.join(buf)) + return ''.join(out) + +def process_file(path): + src = path.read_text(encoding='utf-8') + lines = src.split('\n') + in_fence = False + in_fm = False + out_lines = [] + changes = [] + for i, line in enumerate(lines): + if i == 0 and line.strip() == '---': + in_fm = True + out_lines.append(line); continue + if in_fm and line.strip() == '---': + in_fm = False + out_lines.append(line); continue + if in_fm: + out_lines.append(line); continue + if line.startswith('```'): + in_fence = not in_fence + out_lines.append(line); continue + if in_fence: + out_lines.append(line); continue + m = re.match(r'^(#{1,6})\s+(.*)$', line) + if m: + hashes, text = m.groups() + new_text = title_case_text(text.rstrip()) + new_line = f"{hashes} {new_text}" + if new_line != line: + changes.append((i + 1, line, new_line)) + out_lines.append(new_line); continue + am = re.match(r'^(\s*)([!?]{3})(\s+\w[\w-]*\s+)"([^"]+)"\s*$', line) + if am: + indent, marker, mid, title = am.groups() + new_title = title_case_text(title) + new_line = f'{indent}{marker}{mid}"{new_title}"' + if new_line != line: + changes.append((i + 1, line, new_line)) + out_lines.append(new_line); continue + out_lines.append(line) + return 
'\n'.join(out_lines), changes + +def iter_md_files(target: pathlib.Path): + if target.is_file(): + if target.suffix.lower() == '.md': + yield target + return + if target.is_dir(): + yield from sorted(target.rglob('*.md')) + +def main(): + args = sys.argv[1:] + if '-h' in args or '--help' in args or not args: + print(__doc__) + sys.exit(0 if args else 2) + apply = False + if '--apply' in args: + apply = True + args.remove('--apply') + if len(args) != 1: + print("error: expected exactly one argument", file=sys.stderr) + sys.exit(2) + target = pathlib.Path(args[0]) + if not target.exists(): + print(f"error: path not found: {target}", file=sys.stderr) + sys.exit(2) + total = 0 + files = 0 + for md in iter_md_files(target): + new_src, changes = process_file(md) + if changes: + files += 1 + total += len(changes) + for ln, old, new in changes: + print(f"{md}:{ln}") + print(f" - {old}") + print(f" + {new}") + if apply: + md.write_text(new_src, encoding='utf-8') + mode = 'APPLIED' if apply else 'dry-run' + print(f"\n=== {total} changes across {files} files ({mode}) ===") + sys.exit(0 if (apply or total == 0) else 1) + +if __name__ == '__main__': + main() diff --git a/internal/assets/claude/CLAUDE.md b/internal/assets/claude/CLAUDE.md index f37e37c70..1d40425dc 100644 --- a/internal/assets/claude/CLAUDE.md +++ b/internal/assets/claude/CLAUDE.md @@ -11,11 +11,15 @@ This project uses Context (`ctx`) for context persistence across sessions. ## On Session Start 1. **Run `ctx system bootstrap`**: CRITICAL, not optional. - This tells you where the context directory is. If it fails or returns - no context_dir, STOP and warn the user. + This tells you where the context directory is. + If it returns any error, relay the error output to the user + verbatim, point them at + https://ctx.ist/recipes/activating-context/ for setup, and STOP. + Do not try to activate, initialize, or otherwise recover: **those + are the user's decisions**. Wait for their next instruction. 2. 
**Read AGENT_PLAYBOOK.md** from the context directory: it explains how to use this system -3. **Run `ctx agent --budget 4000`** for a content summary +3. **Run `ctx agent`** for a content summary ## When Asked "Do You Remember?" @@ -41,7 +45,7 @@ Read them silently, then present what you found as recall, not as a search. ```bash # Get AI-optimized context packet (what you should know) -ctx agent --budget 4000 +ctx agent # Or see full status ctx status @@ -59,6 +63,26 @@ ctx status All files live in the context directory reported by `ctx system bootstrap`. +## Context Directory Lives at the Project Root + +The project root is the parent of `.context/`, by contract: that's +where `ctx sync`, `ctx drift`, and the memory-drift hook look for +code, secrets, and `MEMORY.md`. + +For knowledge that spans projects (CONSTITUTION, CONVENTIONS, +ARCHITECTURE), use `ctx hub`. + +Recommended layout: + +``` +~/WORKSPACE/my-project + ├── .git + ├── .context + ├── Makefile + └── specs + └── ... +``` + ## Hook Authority Instructions from PreToolUse hooks regarding `.context/` files are ALWAYS diff --git a/internal/assets/claude/hooks/hooks.json b/internal/assets/claude/hooks/hooks.json index ec59b7b7d..3ba2d70c3 100644 --- a/internal/assets/claude/hooks/hooks.json +++ b/internal/assets/claude/hooks/hooks.json @@ -3,55 +3,56 @@ "PreToolUse": [ { "matcher": ".*", - "hooks": [{"type": "command", "command": "ctx system context-load-gate"}] + "hooks": [{"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system context-load-gate"}] }, { "matcher": "Bash", - "hooks": [{"type": "command", "command": "ctx system block-non-path-ctx"}] + "hooks": [{"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system block-non-path-ctx"}] }, { "matcher": "Bash", - "hooks": [{"type": "command", "command": "ctx system qa-reminder"}] + "hooks": [{"type": "command", 
"command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system qa-reminder"}] }, { "matcher": "EnterPlanMode", - "hooks": [{"type": "command", "command": "ctx system specs-nudge"}] + "hooks": [{"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system specs-nudge"}] }, { "matcher": ".*", - "hooks": [{"type": "command", "command": "ctx agent --budget 8000 2>/dev/null || true"}] + "hooks": [{"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx agent --budget 8000 2>/dev/null || true"}] } ], "PostToolUse": [ { "matcher": "Bash", - "hooks": [{"type": "command", "command": "ctx system post-commit"}] + "hooks": [{"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system post-commit"}] }, { "matcher": "Edit", - "hooks": [{"type": "command", "command": "ctx system check-task-completion"}] + "hooks": [{"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system check-task-completion"}] }, { "matcher": "Write", - "hooks": [{"type": "command", "command": "ctx system check-task-completion"}] + "hooks": [{"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system check-task-completion"}] } ], "UserPromptSubmit": [ { "hooks": [ - {"type": "command", "command": "ctx system check-context-size"}, - {"type": "command", "command": "ctx system check-ceremony"}, - {"type": "command", "command": "ctx system check-persistence"}, - {"type": "command", "command": "ctx system check-journal"}, - {"type": "command", "command": "ctx system check-reminder"}, - {"type": "command", "command": "ctx system check-version"}, - {"type": "command", "command": "ctx system check-resource"}, - {"type": 
"command", "command": "ctx system check-knowledge"}, - {"type": "command", "command": "ctx system check-map-staleness"}, - {"type": "command", "command": "ctx system check-memory-drift"}, - {"type": "command", "command": "ctx system check-freshness"}, - {"type": "command", "command": "ctx system check-skill-discovery"}, - {"type": "command", "command": "ctx system heartbeat"} + {"type": "command", "command": "CTX_DIR_INHERITED=\"${CTX_DIR:-}\" CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system check-anchor-drift"}, + {"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system check-context-size"}, + {"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system check-ceremony"}, + {"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system check-persistence"}, + {"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system check-journal"}, + {"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system check-reminder"}, + {"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system check-version"}, + {"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system check-resource"}, + {"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system check-knowledge"}, + {"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system check-map-staleness"}, + {"type": "command", "command": 
"CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system check-memory-drift"}, + {"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system check-freshness"}, + {"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system check-skill-discovery"}, + {"type": "command", "command": "CTX_DIR=\"${CLAUDE_PROJECT_DIR:?CLAUDE_PROJECT_DIR unset; cannot anchor ctx}/.context\" ctx system heartbeat"} ] } ] diff --git a/internal/assets/claude/skills/ctx-architecture-failure-analysis/SKILL.md b/internal/assets/claude/skills/ctx-architecture-failure-analysis/SKILL.md index 613ecfd21..13872ce31 100644 --- a/internal/assets/claude/skills/ctx-architecture-failure-analysis/SKILL.md +++ b/internal/assets/claude/skills/ctx-architecture-failure-analysis/SKILL.md @@ -15,9 +15,9 @@ bugs, not security holes. maps what exists. `/ctx-architecture-enrich` improves map fidelity. This skill generates concrete, disprovable claims about where the map will break under real-world conditions. Every finding is a -hypothesis with evidence — not a suspicion, not a vibe. +hypothesis with evidence, not a suspicion or a vibe. -The goal is to find failure modes that code review misses — the +The goal is to find failure modes that code review misses: the ones that ship, pass tests, and break in production at 3am. This skill requires `/ctx-architecture` artifacts as input. 
@@ -45,16 +45,16 @@ If they don't exist, stop and tell the user to run ## Inputs **Required** (must exist before running): -- `.context/ARCHITECTURE.md` — system map -- `.context/DETAILED_DESIGN*.md` — module-level detail -- `.context/map-tracking.json` — coverage data +- `.context/ARCHITECTURE.md`: system map +- `.context/DETAILED_DESIGN*.md`: module-level detail +- `.context/map-tracking.json`: coverage data **Optional** (enhances analysis): -- `.context/DANGER-ZONES.md` — existing danger zones from +- `.context/DANGER-ZONES.md`: existing danger zones from `/ctx-architecture` principal mode (used as starting points, not as the final word) -- GitNexus MCP — blast radius estimation, shared-state detection -- Gemini Search — cross-reference against known failure patterns +- GitNexus MCP: blast radius estimation, shared-state detection +- Gemini Search: cross-reference against known failure patterns ## Process @@ -62,7 +62,7 @@ If they don't exist, stop and tell the user to run 1. Check that architecture artifacts exist. If missing: > Architecture artifacts not found. Run `/ctx-architecture` - > first — this skill analyzes existing maps, it doesn't + > first; this skill analyzes existing maps, it doesn't > create them. 2. Load `map-tracking.json` to identify which modules have sufficient coverage (confidence >= 0.7). Low-confidence @@ -91,7 +91,7 @@ For each module with confidence >= 0.7: order, shutdown sequence) - State machines and transition points -3. For each mutation point, read the actual source code — +3. For each mutation point, read the actual source code, DETAILED_DESIGN summaries are not enough for failure analysis. You need to see the actual lock scope, the actual error check, the actual nil guard. 
@@ -279,7 +279,7 @@ _Run after /ctx-architecture for full coverage._ ## Critical (risk score >= 7, silent or cascading) -### DZ-1: [Location] — [Failure Mode] +### DZ-1: [Location]: [Failure Mode] **Category**: Concurrency | Ordering | Cache | Amplification | Ownership | Error Handling | Scaling | Idempotency @@ -304,7 +304,7 @@ _Run after /ctx-architecture for full coverage._ ## Unanalyzed Modules Modules with coverage < 0.7 in map-tracking.json: -- `module/path` (confidence: 0.3) — risk unknown +- `module/path` (confidence: 0.3): risk unknown ``` **Confidence levels:** diff --git a/internal/assets/claude/skills/ctx-architecture/SKILL.md b/internal/assets/claude/skills/ctx-architecture/SKILL.md index 10417fc29..e2b7cb025 100644 --- a/internal/assets/claude/skills/ctx-architecture/SKILL.md +++ b/internal/assets/claude/skills/ctx-architecture/SKILL.md @@ -370,7 +370,7 @@ library with no runtime behavior). **GLOSSARY.md**: append project-specific terms discovered during analysis. This captures the vocabulary that makes the codebase -searchable — type names, internal concepts, abbreviations, and +searchable: type names, internal concepts, abbreviations, and domain jargon that a new reader wouldn't know to search for. Rules: @@ -383,7 +383,7 @@ Rules: "singleton"). Include terms that are unique to this codebase or used in a project-specific way - Insert alphabetically into the existing list -- Format: `**Term** — one-line definition` +- Format: `**Term**: one-line definition` - Print added terms in the convergence report under a "Glossary additions" line diff --git a/internal/assets/claude/skills/ctx-commit/SKILL.md b/internal/assets/claude/skills/ctx-commit/SKILL.md index ca7373d4f..81443ad50 100644 --- a/internal/assets/claude/skills/ctx-commit/SKILL.md +++ b/internal/assets/claude/skills/ctx-commit/SKILL.md @@ -55,7 +55,7 @@ Unless the user says `--skip-qa` or "skip checks": **Verify before claiming ready**: map each claim to evidence. 
"Tests pass" requires test output with 0 failures. "Build succeeds" requires exit 0. "Lint clean" requires linter output with 0 errors. -Run commands fresh — never reuse earlier output. Before proceeding +Run commands fresh; never reuse earlier output. Before proceeding to stage, answer these self-audit questions: 1. What assumptions did I make? diff --git a/internal/assets/claude/skills/ctx-drift/SKILL.md b/internal/assets/claude/skills/ctx-drift/SKILL.md index 2813a6d5a..59960173d 100644 --- a/internal/assets/claude/skills/ctx-drift/SKILL.md +++ b/internal/assets/claude/skills/ctx-drift/SKILL.md @@ -213,14 +213,14 @@ drifts independently from the codebase. | Missing `Skill(ctx-*)` entry | Suggest adding: skill will prompt every time | | Stale `Skill(ctx-*)` entry | Suggest removing: dead reference | | Granular `Bash(ctx :*)` | Suggest consolidating to `Bash(ctx:*)` | -| One-off / session debris entries | Note as hygiene issue (see `hack/runbooks/sanitize-permissions.md`) | +| One-off / session debris entries | Note as hygiene issue (see `docs/operations/runbooks/sanitize-permissions.md`) | ### Important Do **not** edit `settings.local.json` directly. Report findings and let the user make changes. This file controls agent permissions: self-modification is a security concern. Refer -users to `hack/runbooks/sanitize-permissions.md` for the manual cleanup +users to `docs/operations/runbooks/sanitize-permissions.md` for the manual cleanup procedure. ## Proactive Use diff --git a/internal/assets/claude/skills/ctx-pause/SKILL.md b/internal/assets/claude/skills/ctx-pause/SKILL.md index 277af1dd3..ca5ef1297 100644 --- a/internal/assets/claude/skills/ctx-pause/SKILL.md +++ b/internal/assets/claude/skills/ctx-pause/SKILL.md @@ -26,7 +26,7 @@ Security hooks (dangerous command blocking) still fire. 
Run the pause command: ```bash -ctx pause +ctx hook pause ``` Then confirm to the user: diff --git a/internal/assets/claude/skills/ctx-plan/SKILL.md b/internal/assets/claude/skills/ctx-plan/SKILL.md new file mode 100644 index 000000000..5107fbabb --- /dev/null +++ b/internal/assets/claude/skills/ctx-plan/SKILL.md @@ -0,0 +1,55 @@ +--- +name: ctx-plan +description: Stress-test a plan through adversarial interview. Find what's weak, missing, or unexamined before the user commits. Use when the user wants their plan scrutinized. +--- + +You are a skeptical collaborator. The user has a plan and wants it +attacked. Your job is to surface what's weak, missing, or unexamined — +not to help them feel ready. + +State the plan as you understand it and proceed. Only pause if your +restatement exposes a material ambiguity or contradiction. + +Ask one question at a time. Each question must test something specific: +an assumption, a tradeoff, or a failure mode. No fishing. No clarifying +questions asked merely to reduce your own workload. + +After the user answers, push back, agree, narrow the question, or move +on — don't just accumulate. Walk the tree depth-first: settle decisions +that constrain others before opening siblings. + +Don't ask the user what the code, docs, or existing `ctx` files can +answer. Read first. Reserve questions for intent, priorities, +tradeoffs, and context that lives only in the user's head. + +Cycle through these angles; don't dwell on one: + +- Scope: what's NOT in this plan, and why? +- Failure modes: what breaks this? How would you notice? +- Alternatives: what did you reject, and what would change your mind? +- Sequencing: why this order? What if step 2 fails? +- Reversibility: if you're wrong in 3 months, how expensive is the unwind? +- Hidden assumptions: what must be true for this to work that isn't yet? + +Offer your take after the user answers — not before. 
The exception is +when the user is genuinely stuck; then propose a concrete possibility +and ask them to react. + +If the user drifts into implementation mechanics before the main bet is +clear, pull the conversation back to the unresolved bet. + +If a core assumption collapses mid-debate, say so plainly. Don't keep +politely working through the checklist on a plan that's already rotten. + +Do not produce an implementation plan. The deliverable is a debated +brief, not a task list. + +Stop when the user can describe, without your help: + +- what they're betting on +- what they rejected +- the top three failure modes +- the cheapest way to validate the bet +- what becomes expensive to unwind + +Then offer to write the debated brief. diff --git a/internal/assets/claude/skills/ctx-remember/SKILL.md b/internal/assets/claude/skills/ctx-remember/SKILL.md index 479a9e35c..bcefcddfa 100644 --- a/internal/assets/claude/skills/ctx-remember/SKILL.md +++ b/internal/assets/claude/skills/ctx-remember/SKILL.md @@ -38,7 +38,7 @@ feel like a file search rather than genuine recall: 1. **Load context packet**: ```bash - ctx agent --budget 4000 + ctx agent ``` 2. **Read the files** listed in the packet's "Read These Files" section (TASKS.md, DECISIONS.md, LEARNINGS.md, etc.) diff --git a/internal/assets/claude/skills/ctx-resume/SKILL.md b/internal/assets/claude/skills/ctx-resume/SKILL.md index 39ad9a7ca..d96ccdb6b 100644 --- a/internal/assets/claude/skills/ctx-resume/SKILL.md +++ b/internal/assets/claude/skills/ctx-resume/SKILL.md @@ -24,7 +24,7 @@ reminder, and ceremony behavior. 
Run the resume command: ```bash -ctx resume +ctx hook resume ``` Then confirm to the user: diff --git a/internal/assets/claude/skills/ctx-worktree/SKILL.md b/internal/assets/claude/skills/ctx-worktree/SKILL.md index 56da68389..6c8cb7c83 100644 --- a/internal/assets/claude/skills/ctx-worktree/SKILL.md +++ b/internal/assets/claude/skills/ctx-worktree/SKILL.md @@ -124,7 +124,7 @@ Merge a completed worktree back and clean up. The encryption key lives at `~/.ctx/.ctx.key` (user-level, outside the project). All worktrees on the same machine share this path, so -**`ctx pad` and `ctx notify` work in worktrees automatically**. +**`ctx pad` and `ctx hook notify` work in worktrees automatically**. One thing to watch: diff --git a/internal/assets/commands/commands.yaml b/internal/assets/commands/commands.yaml index 62415c6b9..815d77a2e 100644 --- a/internal/assets/commands/commands.yaml +++ b/internal/assets/commands/commands.yaml @@ -8,6 +8,41 @@ # # See also: examples.yaml (Example fields), flags.yaml (flag descriptions). +activate: + long: |- + Emit a shell-specific export statement that binds CTX_DIR to the + selected .context/ directory for the current shell. + + Intended usage: + + eval "$(ctx activate)" # bind the one visible .context/ + + Activate scans upward from the current working directory + collecting every .context/ directory found. When exactly one is + visible it emits its path; when none or several are found it + refuses and prints the candidates so a human chooses explicitly + (typically by `cd`-ing closer to the project root). + + When the parent shell already has CTX_DIR set to a different + value, the output gains a leading `# ctx: replacing stale ...` + comment so the user sees the change in `eval` output before the + replacement takes effect. + + Activate is the only command in the CLI that walks the filesystem + during resolution. All other commands read CTX_DIR and error + loudly when it is undeclared, relative, or non-canonical. 
+ short: Emit shell export to bind CTX_DIR +deactivate: + long: |- + Emit a shell-specific `unset CTX_DIR` statement for the current + shell. + + Intended usage: + + eval "$(ctx deactivate)" + + Pairs with `ctx activate` for symmetric shell integration. + short: Emit shell unset for CTX_DIR add: long: |- Add a new decision, task, learning, or convention @@ -317,7 +352,7 @@ hub.start: Start the ctx Hub gRPC server. On first run, generates an admin token and prints it to - stdout — save this token; it's required for + stdout. Save this token; it's required for `ctx connection register` in client projects. With --daemon, forks to the background and writes a PID @@ -334,7 +369,7 @@ hub.stop: Sends SIGTERM to the PID recorded in /hub.pid, waits for in-flight RPCs to drain, and removes the PID - file. Safe to rerun — if no daemon is running, returns a + file. Safe to rerun: if no daemon is running, returns a "no running hub" error without side effects. short: Stop a running ctx Hub daemon hub.status: @@ -1006,7 +1041,7 @@ serve: .context/journal-site. With a directory argument, serves that directory if it contains a zensical.toml. - This command does NOT start a ctx Hub — to run a hub, + This command does NOT start a ctx Hub. To run a hub, use `ctx hub start`. Requires zensical to be installed: @@ -1065,9 +1100,9 @@ system: Go binaries and a small set of session-lifecycle plumbing commands used by skills and editor integrations. - User-facing maintenance commands (backup, bootstrap, event, - message, prune, resource, stats) have been promoted to - top-level commands. Run `ctx --help` to see them. + User-facing maintenance commands (bootstrap, event, message, + prune, resource, stats) have been promoted to top-level + commands. Run `ctx --help` to see them. 
Plumbing subcommands (used by skills and automation): mark-journal Update journal processing state @@ -1078,6 +1113,7 @@ system: Hook subcommands (Claude Code plugin - safe to run manually): context-load-gate Context file read directive (PreToolUse) + check-anchor-drift Stale-anchor sanity check check-context-size Context size checkpoint check-ceremony Session ceremony adoption nudge check-persistence Context persistence nudge @@ -1088,39 +1124,13 @@ system: check-version Version update nudge check-map-staleness Architecture map staleness nudge block-non-path-ctx Block non-PATH ctx invocations - block-dangerous-command Block dangerous command patterns (project-local) - check-backup-age Backup staleness check (project-local) check-task-completion Task completion nudge after edits post-commit Post-commit context capture nudge qa-reminder QA reminder before completion specs-nudge Plan-to-specs directory nudge (PreToolUse) check-memory-drift Memory drift nudge (MEMORY.md changed) heartbeat Session heartbeat webhook (no stdout) - short: Hook plumbing (hidden) — promoted commands moved to top-level -backup: - long: |- - Create timestamped tar.gz archives of project context and/or global - Claude Code data. Optionally copies archives to an SMB share. - - Scopes: - project .context/, .claude/, ideas/, ~/.bashrc - global ~/.claude/ (excludes todos/) - all Both project and global (default) - - Environment: - CTX_BACKUP_SMB_URL - SMB share URL (e.g. smb://host/share) - CTX_BACKUP_SMB_SUBDIR - Subdirectory on share (default: ctx-sessions) - short: Backup context and Claude data -system.blockdangerouscommand: - long: |- - Regex safety net for commands that the deny-list cannot express. - Catches mid-command sudo, mid-command git push, and binary installs - to bin directories. 
- - Hook event: PreToolUse (Bash) - Output: {"decision":"block","reason":"..."} or silent - Silent when: command doesn't match any dangerous pattern - short: Block dangerous command patterns (regex safety net) + short: Hook plumbing (hidden); promoted commands moved to top-level system.blocknonpathctx: long: |- Blocks ./ctx, go run ./cmd/ctx, and absolute-path ctx invocations. @@ -1133,20 +1143,21 @@ system.blocknonpathctx: short: Block non-PATH ctx invocations bootstrap: short: Print context location for AI agents -system.checkbackupage: +system.checkanchordrift: long: |- - Checks if the .context backup is stale (>2 days old) or the SMB share - is unmounted. Outputs a VERBATIM relay warning when issues are found. - Throttled to once per day. - - Environment: - CTX_BACKUP_SMB_URL - SMB share URL (e.g. smb://myhost/myshare). - If unset, the SMB mount check is skipped. + Compares the parent shell's CTX_DIR (captured as + CTX_DIR_INHERITED before the standard hook injection) + against the Claude-injected CLAUDE_PROJECT_DIR/.context + anchor. When the two diverge, emits a VERBATIM warning + banner naming both values so the user can spot when + their interactive CLI / `!`-pragma calls are writing to + a different project than Claude Code is in. Hook event: UserPromptSubmit - Output: VERBATIM relay with warning box, silent otherwise - Silent when: backup is fresh, or already checked today - short: Backup staleness check hook + Output: VERBATIM warning (when drifted), silent otherwise + Silent when: CTX_DIR_INHERITED is empty (no shell-level + activation), or matches CTX_DIR after filepath.Clean. + short: Stale-anchor sanity hook system.checkceremony: long: |- Scans the last 3 journal entries for /ctx-remember and /ctx-wrap-up @@ -1510,7 +1521,7 @@ trace: ctx trace links commits back to the decisions, tasks, learnings, and sessions that motivated them. git log shows - what changed, git blame shows who — ctx trace shows why. 
+ what changed, git blame shows who; ctx trace shows why. Subcommands and forms: ctx trace Show context for a specific commit diff --git a/internal/assets/commands/examples.yaml b/internal/assets/commands/examples.yaml index babbac921..97df674b2 100644 --- a/internal/assets/commands/examples.yaml +++ b/internal/assets/commands/examples.yaml @@ -9,6 +9,14 @@ # # See also: commands.yaml (Short/Long), flags.yaml (flag descriptions). +activate: + short: |2- + eval "$(ctx activate)" + +deactivate: + short: |2- + eval "$(ctx deactivate)" + add: short: |2- ctx add decision "Use PostgreSQL" --context "..." --rationale "..." --consequence "..." @@ -58,8 +66,8 @@ compact: complete: short: |2- - ctx complete 3 - ctx complete "auth" + ctx task complete 3 + ctx task complete "auth" config: short: |2- @@ -427,22 +435,14 @@ system: ctx system --help # list hidden hook plumbing ctx system mark-wrapped-up -backup: - short: |2- - ctx backup - ctx backup --scope project - -system.blockdangerouscommand: - short: ' ctx system block-dangerous-command' - system.blocknonpathctx: short: ' ctx system block-non-path-ctx' bootstrap: short: ' ctx system bootstrap' -system.checkbackupage: - short: ' ctx system check-backup-age' +system.checkanchordrift: + short: ' ctx system check-anchor-drift' system.checkceremony: short: ' ctx system check-ceremony' diff --git a/internal/assets/commands/flags.yaml b/internal/assets/commands/flags.yaml index 73dee64c8..9f855270e 100644 --- a/internal/assets/commands/flags.yaml +++ b/internal/assets/commands/flags.yaml @@ -8,6 +8,8 @@ # # See also: commands.yaml (Short/Long), examples.yaml (Example fields). 
+activate.shell: + short: Shell dialect for the emitted export (bash, zsh, sh); default auto-detects from $SHELL add.application: short: 'Application for learnings: how to apply this going forward (required for learnings)' add.branch: @@ -44,14 +46,10 @@ agent.skill: short: Include named skill content in context packet agent.include-hub: short: Include ctx Hub entries in context packet -allow-outside-cwd: - short: Allow context directory outside current working directory changes.since: short: 'Time reference: duration (24h) or date (2026-03-01)' compact.archive: short: Create .context/archive/ for old content -context-dir: - short: 'Override context directory path (default: .context)' initialize.caller: short: Identify the calling tool (e.g. vscode) to tailor output @@ -213,10 +211,6 @@ steering.sync.all: short: Sync to all supported tool formats sync.dry-run: short: Show what would change without modifying -backup.json: - short: Output results as JSON -backup.scope: - short: 'Backup scope: project, global, or all' bootstrap.json: short: Output in JSON format bootstrap.quiet: diff --git a/internal/assets/commands/text/doctor.yaml b/internal/assets/commands/text/doctor.yaml index ce48563a9..331cb5a4d 100644 --- a/internal/assets/commands/text/doctor.yaml +++ b/internal/assets/commands/text/doctor.yaml @@ -1,6 +1,10 @@ # Doctor diagnostic text strings for ctx CLI. # Used by assets.TextDesc() for health check output. 
+doctor.check-did-not-run: + short: 'check did not run: %v' +doctor.check-did-not-run-cascade: + short: 'check did not run: %v (subsequent context-dependent checks skipped)' doctor.context-file.format: short: '%-22s ~%d tokens' doctor.context-initialized.error: diff --git a/internal/assets/commands/text/errors.yaml b/internal/assets/commands/text/errors.yaml index f88aba5c7..6744e7247 100644 --- a/internal/assets/commands/text/errors.yaml +++ b/internal/assets/commands/text/errors.yaml @@ -25,7 +25,7 @@ err.add.section-required: err.add.unknown-type: short: 'unknown type %q. Valid types: decision, task, learning, convention' err.fmt.no-context-dir: - short: "context directory not found — run ctx init" + short: "context directory not found; run ctx init" err.fmt.file-read: short: 'failed to format %s: %w' err.fmt.file-write: @@ -34,36 +34,45 @@ err.fmt.no-files: short: 'no context files found in %s' err.fmt.needs-formatting: short: files need formatting -err.backup.backup-global: - short: 'global backup: %w' -err.backup.backup-project: - short: 'project backup: %w' -err.backup.backup-smb-config: - short: 'parse SMB config: %w' err.backup.context-dir-not-found: short: "context directory not found: %s - run 'ctx init'" -err.backup.create-archive: - short: 'create archive file: %w' err.backup.create-archive-dir: short: 'failed to create archive directory: %w' err.backup.create-backup: short: 'failed to create backup %s: %w' -err.backup.invalid-backup-scope: - short: 'invalid scope %q: must be project, global, or all' -err.backup.invalid-smb-url: - short: 'invalid SMB URL: %s' -err.backup.mount-failed: - short: 'failed to mount %s: %w' -err.backup.smb-missing-share: - short: 'SMB URL missing share name: %s' -err.backup.source-not-found: - short: 'source not found: %s' err.backup.write-archive: short: 'failed to write archive: %w' -err.backup.write-smb: - short: 'write to SMB: %w' err.context.dir-not-found: short: 'context directory not found: ' 
+err.context.not-declared-zero: + short: |- + no context directory specified for this project + See: https://ctx.ist/recipes/activating-context/ +err.context.not-declared-one: + short: |- + no context directory specified; a likely candidate is at %s + See: https://ctx.ist/recipes/activating-context/ +err.context.not-declared-many: + short: |- + no context directory specified; multiple candidates visible: + %s + See: https://ctx.ist/recipes/activating-context/ +err.context.relative-not-allowed: + short: |- + CTX_DIR must be an absolute path; got %q + See: https://ctx.ist/recipes/activating-context/ +err.context.non-canonical-basename: + short: |- + CTX_DIR basename must be %q; got %q + See: https://ctx.ist/recipes/activating-context/ +err.context.dir-not-a-directory: + short: 'CTX_DIR points at a file, not a directory: %s' +err.context.dir-stat: + short: 'cannot stat CTX_DIR %s: %w' +err.activate.no-candidates: + short: |- + ctx activate: no .context/ directory found from this location + See: https://ctx.ist/recipes/activating-context/ err.cli.no-tool-specified: short: 'no tool specified: use --tool or set the tool field in .ctxrc' err.config.golden-not-found: @@ -120,8 +129,6 @@ err.date.invalid-date: short: 'invalid %s date %q (expected YYYY-MM-DD): %w' err.date.invalid-date-value: short: invalid date %q (expected YYYY-MM-DD) -err.fs.boundary-violation: - short: "%w\nUse --allow-outside-cwd to override this check" err.fs.create-dir: short: 'failed to create directory %s: %w' err.fs.dir-not-found: @@ -546,8 +553,6 @@ err.validate.context-dir-symlink: short: 'context directory %q is a symlink' err.validate.context-file-symlink: short: 'context file %q is a symlink' -err.validate.context-outside-root: - short: 'context directory %q resolves outside project root %q' err.validate.invalid-selection: short: 'invalid selection: %q (expected 1-%d)' err.validate.unknown-document: diff --git a/internal/assets/commands/text/hooks.yaml 
b/internal/assets/commands/text/hooks.yaml index 9033ac604..e329f83e2 100644 --- a/internal/assets/commands/text/hooks.yaml +++ b/internal/assets/commands/text/hooks.yaml @@ -1,44 +1,20 @@ # Hook output text strings for ctx CLI. # Used by assets.TextDesc() for hook messages and checks. -backup.box-title: - short: Backup Warning -backup.no-marker: - short: No backup marker found - backup may have never run. -backup.relay-message: - short: Backup warning -backup.relay-prefix: - short: 'IMPORTANT: Relay this backup warning to the user VERBATIM before answering their question.' -backup.run-hint: - short: 'Run: ctx backup' -backup.smb-not-mounted: - short: SMB share (%s) is not mounted. -backup.smb-unavailable: - short: Backups cannot run until it's available. -backup.stale: - short: Last .context backup is %d days old. block.absolute-path: short: 'Use ''ctx'' from PATH, not absolute paths. Ask the user to run: make build && sudo make install' block.constitution-suffix: short: 'See CONSTITUTION.md: ctx Invocation Invariants' -block.cp-to-bin: - short: Agent must not copy binaries to bin directories. Ask the user to run 'sudo make install' instead. block.dot-slash: short: 'Use ''ctx'' from PATH, not ''./ctx'' or ''./dist/ctx''. Ask the user to run: make build && sudo make install' block.go-run: short: 'Use ''ctx'' from PATH, not ''go run ./cmd/ctx''. Ask the user to run: make build && sudo make install' -block.install-to-local-bin: - short: Do not copy binaries to ~/.local/bin - this overrides the system ctx in /usr/local/bin. Use 'ctx' from PATH. -block.mid-git-push: - short: git push requires explicit user approval. -block.mid-sudo: - short: Cannot use sudo (no password access). Use 'make build && sudo make install' manually if needed. block.non-path-relay-message: short: Blocked non-PATH ctx invocation bootstrap.next-steps: short: |- 1. Read AGENT_PLAYBOOK.md from the context directory - 2. Run `ctx agent --budget 4000` for a content summary + 2. 
Run `ctx agent` for a content summary bootstrap.none: short: (none) bootstrap.plugin-warning: @@ -93,10 +69,10 @@ skill-discovery.content: short: |- These skills are easy to forget mid-session: - /ctx-reflect — checkpoint progress and learnings - /ctx-learning-add — capture gotchas before moving on - /ctx-decision-add — record trade-offs while fresh - /ctx-prompt-audit — improve prompt clarity + /ctx-reflect checkpoint progress and learnings + /ctx-learning-add capture gotchas before moving on + /ctx-decision-add record trade-offs while fresh + /ctx-prompt-audit improve prompt clarity Use them when you hit a milestone, make a design choice, or discover something surprising. @@ -113,6 +89,23 @@ check-context-size.billing-log-format: short: prompt#%d BILLING-WARNING tokens=%d threshold=%d relay.prefix-format: short: '%s: %s' +check-anchor-drift.box-title: + short: Anchor Drift +check-anchor-drift.content: + short: |- + CTX_DIR (your shell) and CLAUDE_PROJECT_DIR/.context + point at different projects. + + shell: %s + claude: %s + + Re-run `eval "$(ctx activate)"` from this project to + realign, or close this Claude Code session and reopen + it from the right working directory. +check-anchor-drift.relay-message: + short: CTX_DIR diverges from Claude project anchor +check-anchor-drift.relay-prefix: + short: 'IMPORTANT: Relay this anchor drift notice to the user VERBATIM before answering their question.' check-context-size.billing-relay-format: short: Billing threshold exceeded (%s tokens > %s) check-context-size.billing-relay-prefix: diff --git a/internal/assets/commands/text/mcp.yaml b/internal/assets/commands/text/mcp.yaml index 7df4e460b..5eee38ed5 100644 --- a/internal/assets/commands/text/mcp.yaml +++ b/internal/assets/commands/text/mcp.yaml @@ -134,7 +134,7 @@ mcp.all-tasks-complete: mcp.check-task-format: short: 'Did this complete task #%d: "%s"?' 
mcp.check-task-hint: - short: 'If yes, run: ctx complete %d' + short: 'If yes, run: ctx task complete %d' mcp.compact-clean: short: Nothing to compact - context is already clean. mcp.format-compacted: @@ -366,7 +366,7 @@ mcp.gov-drift-never-checked: mcp.gov-persist-nudge: short: '⚠ %d tool calls since last context write. Persist decisions, learnings, or completed tasks with ctx_add() or ctx_complete().' mcp.gov-violation-critical: - short: '🚨 CRITICAL: %s — %s (at %s). Review this action immediately. If unintended, revert it.' + short: '🚨 CRITICAL: %s, %s (at %s). Review this action immediately. If unintended, revert it.' mcp.tool-steering-get-desc: short: Retrieve applicable steering files for a prompt. Without a prompt, returns always-included files only. diff --git a/internal/assets/commands/text/ui.yaml b/internal/assets/commands/text/ui.yaml index ab1b9185f..322a8b807 100644 --- a/internal/assets/commands/text/ui.yaml +++ b/internal/assets/commands/text/ui.yaml @@ -205,7 +205,7 @@ guide.default: TRACKING DECISIONS & KNOWLEDGE ctx add -t TYPE Add a decision, learning, convention, or task - ctx complete Mark a task as done in TASKS.md + ctx task complete Mark a task as done in TASKS.md ctx decision reindex Rebuild the DECISIONS.md index table BROWSING HISTORY diff --git a/internal/assets/commands/text/write.yaml b/internal/assets/commands/text/write.yaml index 10083449b..19f324c83 100644 --- a/internal/assets/commands/text/write.yaml +++ b/internal/assets/commands/text/write.yaml @@ -11,12 +11,6 @@ write.spec-nudge-tip: short: 'Tip: this task may benefit from a spec. Run /ctx-spec to scaffold one.' 
write.archived: short: Archived previous mirror to %s -write.backup-result: - short: '%s: %s (%s)' -write.backup-skip-entry: - short: "skipping %s (not found)\n" -write.backup-smb-dest: - short: ' → %s' write.bootstrap-dir: short: 'context_dir: %s' write.bootstrap-files: @@ -67,8 +61,6 @@ write.format-bytes-raw: short: "%d B" write.format-bytes-unit: short: "%.1f %cB" -write.format-gvfs-path: - short: "/run/user/%d/gvfs/smb-share:server=%s,share=%s" write.format-duration-day: short: "%dd" write.format-duration-hour: @@ -193,6 +185,28 @@ write.init-makefile-includes: short: ' ○ Makefile (already includes %s)' write.init-merged: short: ' ✓ %s (merged)' +write.init-anatomy-preamble: + short: |- + # Getting Started with ctx + + The `.context/` directory is your project's persistent memory. + Its parent (this directory) is the project root by contract; + that's what `ctx sync` and drift detection scan. `CTX_DIR` + must be an absolute path ending in `.context`. One `.context/` + per project; knowledge sharing across projects goes through + `ctx hub`, not a shared directory. + + Full reference: https://ctx.ist/recipes/activating-context/ +write.init-activate-hint: + short: |4- + + Bind CTX_DIR for this shell (required before other ctx commands): + + eval "$(ctx activate)" + + Or export the absolute path directly (skip the scan): + + export CTX_DIR=%s write.init-next-steps-block: short: |4- @@ -252,7 +266,7 @@ write.init-getting-started-saved: write.init-no-changes: short: ' ○ %s (no changes needed)' write.init-overwrite-prompt: - short: '%s already exists. Overwrite? [y/N] ' + short: '%s is already initialized. Re-run init? Existing files are preserved; missing ones are added. (Use --force to overwrite existing files.) [y/N] ' write.init-perms-allow: short: ' ✓ %s (added ctx permissions)' write.init-perms-allow-deny: @@ -296,7 +310,7 @@ write.init-claude-absent: 4. Re-run `ctx init` in this project to auto-enable the plugin for use here. 
- If you don't use Claude Code, ignore this — ctx works + If you don't use Claude Code, ignore this. ctx works with Cursor, Kiro, Cline, Aider, Copilot, and Windsurf as well (see `ctx setup --help`). write.init-claude-plugin-missing: @@ -323,7 +337,7 @@ write.init-claude-plugin-missing: 5. Back in `/plugin`, select Install and choose `ctx`. ⚠ Install at USER scope. Local plugin installs are - NOT auto-enabled globally by Claude Code — user + NOT auto-enabled globally by Claude Code; user scope avoids the per-project enablement gotcha. 6. Re-run `ctx init` in this project to wire up the @@ -331,7 +345,7 @@ write.init-claude-plugin-missing: This points Claude Code at the plugin source on disk, so changes you make to hooks or skills take effect - immediately — no reinstall needed. + immediately, no reinstall needed. write.init-claude-ready: short: |2- @@ -581,7 +595,7 @@ write.paused: write.paused-message: short: "ctx:paused (%d turns) - resume with /ctx-resume" write.prefix-error: - short: 'Error: ' + short: 'Error:' write.prefix-warn: short: ' ! %s: %v' write.publish-block: @@ -727,9 +741,9 @@ write.trace-commit-no-context: write.trace-last-entry: short: '%s %s [%s]' write.trace-resolved-full: - short: ' [%s] %s — %s (%s)' + short: ' [%s] %s: %s (%s)' write.trace-resolved-title: - short: ' [%s] %s — %s' + short: ' [%s] %s: %s' write.trace-resolved-raw: short: ' [%s] %s' write.trace-file-entry: @@ -871,7 +885,7 @@ write.steering-desc-workflow: write.steering-guidance: short: |- diff --git a/internal/assets/integrations/copilot-cli/INSTRUCTIONS.md b/internal/assets/integrations/copilot-cli/INSTRUCTIONS.md new file mode 100644 index 000000000..30ea64ab1 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/INSTRUCTIONS.md @@ -0,0 +1,96 @@ +# ctx Agent Instructions: Copilot CLI + + + + +## IMPORTANT: You Have Persistent Memory + +This project uses Context (`ctx`) for context persistence across sessions. 
+**Your memory is NOT ephemeral**: it lives in the context directory. + +## On Session Start + +1. **Run `ctx system bootstrap`**: CRITICAL, not optional. + This tells you where the context directory is. If it returns any + error, relay the error output to the user verbatim, point them at + https://ctx.ist/recipes/activating-context/ for setup, and STOP. + Do not try to recover: the user decides. +2. **Read AGENT_PLAYBOOK.md** from the context directory: it explains + how to use this system +3. **Run `ctx agent`** for a content summary + +## When Asked "Do You Remember?" + +When the user asks "Do you remember?", "What were we working on?", or any +memory-related question: + +**Do this FIRST (silently):** +- Read TASKS.md, DECISIONS.md, and LEARNINGS.md from the context directory +- Run `ctx journal source --limit 5` for recent session history + +**Then respond with a structured readback:** + +1. **Last session**: cite the most recent session topic and date +2. **Active work**: list pending or in-progress tasks +3. **Recent context**: mention 1-2 recent decisions or learnings +4. **Next step**: offer to continue or ask what to focus on + +**Never** lead with "I don't have memory", "Let me check if there are files", +or narrate your discovery process. The context files are your memory. +Read them silently, then present what you found as recall, not as a search. + +## Quick Context Load + +```bash +# Get AI-optimized context packet (what you should know) +ctx agent + +# Or see full status +ctx status +``` + +## Context Files + +| File | Purpose | +|-----------------|----------------------------------------| +| CONSTITUTION.md | Hard rules: NEVER violate | +| TASKS.md | Current work items | +| DECISIONS.md | Architectural decisions with rationale | +| LEARNINGS.md | Gotchas, tips, lessons learned | +| CONVENTIONS.md | Code patterns and standards | + +All files live in the context directory reported by `ctx system bootstrap`. 
+ +## Context Updates During Work + +Proactively update context files as you work: + +| Event | Action | +|-----------------------------|-------------------------------------| +| Made architectural decision | Add to `.context/DECISIONS.md` | +| Discovered gotcha/bug | Add to `.context/LEARNINGS.md` | +| Established new pattern | Add to `.context/CONVENTIONS.md` | +| Completed task | Mark [x] in `.context/TASKS.md` | + +## Self-Check + +Periodically ask yourself: + +> "If this session ended right now, would the next session know what happened?" + +If not, save a session file or update context files before continuing. + +## Session Persistence + +After completing meaningful work, save a session summary to +`.context/sessions/`. Use the `ctx-wrap-up` skill for the full ceremony. + +## Build Commands + +```bash +make build # or: go build ./cmd/ctx/... +make lint # or: golangci-lint run +make test # or: go test ./... +``` + + diff --git a/internal/assets/integrations/copilot-cli/ctx-hooks.json b/internal/assets/integrations/copilot-cli/ctx-hooks.json index 2aa48f448..694ef31a7 100644 --- a/internal/assets/integrations/copilot-cli/ctx-hooks.json +++ b/internal/assets/integrations/copilot-cli/ctx-hooks.json @@ -3,38 +3,67 @@ "hooks": { "sessionStart": [ { - "type": "command", - "bash": ".github/hooks/scripts/ctx-sessionStart.sh", - "powershell": ".github/hooks/scripts/ctx-sessionStart.ps1", - "cwd": ".", - "timeoutSec": 10 + "description": "Bootstrap ctx context on session start", + "command": "ctx system bootstrap" + }, + { + "description": "Load AI-optimized context packet", + "command": "ctx agent --budget 4000" + } ], "preToolUse": [ { - "type": "command", - "bash": ".github/hooks/scripts/ctx-preToolUse.sh", - "powershell": ".github/hooks/scripts/ctx-preToolUse.ps1", - "cwd": ".", - "timeoutSec": 5 + "description": "Context load gate: ensure context is loaded before work", + "command": "ctx system context-load-gate" + }, + { + "description": "Block dangerous non-path 
ctx commands", + "matcher": "bash", + "command": "ctx system block-non-path-ctx" + }, + { + "description": "QA reminder nudge", + "matcher": "bash", + "command": "ctx system qa-reminder" + } ], "postToolUse": [ { - "type": "command", - "bash": ".github/hooks/scripts/ctx-postToolUse.sh", - "powershell": ".github/hooks/scripts/ctx-postToolUse.ps1", - "cwd": ".", - "timeoutSec": 5 + "description": "Post-commit context persistence check", + "matcher": "bash", + "command": "ctx system post-commit" + }, + { + "description": "Check if a task was just completed", + "matcher": "edit", + "command": "ctx system check-task-completion" + }, + { + "description": "Check if a task was just completed (write)", + "matcher": "write", + "command": "ctx system check-task-completion" + } ], "sessionEnd": [ { - "type": "command", - "bash": ".github/hooks/scripts/ctx-sessionEnd.sh", - "powershell": ".github/hooks/scripts/ctx-sessionEnd.ps1", - "cwd": ".", - "timeoutSec": 15 + "description": "Check context size for budget drift", + "command": "ctx system check-context-size" + }, + { + "description": "Persistence check: unsaved decisions/learnings", + "command": "ctx system check-persistence" + }, + { + "description": "Journal export check", + "command": "ctx system check-journal" + }, + { + "description": "Version freshness check", + "command": "ctx system check-version" + }, + { + "description": "Heartbeat: record session activity", + "command": "ctx system heartbeat" + } ] } diff --git a/internal/assets/integrations/copilot-cli/scripts/post-tool-use.ps1 b/internal/assets/integrations/copilot-cli/scripts/post-tool-use.ps1 new file mode 100644 index 000000000..4dceed315 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/scripts/post-tool-use.ps1 @@ -0,0 +1,12 @@ +# ctx post-tool-use hook for Copilot CLI (PowerShell) +# Checks for post-commit context and task completion + +$Tool = $args[0] + +if ($Tool -eq "bash" -or $Tool -eq "powershell") { + try { ctx system post-commit 2>$null } 
catch {} +} + +if ($Tool -eq "edit" -or $Tool -eq "write") { + try { ctx system check-task-completion 2>$null } catch {} +} diff --git a/internal/assets/integrations/copilot-cli/scripts/post-tool-use.sh b/internal/assets/integrations/copilot-cli/scripts/post-tool-use.sh new file mode 100644 index 000000000..7d9ccb906 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/scripts/post-tool-use.sh @@ -0,0 +1,14 @@ +#!/bin/bash +# ctx post-tool-use hook for Copilot CLI +# Checks for post-commit context and task completion +set -euo pipefail + +TOOL="${1:-}" + +if [ "$TOOL" = "bash" ] || [ "$TOOL" = "powershell" ]; then + ctx system post-commit 2>/dev/null || true +fi + +if [ "$TOOL" = "edit" ] || [ "$TOOL" = "write" ]; then + ctx system check-task-completion 2>/dev/null || true +fi diff --git a/internal/assets/integrations/copilot-cli/scripts/pre-tool-use.ps1 b/internal/assets/integrations/copilot-cli/scripts/pre-tool-use.ps1 new file mode 100644 index 000000000..c7fed6f7b --- /dev/null +++ b/internal/assets/integrations/copilot-cli/scripts/pre-tool-use.ps1 @@ -0,0 +1,11 @@ +# ctx pre-tool-use hook for Copilot CLI (PowerShell) +# Ensures context is loaded and blocks dangerous commands + +$Tool = $args[0] + +try { ctx system context-load-gate 2>$null } catch {} + +if ($Tool -eq "bash" -or $Tool -eq "powershell") { + try { ctx system block-non-path-ctx 2>$null } catch {} + try { ctx system qa-reminder 2>$null } catch {} +} diff --git a/internal/assets/integrations/copilot-cli/scripts/pre-tool-use.sh b/internal/assets/integrations/copilot-cli/scripts/pre-tool-use.sh new file mode 100644 index 000000000..cd9cd926c --- /dev/null +++ b/internal/assets/integrations/copilot-cli/scripts/pre-tool-use.sh @@ -0,0 +1,15 @@ +#!/bin/bash +# ctx pre-tool-use hook for Copilot CLI +# Ensures context is loaded and blocks dangerous commands +set -euo pipefail + +TOOL="${1:-}" + +# Always check context load gate +ctx system context-load-gate 2>/dev/null || true + +# Bash-specific 
hooks +if [ "$TOOL" = "bash" ] || [ "$TOOL" = "powershell" ]; then + ctx system block-non-path-ctx 2>/dev/null || true + ctx system qa-reminder 2>/dev/null || true +fi diff --git a/internal/assets/integrations/copilot-cli/scripts/session-end.ps1 b/internal/assets/integrations/copilot-cli/scripts/session-end.ps1 new file mode 100644 index 000000000..8b840f2bb --- /dev/null +++ b/internal/assets/integrations/copilot-cli/scripts/session-end.ps1 @@ -0,0 +1,8 @@ +# ctx session end hook for Copilot CLI (PowerShell) +# Checks for unsaved context and records heartbeat + +try { ctx system check-context-size 2>$null } catch {} +try { ctx system check-persistence 2>$null } catch {} +try { ctx system check-journal 2>$null } catch {} +try { ctx system check-version 2>$null } catch {} +try { ctx system heartbeat 2>$null } catch {} diff --git a/internal/assets/integrations/copilot-cli/scripts/session-end.sh b/internal/assets/integrations/copilot-cli/scripts/session-end.sh new file mode 100644 index 000000000..776ddcfd5 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/scripts/session-end.sh @@ -0,0 +1,10 @@ +#!/bin/bash +# ctx session end hook for Copilot CLI +# Checks for unsaved context and records heartbeat +set -euo pipefail + +ctx system check-context-size 2>/dev/null || true +ctx system check-persistence 2>/dev/null || true +ctx system check-journal 2>/dev/null || true +ctx system check-version 2>/dev/null || true +ctx system heartbeat 2>/dev/null || true diff --git a/internal/assets/integrations/copilot-cli/scripts/session-start.ps1 b/internal/assets/integrations/copilot-cli/scripts/session-start.ps1 new file mode 100644 index 000000000..a2acdc4b4 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/scripts/session-start.ps1 @@ -0,0 +1,5 @@ +# ctx session start hook for Copilot CLI (PowerShell) +# Bootstraps context and loads the agent packet + +try { ctx system bootstrap 2>$null } catch {} +try { ctx agent 2>$null } catch {} diff --git 
a/internal/assets/integrations/copilot-cli/scripts/session-start.sh b/internal/assets/integrations/copilot-cli/scripts/session-start.sh new file mode 100644 index 000000000..406291792 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/scripts/session-start.sh @@ -0,0 +1,10 @@ +#!/bin/bash +# ctx session start hook for Copilot CLI +# Bootstraps context and loads the agent packet +set -euo pipefail + +# Bootstrap ctx context +ctx system bootstrap 2>/dev/null || true + +# Load AI-optimized context packet +ctx agent 2>/dev/null || true diff --git a/internal/assets/integrations/copilot-cli/skills/_ctx-alignment-audit/SKILL.md b/internal/assets/integrations/copilot-cli/skills/_ctx-alignment-audit/SKILL.md new file mode 100644 index 000000000..0bc0493f3 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/_ctx-alignment-audit/SKILL.md @@ -0,0 +1,61 @@ +--- +name: _ctx-alignment-audit +description: "Audit alignment between docs and agent instructions. Use when docs make claims about agent behavior that may not be backed by the playbook or skills." +tools: [bash, read, glob, grep] +--- + +Audit whether behavioral claims in documentation are backed by +actual agent instructions. + +## When to Use + +- After writing or updating documentation +- After modifying the Agent Playbook or skills +- When a doc makes claims about proactive agent behavior +- Periodically to catch drift between docs and instructions + +## When NOT to Use + +- For code-level drift (use `ctx-drift` instead) +- For context file staleness (use `ctx-status`) +- When reviewing docs for prose quality (not behavioral claims) + +## Process + +### Step 1: Collect Claims + +Read target docs. Extract every behavioral claim: statements +describing what an agent "will do", "may do", or "offers to do". + +### Step 2: Trace Each Claim + +Search for matching instructions in: +1. **AGENT_PLAYBOOK.md**: primary behavioral source +2. **skills/*/SKILL.md**: skill-specific instructions +3. 
**INSTRUCTIONS.md**: project-level instructions + +For each claim, determine: +- **Covered**: matching instruction exists +- **Partial**: related but incomplete +- **Gap**: no instruction exists + +### Step 3: Report + +| Claim (file:line) | Status | Backing instruction | Gap | +|---|---|---|---| +| "agent creates tasks" | Gap | None | Not taught | +| "agent saves learnings" | Covered | Playbook | n/a | + +### Step 4: Fix (if requested) + +For each gap, propose: +- **Playbook addition**: if behavior applies broadly +- **Skill addition**: if specific to one skill +- **Doc correction**: if the claim overpromises + +## Quality Checklist + +- [ ] Every behavioral claim was traced +- [ ] Each claim has clear status (Covered/Partial/Gap) +- [ ] Gaps have proposed fixes +- [ ] No new claims introduced without backing diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-add-convention/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-add-convention/SKILL.md new file mode 100644 index 000000000..dcf929503 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-add-convention/SKILL.md @@ -0,0 +1,47 @@ +--- +name: ctx-add-convention +description: "Record a coding convention. Use when a repeated pattern should be codified so all sessions follow it consistently." +tools: [bash] +--- + +Record a coding convention in CONVENTIONS.md. 
+ +## When to Use + +- When a pattern has been used 2-3 times and should be standardized +- When establishing a naming, formatting, or structural rule +- When a new contributor would need to know "how we do things here" +- When the user says "codify that" or "make that a convention" + +## When NOT to Use + +- One-off implementation details (use code comments instead) +- Architectural decisions with trade-offs (use `ctx-add-decision`) +- Debugging insights or gotchas (use `ctx-add-learning`) +- Rules that are already enforced by linters or formatters + +## Gathering Information + +Conventions are simpler than decisions or learnings. You need: + +1. **Name**: What is the convention called? +2. **Rule**: What is the rule? One clear sentence. +3. **Section**: Where does it belong in CONVENTIONS.md? + +If the user provides only a description, infer the section from the +topic. Check existing sections in CONVENTIONS.md first to place it +correctly: don't create a new section if an existing one fits. + +## Execution + +```bash +ctx add convention "Use kebab-case for all CLI flag names" --section "Naming" +``` + +## Quality Checklist + +- [ ] The rule is clear enough that someone unfamiliar could follow it +- [ ] It is specific to this project (not a general rule) +- [ ] It is not already in CONVENTIONS.md (check first) +- [ ] The section matches an existing section, or a new one is needed +- [ ] It describes a pattern, not a one-time choice (that's a decision) diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-add-decision/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-add-decision/SKILL.md new file mode 100644 index 000000000..82f3698f0 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-add-decision/SKILL.md @@ -0,0 +1,47 @@ +--- +name: ctx-add-decision +description: "Record architectural decision. Use when a trade-off is resolved or a non-obvious design choice is made that future sessions need to know." 
+tools: [bash] +--- + +Record an architectural decision in DECISIONS.md. + +## When to Use + +- After resolving a trade-off between alternatives +- When making a non-obvious design choice +- When the "why" behind a choice needs to be preserved + +## When NOT to Use + +- Minor implementation details (use code comments instead) +- Routine maintenance or bug fixes +- When there was no real alternative to consider + +## Decision Formats + +### Quick Format (Y-Statement) + +> "In the context of **[situation]**, facing **[constraint]**, we decided +> for **[choice]** and against **[alternatives]**, to achieve +> **[benefit]**, accepting that **[trade-off]**." + +### Full Format + +Gather: Context, Alternatives, Decision, Rationale, Consequence. + +## Execution + +```bash +ctx add decision "Use Cobra for CLI framework" \ + --context "Need CLI framework for Go project" \ + --rationale "Better subcommand support, team familiarity" \ + --consequence "More boilerplate, but clearer command structure" +``` + +## Quality Checklist + +- [ ] Context explains the problem clearly +- [ ] At least one alternative was considered +- [ ] Rationale addresses why alternatives were rejected +- [ ] Consequence includes both benefits and trade-offs diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-add-learning/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-add-learning/SKILL.md new file mode 100644 index 000000000..7ac679724 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-add-learning/SKILL.md @@ -0,0 +1,45 @@ +--- +name: ctx-add-learning +description: "Record a learning. Use when discovering gotchas, bugs, or unexpected behavior that future sessions should know about." +tools: [bash] +--- + +Record a learning in LEARNINGS.md. + +## Before Recording + +Three questions: if any answer is "no", don't record: + +1. **"Could someone Google this in 5 minutes?"** → If yes, skip it +2. **"Is this specific to this codebase?"** → If no, skip it +3. 
**"Did it take real effort to discover?"** → If no, skip it + +Learnings should capture **principles and heuristics**, not code snippets. + +## When to Use + +- After discovering a gotcha or unexpected behavior +- When a debugging session reveals root cause +- When finding a pattern that will help future work + +## When NOT to Use + +- General programming knowledge (not specific to this project) +- One-off workarounds that won't recur +- Things already documented in the codebase + +## Execution + +```bash +ctx add learning "Title" \ + --context "What were you doing when you discovered this?" \ + --lesson "What's the key insight?" \ + --application "How should we handle this going forward?" +``` + +## Quality Checklist + +- [ ] Context explains what happened (not just what you learned) +- [ ] Lesson is a principle, not a code snippet +- [ ] Application gives actionable guidance for next time +- [ ] Not already in LEARNINGS.md (check first) diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-add-task/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-add-task/SKILL.md new file mode 100644 index 000000000..7090eaabe --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-add-task/SKILL.md @@ -0,0 +1,54 @@ +--- +name: ctx-add-task +description: "Add a task. Use when follow-up work is identified or when breaking down complex work into subtasks." +tools: [bash] +--- + +Add a task to TASKS.md. + +## Before Recording + +Three questions: if any answer is "no", don't record: + +1. **"Is this actionable?"** → If it's a vague wish, clarify first +2. **"Would someone else know what to do?"** → If not, add more detail +3. **"Is this tracked elsewhere?"** → If yes, don't duplicate + +Tasks should describe **what to do and why**, not just a topic. 
+ +## When to Use + +- When follow-up work is identified during a session +- When breaking down a complex task into subtasks +- When the user mentions something that should be tracked + +## When NOT to Use + +- Vague ideas without clear scope (discuss first, then add) +- Work already completed (mark existing tasks done instead) +- One-line fixes you can do right now (just do it) + +## Execution + +```bash +ctx add task "Task description" [--priority high|medium|low] [--section "Phase N"] +``` + +**Good examples:** +```bash +ctx add task "Add --cooldown flag to ctx agent" --priority medium +ctx add task "Investigate ctx init overwriting user content" --priority high +``` + +**Bad examples (too shallow):** +```bash +ctx add task "Fix bug" # What bug? Where? +ctx add task "Improve performance" # Of what? How? +``` + +## Quality Checklist + +- [ ] Task starts with a verb (Add, Fix, Implement, Investigate, Update) +- [ ] Someone unfamiliar with the session could act on it +- [ ] Not a duplicate of an existing task in TASKS.md (check first) +- [ ] Priority set if the user indicated urgency diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-agent/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-agent/SKILL.md new file mode 100644 index 000000000..06a253f84 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-agent/SKILL.md @@ -0,0 +1,64 @@ +--- +name: ctx-agent +description: "Load full context packet. Use at session start or when context seems stale or incomplete." +--- + +Load the full context packet for AI consumption. 
+ +## When to Use + +- At the start of a session to load all context +- When context seems stale or incomplete +- When switching between different areas of work + +## When NOT to Use + +- The PreToolUse hook already runs `ctx agent` automatically with a cooldown: + you rarely need to invoke this manually +- Don't run it just to "refresh" if you already have the context loaded in + this session + +## After Loading + +**Read the files listed in "Read These Files (in order)"**: the packet is a +summary, not a substitute. In particular, read CONVENTIONS.md before writing +any code. + +Confirm to the user: "I have read the required context files and I'm +following project conventions." Read and confirm before beginning +implementation. + +## Flags + +| Flag | Default | Description | +|--------------|---------|---------------------------------------------------| +| `--budget` | 8000 | Token budget for context packet | +| `--format` | md | Output format: `md` or `json` | +| `--cooldown` | 10m | Suppress repeated output within this duration | +| `--session` | (none) | Session ID for cooldown isolation (e.g., `$PPID`) | + +## Execution + +```bash +ctx agent $ARGUMENTS +``` + +**Example: default load:** +```bash +ctx agent +``` + +**Example: smaller packet for limited contexts:** +```bash +ctx agent --budget 4000 +``` + +**Example: with cooldown (how the PreToolUse hook invokes it):** +```bash +ctx agent --budget 4000 --session $PPID +``` + +**Example: JSON for programmatic use:** +```bash +ctx agent --format json --budget 8000 +``` diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-architecture/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-architecture/SKILL.md new file mode 100644 index 000000000..f402eaee6 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-architecture/SKILL.md @@ -0,0 +1,946 @@ +--- +name: ctx-architecture +description: "Build and maintain architecture maps. 
Use to create or refresh ARCHITECTURE.md and DETAILED_DESIGN.md. Supports principal mode for deeper analysis: vision, future direction, bottlenecks, implementation alternatives, gaps, upstream proposals, and intervention points." +--- + +Build and maintain two architecture documents incrementally: +**ARCHITECTURE.md** (succinct project map, loaded at session start) +and **DETAILED_DESIGN.md** (deep per-module reference, consulted +on-demand). Coverage is tracked in `map-tracking.json` so each run +extends the map rather than re-analyzing everything. + +## Execution Priority + +When time or context budget runs short, execute in this order. +Never skip a tier to do a lower one: + +1. **Authoritative truth first**: ARCHITECTURE.md + DETAILED_DESIGN.md + must be accurate and honest. Incomplete is fine; wrong is not. +2. **Surface uncertainty honestly**: partial coverage with correct + confidence scores beats inflated scores. Mark what you don't know. +3. **Offer judgment only where grounded**: danger zones, extension + points, improvement ideas only for modules you actually analyzed. +4. **Prefer fewer sharp insights over many shallow sections**: a + CHEAT-SHEETS.md with one excellent cheat sheet beats five thin ones. + An ARCHITECTURE-PRINCIPAL.md with three concrete risks beats ten + vague ones. 
+ +## Mode Detection + +Read the invocation for a mode keyword: + +- **No keyword** (or `default`) → run **Default mode** (Phases 0-5 below) +- `principal` → run **Principal mode** (Phases 0-5 + Principal phases P1-P3) + +Examples: +```text +/ctx-architecture +/ctx-architecture principal +/ctx-architecture (principal) +``` + +--- + +## When to Use + +- First time setting up architecture documentation for a project +- Periodically to refresh stale module coverage after significant + changes +- After major refactors, new package additions, or dependency changes +- When the agent nudges that the map is stale (>30 days, commits + detected) +- When you need deep understanding of a module before working on it +- When you want strategic analysis of the architecture (principal mode) + +## When NOT to Use + +- For minor code changes that don't affect module boundaries or + data flow +- When ARCHITECTURE.md just needs a quick path fix (use `/ctx-drift` + instead) +- Repeatedly in the same session without intervening code changes +- When the user has opted out (`opted_out: true` in + map-tracking.json) + +--- + +## Default Mode (Phases 0-5) + +### Phase 0: Check Opt-Out + +Read `.context/map-tracking.json`. If it exists and +`opted_out: true`, say: + +> Architecture mapping is opted out for this project. Delete +> `.context/map-tracking.json` to re-enable. + +Then stop. + +### Phase 0.25: Companion Tool Check + +Check if **Gemini Search** MCP is available by attempting a +simple query. Gemini is used for upstream documentation, design +rationale, KEPs, peer-project patterns - anything outside the +local codebase that helps understand *why* the code is shaped +the way it is. + +**If available**: note it silently. Use Gemini throughout the +analysis for upstream lookups. Prefer it over built-in web search. + +**If not available**: ask the user once: + +``` +Gemini Search is not connected. 
It helps me look up upstream +design docs, KEPs, and peer-project patterns during analysis. + +Want to set it up now, or proceed without it? +``` + +Respect the answer and continue either way. + +**Important**: Gemini is for *upstream* and *external* context +only. Do not use it to understand the local codebase - read the +code directly. The depth of analysis comes from forced reading, +not from search shortcuts. + +### Phase 0.5: Quick Structure Scan + Focus Areas + +Before any deep analysis, do a lightweight structural survey to +discover what the project actually contains. This takes seconds +and makes the focus-area question concrete instead of open-ended. + +**Scan steps** (no file reads - structure only): + +```bash +# Detect ecosystem +ls go.mod package.json Cargo.toml pyproject.toml 2>/dev/null + +# List top-level source directories / packages +# Go: +go list ./... 2>/dev/null | sed 's|.*/||' | sort -u | head -40 +# or: ls internal/ cmd/ pkg/ 2>/dev/null + +# Node/other: ls src/ lib/ packages/ 2>/dev/null + +# Large monorepo guard: if >100 packages, limit to top 2 levels only +find . -mindepth 1 -maxdepth 2 -type d \ + ! -path './.git/*' ! -path './vendor/*' ! -path './node_modules/*' \ + | sort | head -60 +``` + +**Then ask** (present the discovered package/module names): + +``` +I found these top-level packages/modules: + [list from scan] + +Any specific areas you'd like me to go deep on? You can name +packages from the list above, describe subsystems (e.g. "the +reconciler loop", "auth handling"), or say "all" for a uniform +pass. + +Skip or press enter to do a standard uniform pass. 
+``` + +**If focus areas are given**, carry them forward: +- Phase 2 goes deep on focus packages (target confidence ≥ 0.8) +- Direct dependencies of focus packages get a solid pass (≥ 0.7) +- All other packages are stubbed (0.2) unless they appear as + transitive dependencies +- DETAILED_DESIGN.md sections for focus packages are written first + and in full detail +- Principal mode Phase P2 strategic questions reference the focus + areas explicitly + +**If "all" or no answer**, proceed with standard uniform analysis. + +### Phase 1: Assess Current State + +Determine if this is a **first run** or **subsequent run**: + +- **First run**: no `.context/map-tracking.json` exists +- **Subsequent run**: tracking file exists with coverage data + +For subsequent runs, identify the **frontier**: modules that need +analysis: + +1. Read `map-tracking.json` for coverage state +2. For each covered module, check staleness: + +```bash +git log --oneline --since="" \ +-- / +``` + +3. Frontier = uncovered modules + stale modules (commits after + `last_analyzed`) + low-confidence modules (confidence < 0.7) + +### Phase 2: Survey (First Run) or Analyze Frontier (Subsequent Run) + +**First run: full survey:** + +0. Run `ctx deps` to bootstrap the dependency graph: + ```bash + ctx deps + ``` + Auto-detects the ecosystem (Go, Node.js, Python, Rust) from + manifest files. Use this as the starting point for "Package + Dependency Graph": verify and enrich with semantic context. + +1. Read the project manifest for project identity (name, version, + description): `ctx deps` covers the dependency tree +2. Explore directory structure: + ```bash + ctx status + ``` +3. Read key files in each package: exported types, functions, + imports +4. Trace data flow through main entry points +5. Identify architectural patterns (dependency injection, + interfaces, registries) + +**Subsequent run: targeted analysis:** + +1. For each frontier module, read its source files +2. 
Trace data flow and dependencies +3. Note changes since last analysis +4. Update confidence based on depth of understanding + +### Phase 3: Update Documents + +**ARCHITECTURE.md**: update ONLY if module boundaries, dependency +graph, data flow, or key patterns changed. Internal implementation +changes do NOT warrant updates. Target: under 4000 tokens (~16KB) +so ARCHITECTURE.md loads within the session-start context budget. + +Required sections: +- Overview (design philosophy, key concepts) +- Package Dependency Graph (mermaid `graph TD`) +- Component Map (tables: package, purpose, depends on) +- Data Flow (mermaid sequence diagrams for key operations) +- Key Architectural Patterns +- File Layout (ASCII tree) + +**DETAILED_DESIGN.md**: update per-module sections using this +format: + +```markdown +## + +**Purpose**: One-line description. + +**Key types**: List main structs/interfaces. + +**Exported API**: +- `FuncName()`: what it does +- `Type.Method()`: what it does + +**Data flow**: Entry → Processing → Output + +Include an ASCII sequence diagram when there are 3+ actors or +non-obvious ordering: + +``` +Caller Scheduler Worker +|--schedule()-->| | +| |--dispatch()-->| +| |<--result------| +|<--done--------| | +``` + +Include an ASCII state diagram when the module manages lifecycle +or status transitions: + +``` +[Init] --configure()--> [Ready] --start()--> [Running] +| | +error()---------| |--stop()-->[ Stopped] +| [Stopped] --reset()--> [Ready] +[Failed] +``` + +Use plain ASCII (not mermaid) for DETAILED_DESIGN.md - it renders +in any terminal, editor, or raw file view without a renderer. +Reserve mermaid for ARCHITECTURE.md only. + +**Edge cases**: +- Condition → behavior + +**Performance considerations**: +- Known or likely bottlenecks (hot paths, allocation pressure, + lock contention, I/O bound operations) +- Scale assumptions baked into the design (e.g. 
"assumes <1000 + items", "single-threaded reconcile loop") +- What breaks first under load + +**Danger zones** (top 3 riskiest modification points): +1. `` - why it's dangerous (hidden coupling, + ordering assumption, shared mutable state, etc.) +2. ... +3. ... + +**Control loop & ownership** (if the module participates in +reconciliation or state management): +- What owns the reconciliation for this module's resources? +- What is source of truth vs. derived/cached state? +- What triggers re-reconciliation? + +**Extension points** (where features would naturally attach): +- `` - what kind of extension fits here + +**Improvement ideas** (1-3 concrete suggestions, not generic): +- `` - what it fixes and why it's feasible + +**Dependencies**: list of internal packages used +``` + +**Splitting DETAILED_DESIGN.md when it grows large:** + +When DETAILED_DESIGN.md exceeds ~600 lines or covers 3+ natural +domains, split into domain files and keep a shallow index: + +- `DETAILED_DESIGN.md` - index only (domain name, file pointer, + module list, one-line domain purpose) +- `DETAILED_DESIGN-.md` - full module sections for that + domain + +Domains are natural groupings, not arbitrary splits. Examples: +- storage, auth, api, reconciler, cli, observability +- If no natural grouping exists, split by: core vs. peripheral + +Index format: +```markdown +# Detailed Design Index + +| Domain | File | Modules | Summary | +|---------|----------------------------|----------------------|-------------------| +| storage | DETAILED_DESIGN-storage.md | pkg/store, pkg/cache | Persistence layer | +| auth | DETAILED_DESIGN-auth.md | pkg/authn, pkg/authz | Identity + policy | + +> See individual files for module-level detail. +``` + +Update `map-tracking.json` to record which domain file each module +lives in: +```json +"pkg/store": { + "domain_file": "DETAILED_DESIGN-storage.md", + ... +} +``` + +Each section is self-contained. 
The agent reads specific sections +when working on a module, not the entire file. + +**CHEAT-SHEETS.md**: write (or update) short mental models for +key lifecycle flows. One cheat sheet per major lifecycle or flow +identified in the codebase. Format: + +```markdown +## + +Steps: +1. +2. +3. ... + +Key invariants: +- + +Common failure modes: +- + +Flow (ASCII - include when sequence or state is non-obvious): + + [Trigger] --> [Step A] --> [Step B] --> [Done] + | + [Error] --> [Retry] --> [Dead Letter] +``` + +Aim for cheat sheets that fit on one screen. If a flow needs more +than ~15 steps, split it. Write cheat sheets for at minimum: +- The main entry-point lifecycle (e.g. controller reconcile loop, + request handler, CLI command dispatch) +- Any policy or rule evaluation flow +- Any significant async or background job lifecycle + +Skip if the project has no meaningful lifecycles (e.g. a pure +library with no runtime behavior). + +**GLOSSARY.md**: append project-specific terms discovered during +analysis. This captures the vocabulary that makes the codebase +searchable: type names, internal concepts, abbreviations, and +domain jargon that a new reader wouldn't know to search for. + +Rules: +- Skip entirely if `.context/GLOSSARY.md` does not exist (the + project hasn't opted into a glossary) +- Additive only: never modify or remove existing entries +- Maximum 10 new terms per run to avoid flooding +- Project-specific terms only: skip generic programming concepts + (e.g. "mutex", "goroutine") and well-known patterns (e.g. + "singleton"). 
Include terms that are unique to this codebase or + used in a project-specific way +- Insert alphabetically into the existing list +- Format: `**Term**: one-line definition` +- Print added terms in the convergence report under a + "Glossary additions" line + +### Phase 4: Update Tracking + + +Write `.context/map-tracking.json` with: + +```json +{ + "version": 1, + "opted_out": false, + "opted_out_at": null, + "last_run": "<ISO-8601 timestamp>", + "coverage": { + "<module path>": { + "last_analyzed": "<ISO-8601 timestamp>", + "confidence": <0.0-1.0>, + "files_seen": ["file1.go", "file2.go"], + "notes": "Brief summary of understanding" + } + } +} +``` + +### Phase 5: Convergence Report + Search Prompts + +Print a structured convergence report AND write it to +`.context/CONVERGENCE-REPORT.md`. The printed version is the +primary output the user reads. The file version is the artifact +that `/ctx-architecture-enrich` and future sessions consume. + +The source of truth for confidence scores is `map-tracking.json`. +`CONVERGENCE-REPORT.md` is a human-readable view of that data - +if they ever conflict, `map-tracking.json` wins. + +**Format:** + +``` +## Convergence Report + +### By Module + +| Module | Confidence | Status | Blocker | +|--------|------------|--------|---------| +| pkg/foo | 0.9 | ✅ Converged | - | +| pkg/bar | 0.6 | 🔶 Shallow | Internal flow unclear | +| pkg/baz | 0.2 | 🔴 Stubbed | Not analyzed | + +### By Domain (if natural groupings exist) + +Group related modules and show aggregate coverage: + e.g. 
"Auth layer: 2/3 modules converged (avg 0.72)" + +### Overall + +- Total modules: N +- Converged (≥ 0.9): N ✅ +- Solid (0.7-0.89): N 🟡 +- Shallow (0.4-0.69): N 🔶 +- Stubbed (< 0.4): N 🔴 + +### What Would Help Next + +For each non-converged module, print a specific suggestion: + +🔶 pkg/bar (0.6) - Shallow + → Read the test files to understand expected behavior under + edge cases: `pkg/bar/*_test.go` + → Trace the internal flow through + → Ask: "walk me through what happens when X" + +🔴 pkg/baz (0.2) - Not analyzed + → Run /ctx-architecture with focus area: pkg/baz + → Or: open pkg/baz/README.md if present + +### Convergence Verdict + +One of: +- ✅ CONVERGED - all modules ≥ 0.9, frontier empty. Further runs + without code changes won't improve coverage. +- 🟡 MOSTLY CONVERGED - core modules ≥ 0.9, peripheral modules + shallow. Diminishing returns on full re-run; use focus areas. +- 🔶 PARTIAL - significant modules below 0.7. Re-run with focus + areas or read tests. +- 🔴 INCOMPLETE - substantial portions unanalyzed. Run again. +``` + +**Convergence thresholds:** +- Module is **converged** at confidence ≥ 0.9 +- Project is **converged** when all non-peripheral modules ≥ 0.9 +- Peripheral = no other modules depend on it AND it has no + exported API surface (pure internal helpers, generated code, + vendor) + +**Blocker vocabulary** (use these consistently in the table): +- `Internal flow unclear` - exports known, internals not traced +- `Not analyzed` - directory listed only +- `Tests not read` - implementation known, behavior under edge + cases unknown +- `Design rationale unknown` - code understood, "why" is unclear +- `Converged` - nothing left to learn from static reading + +--- + +After printing the convergence verdict, append a **Search Prompts** +section. The skill has just read the codebase and knows its jargon - +this is the most useful thing it can hand back to someone who is +not blocked by intelligence but by not knowing the right words. 
+ +**Format:** + +``` +## Search Prompts + +The right keyword changes everything. Based on what I found in +the codebase, here are targeted searches worth running - in your +internal docs, Confluence, Notion, Slack, or publicly: + +### Fill the gaps (ranked by how much they'd help) + +For modules/areas still below 0.9: + +🔶 pkg/bar - Internal flow unclear + Try searching: + - " design" or " internals" + - " " + - "why does use " (ADR or design doc) + +🔴 pkg/baz - Not analyzed + Try searching: + - " explained" + - " behavior" + +### Concepts worth understanding deeply + +List 3-5 technical concepts the codebase clearly depends on but +that can't be learned from the code alone. Give the exact search +phrase, not a topic: + +- " explained" - e.g. "etcd watch semantics + explained", "CRDT merge strategies", "OIDC token refresh flow" +- " tradeoffs" - e.g. "saga pattern vs 2PC tradeoffs" + +### Architecture decision records (if relevant) + +If the code shows signs of a deliberate non-obvious choice +(e.g. custom retry logic instead of a library, unusual data +structure), suggest: + - " ADR" + - " RFC" + - "why doesn't use " + +--- +Note: I won't run these searches for you - you may have internal +docs where these are more useful than public results, and you know +which sources to trust. Pick the phrases that match what's blocking +you. 
+``` + +**Rules for this section:** +- Always generate search prompts, even for converged modules - + there's always design rationale that code can't express +- Phrases must be concrete and use actual names/types from the + codebase - no generic "learn more about X" fluff +- Rank by usefulness: gaps in shallow modules first, concepts + second, ADRs third +- Maximum ~10 phrases total; fewer sharp ones beat many vague ones +- Default: do NOT run the searches yourself +- Exception: if Gemini Search is available, you MAY run upstream + searches for KEPs, design docs, peer-project patterns, and ADRs + - but only for concepts the codebase shows clear dependency on. + Note what you searched and what you found. This applies in any + mode, not just principal mode. +- If Gemini is not available and the user requested principal-mode + depth, you may fall back to built-in web search for the same + purpose + +--- + +## Principal Mode (Phases 0-5 + P1-P3) + +Run all default mode phases first (0-5), then continue below. +Principal mode is for strategic thinking - beyond "what is" to +"what could be" and "what should concern us." + +### Phase P1: Extended Context Gathering + +In addition to the default phase sources, read: + +- `.context/TASKS.md` - outstanding work, future plans +- `CHANGELOG.md` or `docs/changelog.md` - trajectory of decisions +- `docs/` - any design rationale in user-facing docs +- Recent git log: `git log --oneline -30` + +### Phase P2: Gather Strategic Context + +Two-tier behavior - do not stall: + +**If answers are available** (user provided them in the prompt, +or they exist in `.context/TASKS.md` / `DECISIONS.md`): use them. +Do not ask for what you already have. + +**If answers are not available**: do NOT stop. Generate a +provisional principal analysis with assumptions explicitly labeled +(see Principal Mode Fallback below). Include a "Questions That +Would Sharpen This" section at the end of ARCHITECTURE-PRINCIPAL.md. 
+ +When asking the user, present all questions at once as a numbered +list - do not ask one-at-a-time: + +``` +Before I write the principal analysis, a few questions - skip +or say "unsure" on anything you don't know: + +0. **Focus areas** (if not already set in Phase 0.5) + +1. **Vision**: What is this project trying to become in 12-24 months? + +2. **Future direction**: Any architectural pivots being considered? + (plugin system, multi-tenant, cloud sync, daemon model, etc.) + +3. **Known bottlenecks**: Where does the current design hurt you? + +4. **Implementation alternatives**: Any decisions you'd do + differently starting fresh? + +5. **Gaps**: What's missing that you expect to need? + +6. **Areas of improvement**: Known tech debt or structural awkwardness? +``` + +### Phase P3: Write Principal Analysis + +After collecting answers, write `.context/ARCHITECTURE-PRINCIPAL.md` +(separate from `ARCHITECTURE.md` - speculation must not pollute +the authoritative doc). + +```markdown +# Architecture - Principal Analysis +_Generated . Strategic analysis only; see ARCHITECTURE.md +for the authoritative architecture reference._ + +## Current State Summary +[Condensed narrative of the current architecture - ~1 page max] + +## Vision Alignment +[How does the current architecture support or constrain the stated +vision? What structural changes would enable it?] + +## Future Direction +[Architectural implications of planned pivots or new capabilities. +What would need to change if [feature X] were added?] 
+ +## Known Bottlenecks +[Analysis of performance, scalability, or dev-experience pain +points identified in the codebase or raised by the user] + +## Implementation Alternatives +[For 2-3 key design decisions: current approach, alternatives, +tradeoffs] + +## Gaps +[Missing capabilities or abstractions the architecture doesn't +handle yet but probably will need to] + +## Areas of Improvement +Ranked by impact/effort: +- **High impact, low effort** (do first) +- **High impact, high effort** (plan for) +- **Low impact** (defer or skip) + +## Risks +[Architectural risks as the system scales, team grows, or +requirements evolve] + +## Intervention Points +Top 5 highest-leverage places to implement new features or +improvements, ranked by impact/effort: +1. `` - what kind of change fits here and why +2. ... + +(These are concrete locations - package paths, interface names, +function boundaries - not vague subsystem labels.) + +## Upstream Proposals +2-3 changes worth proposing to the project upstream (KEP / RFC / +issue style thinking). For each: +- **What**: one-sentence description of the change +- **Why**: what problem it solves that the current design can't +- **Where**: which abstraction boundary it touches +- **Risk**: what it breaks or complicates + +Each proposal must cross an abstraction boundary - it must affect +how modules interact, not just refactor internals. If it doesn't +change an interface, a contract, or an ownership boundary, it's +not upstream-worthy; it's a local improvement (put it in +Improvement Ideas instead). + +## Productization Gaps +What would need to change for this to work at enterprise scale? +- Multi-cluster / multi-tenant gaps +- Observability and debuggability holes +- Operational hardening missing from current design +- What a large customer would hit first + +## Failure-First Analysis +[Hidden assumptions baked into the architecture. What breaks +silently vs. loudly? What would cause a cascade? 
What does the +system assume about its environment that may not hold?] + +## Onboarding Friction +[Practical, not theoretical - this is what a new engineer actually +hits in week one:] +- What makes this system hard to understand quickly? +- Which modules require tribal knowledge to use safely? +- Where would a new engineer get stuck first, and why? +- What isn't written down anywhere? +``` + +**Boundary hygiene** - ARCHITECTURE-PRINCIPAL.md is for synthesis, +leverage, risk, direction, and judgment. Do NOT restate module +details that already exist in DETAILED_DESIGN.md. Reference module +paths only where needed to ground an argument. If you find yourself +summarizing what a module does, stop - link to it instead. + +**Principal mode fallback** - if Phase P2 answers were not provided, +label speculative sections clearly and add at the end: + +```markdown +## Questions That Would Sharpen This Analysis + +Answering any of these would move speculative sections to grounded ones: + +1. **Vision** - What is this project trying to become in 12-24 months? +2. **Future direction** - Any architectural pivots being considered? +3. **Known bottlenecks** - Where does the current design hurt? +4. **Assumptions marked** - These sections are labeled [inferred]: + [list them] +``` + +**Autonomous inferences** - principal mode must also answer the +following from the codebase alone, without waiting for user input. +These are things the code is silently deciding. Surface them: + +- Where are abstraction boundaries likely to calcify under growth? +- Which current APIs are accidentally becoming public contracts? +- What will become expensive when team size or data volume doubles? +- Where is the architecture optimized for current workflow rather + than long-term extensibility? +- Which parts are structurally elegant but strategically wrong for + the likely future? + +These go in a dedicated "Silent Choices" section in +ARCHITECTURE-PRINCIPAL.md. The code is making bets - name them. 
+ +**Opinion floor** - ARCHITECTURE-PRINCIPAL.md must contain at minimum: +- 3 risks (specific, not "this could be slow") +- 3 improvement ideas (concrete, not "add more tests") +- 2 upstream opportunities (actionable, not "contribute more") + +Generate opinions, not just descriptions. If you find yourself +writing neutral summaries, push harder. + +When in doubt, prefer a strong, falsifiable opinion over a safe, +generic one. Weak opinions are noise; strong opinions can be +corrected. + +**Cross-project comparison** (include when the codebase shows +non-obvious design choices or when focus areas have well-known +peers): + +For any module where a comparable exists in another project, add: +```markdown +### Compared to / + +- What does differently +- What does better +- What could be unified or learned from +``` + +Examples worth comparing when relevant: +- Velero vs Stash (backup) +- controller-runtime reconciler vs custom loops +- Gatekeeper vs Kyverno (policy) +- Any CNCF project vs its closest peer + +Skip if no meaningful peer exists. Do not force comparisons. + +Be direct. This document is for engineering judgment, not external +audiences. + +### Phase P4: Write DANGER-ZONES.md + +Extract danger zones from all DETAILED_DESIGN.md module sections +and compile them into a standalone `.context/DANGER-ZONES.md`. +This is the consolidated view - one document a reviewer or new +engineer can read to know where the dragons live. + +```markdown +# Danger Zones + +_Generated from DETAILED_DESIGN.md danger zone sections. +Run `/ctx-architecture-enrich` to add verified blast radius data._ + +## Summary + +| Module | Zone | Risk | Why | +|--------|------|------|-----| +| | | HIGH/MEDIUM/LOW | one-line reason | + +## By Module + +### + +1. **** - + - Hidden coupling / ordering assumption / shared mutable state + - Modification advice: + +2. ... 
+``` + +**Rules:** +- Only include danger zones from modules actually analyzed + (confidence ≥ 0.4) +- Risk level is the skill's judgment based on code reading: + HIGH (will break things), MEDIUM (likely to cause subtle bugs), + LOW (worth knowing but manageable) +- `/ctx-architecture-enrich` can later add verified blast radius + numbers - leave room for that (don't claim precision you don't + have from reading alone) +- If no danger zones were identified, skip the file entirely + rather than writing an empty one + +--- + +## Confidence Rubric + +Score by **decision usefulness**, not descriptive completeness. +Ask: "What could an engineer safely do with this understanding?" + +| Level | Decision usefulness | +|------------|------------------------------------------------------------------------------| +| 0.0 - 0.3 | Stubbed: not safe to make any decisions; directory listed only | +| 0.4 - 0.6 | Shallow: can describe purpose; not safe to modify without more reading | +| 0.7 - 0.79 | Safe to make localized changes with care; can review simple PRs | +| 0.8 - 0.89 | Can reason about design tradeoffs; safe to design changes in this module | +| 0.9 - 1.0 | Can predict likely breakage from non-trivial changes; safe to own the module | + +Inflate scores and you lie to the next agent that reads the tracking +file. Under-score and the convergence report will never clear. +Score the decision-usefulness honestly. + +## Opt-Out Handling + +If the user says "never", "don't ask again", or similar: + +1. Set `opted_out: true` and `opted_out_at: ""` in + map-tracking.json +2. Confirm: "Noted: won't ask again. Delete + `.context/map-tracking.json` to re-enable." +3. On future invocations, exit immediately with brief message + +## Nudge Behavior + +The agent MAY suggest `/ctx-architecture` during session start when: + +- **No tracking file**: "This project doesn't have an architecture + map yet. Want me to run `/ctx-architecture`?" 
+- **Stale (>30 days)**: "The architecture map hasn't been updated + since and there are commits touching modules. Want me + to refresh?" +- **Opted out**: say nothing + +The nudge is a suggestion, not automatic execution. + +## Quality Checklist + +After running, verify: +- [ ] ARCHITECTURE.md is under 4000 tokens (~16KB) +- [ ] ARCHITECTURE.md has all required sections (Overview, Dependency + Graph, Component Map, Data Flow, Key Patterns, File Layout) +- [ ] DETAILED_DESIGN.md uses consistent per-module format +- [ ] Each module section has Purpose, Key types, Exported API, + Data flow, Edge cases, Performance considerations, Control + loop & ownership (if applicable), Danger zones, Extension + points, Improvement ideas, Dependencies +- [ ] ASCII sequence diagram included when 3+ actors or + non-obvious ordering +- [ ] ASCII state diagram included when module manages lifecycle + or status transitions +- [ ] No mermaid in DETAILED_DESIGN.md (ASCII only) +- [ ] If DETAILED_DESIGN.md > ~600 lines or 3+ domains: split + into domain files with shallow index +- [ ] map-tracking.json records domain_file for each module + when split +- [ ] map-tracking.json is valid JSON with version, coverage entries +- [ ] Confidence levels are honest (not inflated) +- [ ] Stale modules were re-analyzed, not just marked current +- [ ] ARCHITECTURE.md was only updated for boundary/flow/dependency + changes, not internal implementation details +- [ ] Convergence report printed with per-module table +- [ ] Domain groupings shown if natural groupings exist +- [ ] Each non-converged module has a specific "what would help" + suggestion (not generic advice) +- [ ] Overall convergence verdict stated (CONVERGED / MOSTLY / + PARTIAL / INCOMPLETE) +- [ ] Blocker column uses consistent vocabulary +- [ ] Search Prompts section printed after convergence verdict +- [ ] Search phrases use actual type/function/pattern names from + the codebase (not generic topics) +- [ ] Phrases ranked: shallow-module 
gaps first, concepts second, + ADRs third +- [ ] No more than ~10 phrases total +- [ ] Skill did NOT run local-code searches itself (upstream + searches via Gemini are allowed) +- [ ] CONVERGENCE-REPORT.md written to .context/ (not just printed) +- [ ] Phase 0.25 Gemini check completed (available or user declined) +- [ ] Phase 0.5 structure scan was run before any deep analysis +- [ ] Focus areas question was asked with actual package names (not + open-ended) +- [ ] If focus areas given: deep analysis concentrated there; other + packages stubbed at 0.2 unless direct dependencies +- [ ] Principal mode: P2 answers used if available; if not, + provisional analysis written with [inferred] labels +- [ ] Principal mode: "Questions That Would Sharpen This" section + present if P2 answers were not provided +- [ ] Principal mode: output written to `ARCHITECTURE-PRINCIPAL.md`, + not overwriting `ARCHITECTURE.md` +- [ ] Principal mode: "Silent Choices" section present (autonomous + inferences from code - abstraction calcification, accidental + contracts, scale costs, strategic bets) +- [ ] Principal mode: ARCHITECTURE-PRINCIPAL.md does not restate + DETAILED_DESIGN.md content - links to module paths instead +- [ ] CHEAT-SHEETS.md written with at least one lifecycle flow +- [ ] Each cheat sheet fits ~one screen; long flows are split +- [ ] Danger zones section present in each DETAILED_DESIGN module + (top 3, with reasoning - not just "this is complex") +- [ ] Extension points section present in each module +- [ ] Principal mode: Failure-First Analysis section written +- [ ] Principal mode: Onboarding Friction section present (practical, + week-one concerns - not generic "hard to understand") +- [ ] Principal mode: Upstream Proposals cross abstraction boundaries + (not internal refactors) +- [ ] Principal mode: Intervention Points section present (concrete + locations, not vague labels) +- [ ] Principal mode: Upstream Proposals section present (2-3 items + with 
what/why/where/risk) +- [ ] Principal mode: Productization Gaps section present +- [ ] Principal mode: opinion floor met (≥3 risks, ≥3 improvements, + ≥2 upstream opportunities - specific, not generic) +- [ ] Principal mode: cross-project comparisons included where + meaningful peers exist (not forced) +- [ ] Principal mode: DANGER-ZONES.md written with consolidated + danger zones from all analyzed modules (skip if none found) +- [ ] Principal mode: DANGER-ZONES.md includes summary table and + per-module breakdown with risk levels and modification advice +- [ ] GLOSSARY.md: new terms added alphabetically (max 10, project- + specific only, skipped if file doesn't exist) +- [ ] Convergence report includes "Glossary additions" line if + terms were added diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-archive/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-archive/SKILL.md new file mode 100644 index 000000000..8e5da8b12 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-archive/SKILL.md @@ -0,0 +1,57 @@ +--- +name: ctx-archive +description: "Archive completed tasks. Use when TASKS.md has many completed items cluttering the view." +--- + +Move completed tasks from TASKS.md to the archive. + +## Before Archiving + +Two questions: if any answer is "no", don't archive: + +1. **"Are the completed tasks cluttering the view?"** → If TASKS.md is + still easy to scan, there's no urgency +2. 
**"Are all `[x]` items truly done?"** → Verify nothing was checked off + prematurely + +## When to Use + +- When TASKS.md has many completed `[x]` tasks +- When the task list is hard to navigate +- Periodically to keep context clean + +## When NOT to Use + +- When there are only a few completed tasks (not worth the noise) +- When you're unsure if tasks are truly complete (verify first) +- **Never delete tasks**: only archive (CONSTITUTION invariant) + +## Constitution Rules + +These are inviolable: + +- **Archival is allowed, deletion is not**: never delete context history +- **Archive preserves structure**: Phase headers are kept for traceability +- **Never move tasks**: tasks stay in their Phase section; archiving is + the only sanctioned "move" and it's to the archive directory + +## Execution + +```bash +ctx task archive $ARGUMENTS +``` + +**Example: preview first (recommended):** +```bash +ctx task archive --dry-run +``` + +**Example: archive after confirming the preview:** +```bash +ctx task archive +``` + +Archived tasks go to `archive/tasks-YYYY-MM-DD.md` in the context directory, +preserving Phase headers for traceability. + +Report how many tasks were archived and where the archive file was written. diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-blog-changelog/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-blog-changelog/SKILL.md new file mode 100644 index 000000000..7442862ca --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-blog-changelog/SKILL.md @@ -0,0 +1,139 @@ +--- +name: ctx-blog-changelog +description: "Generate themed blog post from commits. Use when writing about changes between releases or documenting a development arc." +--- + +Generate a blog post about changes since a specific commit, with a given theme. + +## Before Writing + +Two questions; if any answer is "no", reconsider: + +1. **"Is there enough change to tell a story?"** → A handful of typo + fixes doesn't warrant a post +2. 
**"Is the theme clear?"** → If the commit range covers unrelated + work, narrow the scope or split into multiple posts + +## When to Use + +- When documenting changes between releases +- When writing about a development arc or theme +- When the user wants to explain "what changed and why" + +## When NOT to Use + +- For general project updates without a commit range (use `/ctx-blog`) +- When the changes are minor or routine maintenance +- When there's no unifying theme across the commits + +## Input + +Required: +- **Commit hash**: Starting point (e.g., `040ce99`, `HEAD~50`, `v0.1.0`) +- **Theme**: The narrative angle (e.g., "human-assisted refactoring", + "the recall system") + +Optional: +- **Reference post**: An existing post to match the style + +## Usage Examples + +```text +/ctx-blog-changelog 040ce99 "human-assisted refactoring" +/ctx-blog-changelog HEAD~30 "building the journal system" +/ctx-blog-changelog v0.1.0 "what's new in v0.2.0" +``` + +## Process + +1. **Analyze the commit range**: +```bash +git log --oneline ..HEAD +git diff --stat ..HEAD +git log --format="%s" ..HEAD | head -50 +``` + +2. **Gather supporting context**: +```bash +# Files most changed +git diff --stat ..HEAD | sort -t'|' -k2 -rn | head -20 + +# Journal entries from this period +ctx journal source +``` + +3. **Draft the narrative** following the theme +4. Save to `docs/blog/YYYY-MM-DD-slug.md` +5. **Update `docs/blog/index.md`** with an entry at the top: + +```markdown +### [Post Title](YYYY-MM-DD-slug.md) + +*Author / Date* + +2-3 sentence blurb. 
+ +**Topics**: topic-one, topic-two, topic-three + +--- +``` + +## Blog Structure + +### Frontmatter + +```yaml +--- +title: "[Theme]: [Specific Angle]" +date: YYYY-MM-DD +author: [Ask user] +topics: + - topic-one + - topic-two + - topic-three +--- +``` + +### Body + +```markdown +# [Title] + +![ctx](../images/ctx-banner.png) + +> [Hook related to theme] + +## The Starting Point +[State of codebase at , what prompted the change] + +## The Journey +[Narrative of changes, organized by theme not chronology] + +## Before and After +[Comparison table or code diff showing improvement] + +## Key Commits + +| Commit | Change | +|--------|-------------| +| abc123 | Description | + +## Lessons Learned +[Insights from this work] + +## What's Next +[Future work enabled by these changes] +``` + +## Style Guidelines + +- **Personal voice**: Use "I", "we", share the journey +- **Show don't tell**: Include actual code, commits, diffs +- **Tables for comparisons**: Before/after, key commits +- **Honest about failures**: Include what went wrong and why +- **Concrete examples**: Reference specific files, commits, decisions +- **No em-dashes**: Use `:`, `;`, or restructure the sentence instead +- **Straight quotes only**: Use "dumb quotes" (`"`, `'`), never + typographic/curly quotes +- **80-character line width**: Wrap prose at ~80 characters; exceptions + for tables, code blocks, and URLs diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-blog/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-blog/SKILL.md new file mode 100644 index 000000000..61ba09db6 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-blog/SKILL.md @@ -0,0 +1,145 @@ +--- +name: ctx-blog +description: "Generate blog post draft. Use when documenting project progress, sharing learnings, or writing about development experience." +--- + + + +Generate a blog post draft from recent project activity. 
+ +## Before Writing + +Two questions: if any answer is "no", reconsider: + +1. **"Is there a narrative arc?"** → A blog post needs a story (problem → + approach → outcome), not just a list of changes +2. **"Would someone outside the project learn something?"** → If the + insight is only useful internally, use LEARNINGS.md instead + +## When to Use + +- When documenting significant project progress +- When sharing learnings publicly +- When the user wants to write about the development experience + +## When NOT to Use + +- For internal-only notes (use session saves or LEARNINGS.md) +- When the work is still in progress with no clear insight yet +- For changelogs (use `/ctx-blog-changelog` instead) + +## Input + +The user may specify: +- A time range: `last week`, `since Monday`, `January` +- A topic focus: `the refactoring`, `new features`, `lessons learned` +- Or just run it to analyze recent activity + +## Sources to Analyze + +Gather context from multiple sources: + +```bash +# Recent commits +git log --oneline -30 + +# Recent decisions +ctx status --verbose # or read DECISIONS.md directly + +# Recent learnings +ctx status --verbose # or read LEARNINGS.md directly + +# Recent tasks completed +ctx status # shows active and completed task counts + +# Journal entries (if available) +ctx journal source --limit 10 +``` + +## Blog Post Structure + +### Frontmatter + +```yaml +--- +title: "Descriptive Title: What This Post Is About" +date: YYYY-MM-DD +author: [Ask user] +topics: + - topic-one + - topic-two + - topic-three +--- +``` + +### Body + +```markdown +# Title + +![ctx](../images/ctx-banner.png) + +> Opening hook or question + +[Introduction: Set the scene, why this matters] + +## Section 1: The Context/Problem +[What situation led to this work] + +## Section 2: What We Did +[Narrative of the work, with code examples] + +## Section 3: What We Learned +[Key insights, gotchas, patterns discovered] + +## Section 4: What's Next +[Future work, open questions] +``` + 
+## Style Guidelines + +- **Personal voice**: Use "I", "we", share the journey +- **Show don't tell**: Include actual code, commits, quotes +- **Tables for comparisons**: Before/after, patterns found +- **Honest about failures**: Include what went wrong and why +- **Concrete examples**: Reference specific files, commits, decisions +- **No em-dashes**: Use `:`, `;`, or restructure the sentence instead +- **Straight quotes only**: Use "dumb quotes" (`"`, `'`), never + typographic/curly quotes +- **80-character line width**: Wrap prose at ~80 characters; exceptions + for tables, code blocks, and URLs + +## Process + +1. Gather sources (git, decisions, learnings, journals) +2. Identify the narrative arc (what's the story?) +3. Draft outline for user approval +4. Write full draft +5. Ask for revisions +6. Save to `docs/blog/YYYY-MM-DD-slug.md` +7. **Update `docs/blog/index.md`**: add entry at the top following the + existing pattern: + +```markdown +### [Post Title](YYYY-MM-DD-slug.md) + +*Author / Date* + +2-3 sentence blurb. + +**Topics**: topic-one, topic-two, topic-three + +--- +``` + +## Example Invocations + +``` +/ctx-blog about the cooldown feature we just built +/ctx-blog last week's refactoring work +/ctx-blog lessons learned from hook design +``` diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-brainstorm/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-brainstorm/SKILL.md new file mode 100644 index 000000000..52503003c --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-brainstorm/SKILL.md @@ -0,0 +1,243 @@ +--- +name: ctx-brainstorm +description: "Design before implementation. Use before any creative or constructive work (features, architecture, behavior changes) to transform vague ideas into validated designs." +--- + +Transform raw ideas into **clear, validated designs** through +structured dialogue **before any implementation begins**. + +## Before Brainstorming + +1. 
**Check if design is needed**: is the change complex enough + to warrant a design phase, or is the solution already clear? +2. **Review prior art**: check `.context/DECISIONS.md` for + related past decisions; do not re-litigate settled choices +3. **Identify what exists**: read relevant code and docs before + asking questions; do not ask the user things the codebase + already answers + +## When to Use + +- Before implementing a new feature +- Before architectural changes +- Before significant behavior modifications +- When an idea is vague and needs shaping + +## When NOT to Use + +- Bug fixes with clear solutions +- Routine maintenance tasks +- When requirements are already well-defined +- Small, isolated changes (just do them) +- When the user explicitly wants to jump straight to code + +## Usage Examples + +```text +/ctx-brainstorm +/ctx-brainstorm (new caching layer for the API) +/ctx-brainstorm (should we split the monolith?) +``` + +## Operating Mode + +Design facilitator, not builder. + +- No implementation while brainstorming +- No speculative features +- No silent assumptions +- No skipping ahead + +**Slow down just enough to get it right.** + +## The Process + +### 1. Understand Current Context + +Before asking questions: + +- Review project state: files, docs, prior decisions +- Check `.context/DECISIONS.md` for related past decisions +- Identify what exists vs what is proposed +- Note implicit constraints + +**Do not design yet.** + +### 2. Clarify the Idea + +Goal: **shared clarity**, not speed. + +Rules: +- Ask **one question per message** +- Prefer **multiple-choice** when possible +- Split complex topics into multiple questions + +Focus on: +- Purpose: why does this need to exist? +- Users: who benefits? +- Constraints: what limits apply? +- Success criteria: how do we know it works? +- Non-goals: what is explicitly out of scope? + +### 3. 
Non-Functional Requirements + +Explicitly clarify or propose assumptions for: + +- Performance expectations +- Scale (users, data, traffic) +- Security/privacy constraints +- Reliability needs +- Maintenance expectations + +If the user is unsure, propose reasonable defaults and mark +them as **assumptions**. + +### 4. Understanding Lock (Gate) + +Before proposing any design, pause and provide: + +**Understanding Summary** (5-7 bullets): +- What is being built +- Why it exists +- Who it is for +- Key constraints +- Explicit non-goals + +**Assumptions**: list all explicitly. + +**Open Questions**: list unresolved items. + +Then ask: +> "Does this accurately reflect your intent? Confirm or +> correct before we move to design." + +**Do NOT proceed until confirmed.** + +### 5. Explore Design Approaches + +Once understanding is confirmed: + +- Propose **2-3 viable approaches** +- Lead with your **recommended option** +- Explain trade-offs: complexity, extensibility, risk, + maintenance +- Apply YAGNI ruthlessly + +### 6. Stress-Test the Chosen Approach + +After the user picks an approach, pause for adversarial review +before moving to detailed design. + +**Surface assumptions**: +- List assumptions the chosen approach depends on +- Identify implicit dependencies (libraries, infra, team knowledge) + +**Identify failure modes**: +- What would make this approach fail? (edge cases, scale limits, + integration risks, operational complexity) +- What's the worst-case recovery if it does fail? + +**Steel-man an alternative**: +- Name the strongest argument for a different approach +- Be specific: "Approach B avoids X risk" not "there are other options" + +**Gate question**: +> "These are the risks I see. Do they change your preference, or +> should we proceed with the chosen approach?" + +Only move to detailed design after the user confirms. This is +one gate, not a debate: if the user says "proceed," proceed. + +### 7. Present the Design + +Break into digestible sections. 
After each, ask: +> "Does this look right so far?" + +Cover as relevant: +- Architecture +- Components +- Data flow +- Error handling +- Edge cases +- Testing strategy + +### 8. Decision Log + +Maintain a running log throughout: + +| Decision | Alternatives | Rationale | +|----------|--------------|-----------| +| ... | ... | ... | + +## After the Design + +### Persist to Context + +Once validated, persist outputs: + +```bash +# Record key decisions +ctx add decision "..." \ + --session-id ID --branch BR --commit HASH \ + --context "..." --rationale "..." --consequence "..." +``` + +### Implementation Handoff + +Only after documentation, ask: +> "Ready to begin implementation?" + +If yes: +- Create explicit implementation plan +- Break into incremental steps +- Proceed one step at a time + +## Good Example + +> **Understanding Summary**: +> - Building a cooldown mechanism for `ctx agent` hooks +> - Prevents repetitive context injection on every tool use +> - For Claude Code users running ctx in PreToolUse hooks +> - Must be session-isolated (two sessions share no state) +> - Non-goal: per-tool granularity (cooldown is global) +> +> **Assumptions**: 10-minute default cooldown is reasonable. +> +> **Open Questions**: none remaining. +> +> Does this accurately reflect your intent? 
+ +## Bad Examples + +- Jumping to architecture diagrams before asking what the + feature is for +- Asking 5 questions in one message (ask one at a time) +- Proposing a design without the Understanding Lock step +- "Let me implement this real quick" (no implementation + during brainstorm) + +## Quality Checklist + +Exit brainstorming mode **only when**: + +- [ ] Understanding Lock confirmed by the user +- [ ] At least one design approach accepted +- [ ] Stress-test completed (assumptions, failure modes, alternatives) +- [ ] Major assumptions documented explicitly +- [ ] Key risks acknowledged +- [ ] Decision Log complete +- [ ] Decisions persisted to `.context/DECISIONS.md` + +If any criterion is unmet, continue refinement. + +## Principles + +- **Think step-by-step** before proposing anything: reason + through the problem space before jumping to solutions +- One question at a time +- Assumptions must be explicit +- Explore alternatives before committing +- Validate incrementally +- Clarity over cleverness +- Be willing to go back +- **YAGNI ruthlessly** diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-check-links/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-check-links/SKILL.md new file mode 100644 index 000000000..dac6a0e72 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-check-links/SKILL.md @@ -0,0 +1,136 @@ +--- +name: ctx-check-links +description: "Audit docs for dead links. Use before releases, after restructuring docs, or when running a documentation audit." +tools: [bash, read, glob, grep] +--- + +Scan markdown files for broken links. Two passes: +internal (file targets) and external (HTTP URLs). + +## Scope Discovery + +Determine which directories to scan: + +1. If the user specifies a path, use that +2. Otherwise, glob for common doc directories: `docs/`, `doc/`, + `documentation/`, `site/` +3. 
If none exist, fall back to scanning all `.md` files in the + project root (excluding `node_modules/`, `.git/`, `vendor/`) + +Report which directories are being scanned at the start of output. + +## When to Use + +- Before releases or doc deployments +- After renaming, moving, or deleting doc pages +- After restructuring documentation directories or nav +- When `/_ctx-audit` runs (audit check #12) +- When a user reports a 404 on the site + +## When NOT to Use + +- When editing a single doc (just eyeball links in that file) +- When offline and only external checks would matter + +## Execution + +### Pass 1: Internal Links + +Scan every `.md` file in the discovered scope for markdown links +pointing to other files: `[text](target.md)`, +`[text](../path/file.md)`, `[text](path/file.md#anchor)`. + +For each link: + +1. Resolve the target **relative to the source file's directory** +2. Strip any `#anchor` fragment before checking file existence +3. Skip external URLs (`http://`, `https://`, `mailto:`) +4. Skip bare anchors (`#section-name`): these are intra-page +5. Verify the target file exists on disk + +Collect all broken internal links as: + +``` +BROKEN: source-file.md:LINE → target.md (file not found) +``` + +### Pass 2: External Links + +Scan every `.md` file in the discovered scope for `http://` and +`https://` URLs in markdown link syntax. + +For each URL: + +1. Send an HTTP HEAD request with a 10-second timeout +2. If HEAD fails or returns 405, retry with GET +3. Record the HTTP status code + +Report failures as: + +``` +WARN: source-file.md:LINE → https://example.com (HTTP 404) +WARN: source-file.md:LINE → https://example.com (timeout) +``` + +**Do not treat external failures as errors.** Network partitions, +rate limiting, and transient outages are common. Report them but +do not fail the check. 
+ +Exceptions: skip these URLs: +- `localhost` / `127.0.0.1` URLs (local dev servers) +- `example.com` / `example.org` (placeholder domains) + +### Pass 3: Image References + +Scan for image links: `![alt](path/to/image.png)` and +`![alt](images/file.jpg)`. + +Verify the image file exists on disk. Same resolution rules as +internal links. + +## Output Format + +``` +## Link Check Report + +### Internal Links +- N broken links found (or "All clear") +- [list of broken links with file:line and target] + +### External Links +- N warnings (or "All reachable") +- [list of failures with file:line, URL, and reason] + +### Images +- N missing images (or "All present") +- [list of missing images with file:line and target] + +### Summary +Internal: N broken / M total +External: N unreachable / M total +Images: N missing / M total +``` + +## Fixing + +For broken internal links, offer specific fixes: + +- If the target was renamed, suggest the new path +- If the target was deleted, suggest removing the link or + pointing to an alternative +- If the target is a typo (close match exists), suggest the + correction + +For external links, just report. The user decides whether to +update, remove, or ignore. + +## Quality Checklist + +After running the check: +- [ ] All `.md` files in the discovered scope were scanned +- [ ] Relative path resolution accounts for subdirectories +- [ ] Anchors stripped before file existence check +- [ ] External check used timeouts (not hanging on slow hosts) +- [ ] localhost/example URLs were skipped +- [ ] Report distinguishes errors (internal) from warnings + (external) diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-commit/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-commit/SKILL.md new file mode 100644 index 000000000..81443ad50 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-commit/SKILL.md @@ -0,0 +1,202 @@ +--- +name: ctx-commit +description: "Commit with context persistence. 
Use instead of raw git commit to capture decisions and learnings alongside code changes." +--- + +Commit code changes, then prompt for decisions and learnings +worth persisting. Bridges the gap between committing code and +recording the context behind it. + +## When to Use + +- For ALL commits. This is the only way to commit in this project. + Raw `git commit` bypasses spec enforcement and violates CONSTITUTION. +- When the user says "commit", "commit this", "ship it", "let's commit": + always use this skill, never raw git commit. + +## When NOT to Use + +- When nothing has changed (no staged or unstaged modifications) + +## Usage Examples + +```text +/ctx-commit +/ctx-commit "implement session enrichment" +/ctx-commit --skip-qa +``` + +## Process + +### 1. Check CONSTITUTION for commit rules + +Read `.context/CONSTITUTION.md` (if it exists) for commit-specific +rules. Common project rules to look for and enforce: + +- **Spec-per-commit**: Add a `Spec:` trailer, verify a spec file exists in + `specs/` before proceeding. If no spec exists, stop and offer to run + `/ctx-spec` to scaffold one. +- **Sign-off**: `Signed-off-by:`, include it. +- **Other trailers**: Honor any project-specific trailer requirements. + +Read CONSTITUTION fully and apply all relevant rules before +proceeding to pre-commit checks. + +### 2. Pre-commit checks + +Unless the user says `--skip-qa` or "skip checks": + +- Run `git diff --name-only` to see what changed +- Run the project's build and lint commands to verify nothing is broken. + Check for a Makefile, package.json, or equivalent. If you cannot + identify the build/lint commands, ask the user before proceeding. +- If the build or lint fails, stop and report: do not commit broken code + +**Verify before claiming ready**: map each claim to evidence. +"Tests pass" requires test output with 0 failures. "Build succeeds" +requires exit 0. "Lint clean" requires linter output with 0 errors. +Run commands fresh; never reuse earlier output. 
Before proceeding +to stage, answer these self-audit questions: + +1. What assumptions did I make? +2. What did I NOT check? +3. Where am I least confident? +4. What would a reviewer question first? + +If any answer reveals a gap, address it before staging. + +### 3. Close matching tasks + +Every commit closes work. Before staging, check TASKS.md for +tasks that this commit completes: + +- Read `.context/TASKS.md` +- Identify the spec being committed (the `Spec:` trailer value) +- Find open tasks (`[ ]`) whose `Spec:` field matches +- If no spec match, search by keywords from the commit subject +- Mark matching tasks `[x]` +- If uncertain whether a task is fully done, ask the user +- Stage the updated TASKS.md alongside the code changes + +This is the closure point in the plan→spec→task→commit chain. +Skipping it causes task rot: completed work stays open, +future sessions waste time re-triaging stale items. + +### 4. Stage and commit + +- Review unstaged changes with `git status` +- Stage relevant files (prefer specific files over `git add -A`) +- Craft a concise commit message: + - If the user provided a message, use it + - If not, draft one based on the changes (1-2 sentences, + "why" not "what") +- Include the `Spec:` and `Signed-off-by:` trailers (see format below) + +### 5. Context prompt + +After a successful commit, ask the user: + +> **Any context to capture?** +> +> - **Decision**: Did you make a design choice or trade-off? +> - **Learning**: Did you hit a gotcha or discover something? +> - **Neither**: No context to capture: we're done. + +Wait for the user's response. 
If they provide a decision or +learning, record it using the appropriate command: + +```bash +ctx add decision "Use PostgreSQL" \ + --session-id abc12345 --branch main --commit 68fbc00a \ + --context "Need a reliable database" \ + --rationale "ACID compliance and JSON support" \ + --consequence "Team needs training" +``` + +```bash +ctx add learning "Go embed requires files in same package" \ + --session-id abc12345 --branch main --commit 68fbc00a \ + --context "..." --lesson "..." --application "..." +``` + +### 6. Reflect + +After every commit, run `/ctx-reflect` to capture the bigger +picture before moving on. This is mandatory: Skipping reflection +is how context gets lost between sessions. + +## Commit Message Format + +Follow the repository's existing commit style. Draft messages +that: +- Focus on **why**, not what (the diff shows what) +- Use lowercase, no period at the end +- Scale detail to match scope: a one-file fix gets 1-2 sentences; + a multi-package change gets a summary paragraph plus a bulleted + list of what changed and why +- Include any trailers required by CONSTITUTION (e.g., `Spec:`, + `Signed-off-by:`) + +Example: +``` +complete journal-recall merge wiring and cross-cutting cleanup + +Wire journal commands through journal/core packages instead of +recall/core. Move importer, lock, unlock, sync cmd packages from +recall/cmd to journal/cmd. + +Changes: +- journal/core/{plan,execute,query} are now canonical +- sourcefm/sourceformat renamed to source/frontmatter, source/format +- Magic numbers extracted to config/stats constants +- state.StateDir renamed to state.Dir across 26 callers +- splitLines moved to parse.ByteLines +- /ctx-commit skill generalized to be language-agnostic + +Spec: specs/journal-merge-completion.md +Signed-off-by: Jane Doe +``` + +## Commit Discipline + +- **Spec trailer is mandatory**: identify the spec that covers + this work and include `Spec:` in the commit message. 
If + CONSTITUTION also requires it, this is non-negotiable. +- **Confirm the message** with the user before committing (or use + their provided message) +- **Always present the context prompt**: this is the whole point + of the skill +- **Always reflect**: even a one-sentence reflection prevents + context loss +- **Check for secrets** (`.env`, credentials, tokens) in the diff + before staging + +## Quality Checklist + +Before committing, verify: +- [ ] Spec exists and is referenced in the commit message +- [ ] Build and lint pass +- [ ] Matching tasks marked `[x]` in TASKS.md +- [ ] Commit message is concise and explains the why +- [ ] `Spec:` and `Signed-off-by:` trailers are present +- [ ] No secrets or sensitive files in the staged changes +- [ ] Specific files staged (not blind `git add -A`) + +After committing, verify: +- [ ] Context prompt was presented to the user +- [ ] Any decisions/learnings provided were recorded +- [ ] Reflection was completed + +## Human Relay + +After every successful commit, relay a structured summary to the +human verbatim: + +``` +┌─ Commit Summary ───────────────────────── +│ Spec: specs/.md +│ Tasks closed: +│ Files changed: +│ Message: +└────────────────────────────────────────── +``` diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-consolidate/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-consolidate/SKILL.md new file mode 100644 index 000000000..30ccce47d --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-consolidate/SKILL.md @@ -0,0 +1,194 @@ +--- +name: ctx-consolidate +description: "Consolidate redundant entries in LEARNINGS.md or DECISIONS.md. Use when ctx drift reports high entry counts or entries overlap." +--- + +Analyze entries in LEARNINGS.md and/or DECISIONS.md, group overlapping +entries by topic, and (with user approval) merge groups into denser +consolidated entries. Originals are archived, not deleted. 
+ +## Key Distinction + +**Consolidation != archival.** Archival moves old entries to +the archive directory. Consolidation *replaces* verbose entries with +tighter ones: the file stays useful, just denser. The originals move +to archive as a paper trail. + +## When to Use + +- When `ctx drift` reports entry counts above threshold + (default: 30 learnings, 20 decisions) +- When you notice 3+ entries about the same topic +- When the user asks "clean up learnings", "consolidate context", + "reduce noise in decisions" +- Before a release, to keep context lean + +## When NOT to Use + +- When there are fewer than 10 entries (nothing meaningful to group) +- When the user wants to *delete* entries (offer archival instead) +- Automatically: always require user approval before modifying files +- Mid-task when the user is focused on shipping + +## Execution + +### Step 1: Parse Entries + +Read the target file(s): + +```bash +# Check entry counts first +ctx drift --json +``` + +Then read the files directly: +- LEARNINGS.md (in the context directory) +- DECISIONS.md (in the context directory) + +Parse entries by their `## [YYYY-MM-DD-HHMMSS] Title` headers. Each +entry extends from its header to the line before the next header or +end of file. + +### Step 2: Extract Keywords and Group + +For each entry, extract keywords from its title and body: + +1. Split text on whitespace and punctuation +2. Lowercase everything +3. Filter out stop words (the, and, for, with, from, are, was, etc.) + and words shorter than 3 characters +4. Deduplicate + +Build a keyword-to-entries map. Entries sharing **2 or more +non-trivial keywords** are candidates for the same group. + +**Grouping rules:** +- Minimum group size: 2 entries (nothing to consolidate with 1) +- Maximum group size: 8 entries (larger groups suggest the topic + needs splitting, not merging) +- An entry can only belong to one group (assign to the best match) + +### Step 3: Present Candidates + +Show the user what you found. 
Format: + +``` +Consolidation candidates for LEARNINGS.md: + +Group 1: "Hook behavior" (5 entries) + - [2026-01-15] Hook scripts can lose execute permission + - [2026-01-20] Two-tier hook output is sufficient + - [2026-02-03] Claude Code Hook Key Names + - [2026-02-09] Agent ignores repeated hook output + - [2026-02-16] Security docs vulnerable after migrations + -> Proposed: merge into 1 consolidated entry + +Group 2: "Path handling" (3 entries) + - [2026-01-10] Path construction uses stdlib + - [2026-02-05] G304 gosec false positives + - [2026-02-16] gosec G301/G306 permissions + -> Proposed: merge into 1 consolidated entry + +Ungrouped: 12 entries (no consolidation needed) +``` + +**Wait for the user to approve, modify, or reject each group.** +Do NOT proceed without explicit confirmation. + +### Step 4: Generate Consolidated Entries + +For each approved group, write a consolidated entry that: + +- Uses today's timestamp in `YYYY-MM-DD-HHMMSS` format +- Appends "(consolidated)" to the title +- Lists the date range of originals in a `**Consolidated from**` line +- Distills each original into 1-2 lines +- **Preserves all unique information** (nothing is lost) + +**Format:** + +```markdown +## [YYYY-MM-DD-HHMMSS] Hook behavior (consolidated) + +**Consolidated from**: 5 entries (2026-01-15 to 2026-02-16) + +- Hook scripts can lose execute permission without warning; always + restore +x after sync operations +- Two-tier output (stdout for AI context, stderr+exit for blocks) + is sufficient; don't over-engineer severity levels +- Claude Code hook key names are case-sensitive: PreToolUse, not + pre_tool_use +- Agents develop repetition fatigue: vary hook output phrasing + across invocations +- After infrastructure migrations, audit security docs first: + stale paths in security guidance give false confidence +``` + +### Step 5: Execute Approved Merges + +For each approved group: + +1. 
**Add the consolidated entry** at the top of the file (below + the `# Learnings` or `# Decisions` header) +2. **Remove the original entries** from the source file +3. **Append originals to archive** at + `archive/learnings-consolidated-YYYY-MM-DD.md` in the context + directory (or `decisions-consolidated-YYYY-MM-DD.md`) +4. **Rebuild the index**: + +```bash +ctx learning reindex +# or +ctx decision reindex +``` + +### Step 6: Report Results + +``` +Consolidated LEARNINGS.md: + - Group "Hook behavior": 5 entries -> 1 (originals archived) + - Group "Path handling": 3 entries -> 1 (originals archived) + Total: 8 entries consolidated into 2. File reduced from 47 to 41 entries. + Archive: archive/learnings-consolidated-2026-02-19.md (in context dir) +``` + +## Archive Format + +The archive file uses the same Markdown format as the source file. +Each archived entry keeps its original timestamp and content, +preceded by a header noting which consolidated entry replaced it: + +```markdown +# Archived Learnings (consolidated 2026-02-19) + +Originals replaced by consolidated entries in LEARNINGS.md. 
+ +## Group: Hook behavior + +## [2026-01-15-120000] Hook scripts can lose execute permission +(original content preserved verbatim) + +## [2026-01-20-093000] Two-tier hook output is sufficient +(original content preserved verbatim) +``` + +## What This Skill Does NOT Do + +- **Automatic consolidation**: always requires user approval +- **Cross-file consolidation**: learnings stay in LEARNINGS.md, + decisions stay in DECISIONS.md +- **Delete entries**: always archives originals as a paper trail +- **Semantic understanding via embeddings**: uses keyword matching, + which is sufficient for structured entries with consistent formatting +- **Consolidate TASKS.md or CONVENTIONS.md**: use `ctx task archive` + for tasks; conventions rarely need consolidation + +## Quality Checklist + +Before reporting results: +- [ ] Presented all candidate groups before making changes +- [ ] Waited for explicit user approval per group +- [ ] Each consolidated entry preserves all unique information +- [ ] Original entries are archived, not deleted +- [ ] Ran `ctx reindex` after modifications +- [ ] Reported what changed and where archives were written diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-doctor/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-doctor/SKILL.md new file mode 100644 index 000000000..f5608fb59 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-doctor/SKILL.md @@ -0,0 +1,102 @@ +--- +name: ctx-doctor +description: "Troubleshoot ctx behavior. Runs structural health checks, analyzes event log patterns, and presents findings with suggested actions." +--- + +Diagnose ctx problems by combining structural health checks with +event log analysis. + +## When to Use + +- User says "doctor", "diagnose", "troubleshoot", "health check" +- User asks "why didn't my hook fire?" +- User says "hooks seem broken" or "context seems stale" +- User says "too many nudges" or "something seems off" +- User asks "what happened last session?" 
+ +## When NOT to Use + +- User wants a quick status check (use `/ctx-status`) +- User wants to fix drift (use `/ctx-drift`) +- User wants to change hook messages (use `ctx hook message`) +- User wants to pause hooks (use `/ctx-pause`) + +## Diagnostic Playbook + +Follow this triage sequence: + +### Phase 1: Structural Baseline + +Run `ctx doctor --json` to get the full structural health report. + +```bash +ctx doctor --json +``` + +Parse the JSON output. Note any warnings or errors. + +### Phase 2: Event Log Analysis (if available) + +If the doctor report shows event logging is enabled, query recent events: + +```bash +ctx hook event --json --last 100 +``` + +If the user is asking about a specific hook: + +```bash +ctx hook event --hook --json --last 20 +``` + +If event logging is not enabled, note: "Enable `event_log: true` in +`.ctxrc` for hook-level diagnostics." + +### Phase 3: Targeted Investigation + +Based on findings, check additional sources: + +- **Hook config**: read `.claude/settings.local.json` to verify hook registration +- **Custom messages**: run `ctx hook message list` to check for silenced hooks +- **RC config**: read `.ctxrc` to check configuration +- **Reminders**: run `ctx remind list` for pending reminders + +### Phase 4: Present Findings + +Structure your report as: + +``` +## Doctor Report + +### Structural health +- Summarize ctx doctor results + +### Event analysis (if available) +- Patterns, gaps, or anomalies in event data +- Specific hook behavior observations + +### Suggested actions +- [ ] Actionable items based on findings +``` + +### Phase 5: Suggest, Don't Fix + +Present actionable next steps but do NOT auto-fix anything. +The user decides what to act on. 
+ +## Available Data Sources + +| Source | Command | What it reveals | +|----------------------|------------------------------------------|-----------------------| +| Structural health | `ctx doctor --json` | All mechanical checks | +| Event log | `ctx hook event --json --last 100` | Recent hook activity | +| Event log (filtered) | `ctx hook event --hook --json` | Specific hook | +| Reminders | `ctx remind list` | Pending reminders | +| Hook messages | `ctx hook message list` | Custom vs default | +| RC config | Read `.ctxrc` | Configuration | + +## Graceful Degradation + +If event logging is not enabled, the skill still works with reduced +capability. Run `ctx doctor` for structural checks and note that +event-level diagnostics require `event_log: true` in `.ctxrc`. diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-drift/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-drift/SKILL.md index 59cd6f907..da7aabcab 100644 --- a/internal/assets/integrations/copilot-cli/skills/ctx-drift/SKILL.md +++ b/internal/assets/integrations/copilot-cli/skills/ctx-drift/SKILL.md @@ -212,14 +212,14 @@ drifts independently from the codebase. | Missing `Skill(ctx-*)` entry | Suggest adding: skill will prompt every time | | Stale `Skill(ctx-*)` entry | Suggest removing: dead reference | | Granular `Bash(ctx :*)` | Suggest consolidating to `Bash(ctx:*)` | -| One-off / session debris entries | Note as hygiene issue (see `hack/runbooks/sanitize-permissions.md`) | +| One-off / session debris entries | Note as hygiene issue (see `docs/operations/runbooks/sanitize-permissions.md`) | ### Important Do **not** edit `settings.local.json` directly. Report findings and let the user make changes. This file controls agent permissions: self-modification is a security concern. Refer -users to `hack/runbooks/sanitize-permissions.md` for the manual cleanup +users to `docs/operations/runbooks/sanitize-permissions.md` for the manual cleanup procedure. 
## Proactive Use diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-implement/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-implement/SKILL.md new file mode 100644 index 000000000..f9331c2cd --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-implement/SKILL.md @@ -0,0 +1,176 @@ +--- +name: ctx-implement +description: "Execute a plan step-by-step with verification. Use when you have a plan document and need disciplined, checkpointed implementation." +--- + +Take a plan (inline text, file path, or from the conversation) +and execute it step-by-step with build/test verification between +steps. + +## When to Use + +- When the user provides a plan document or file and says + "implement this" +- When a multi-step task has been planned and needs disciplined + execution +- When the user wants checkpointed progress with verification + at each step +- After `/ctx-brainstorm` or plan mode produces an approved plan + +## When NOT to Use + +- For single-step tasks: just do them directly +- When the plan is vague or incomplete: use `/ctx-brainstorm` + first to refine it +- When the user wants to explore or discuss, not execute +- When changes are trivial (typo fix, config tweak) + +## Usage Examples + +```text +/ctx-implement +/ctx-implement path/to/plan.md +/ctx-implement (the plan from our discussion above) +``` + +## Process + +### 1. Load the plan + +- If a file path is provided, read it +- If inline text is provided, use it directly +- If neither, look back in the conversation for the most + recent plan or approved design +- If no plan can be found, ask the user for one + +### 2. Break into steps + +Parse the plan into discrete, checkable steps. 
Each step
+should be:
+- **Atomic**: one logical change (a file, a function, a test)
+- **Verifiable**: has a clear pass/fail check
+- **Ordered**: dependencies respected (create before use,
+  test after implement)
+
+Present the step list to the user for confirmation:
+
+> **Implementation plan** (N steps):
+>
+> 1. [Step description] - verify: [check]
+> 2. [Step description] - verify: [check]
+> 3. ...
+>
+> Ready to start?
+
+### 3. Execute step-by-step
+
+For each step:
+
+1. **Announce** what you're doing (one line)
+2. **Think through** the change before writing code: what does
+   it touch, what could break, what's the simplest correct path?
+3. **Implement** the change
+4. **Verify** with the appropriate check:
+   - Go code changed → `CGO_ENABLED=0 go build -o /dev/null ./cmd/ctx`
+   - Tests affected → `CGO_ENABLED=0 go test ./...`
+   - Config/template changed → build to verify embeds
+   - Docs only → no verification needed
+5. **Report** step result: pass or fail
+6. **If failed**: stop, diagnose, fix, re-verify before
+   moving to the next step
+
+Verify after every individual step before proceeding to the next.
+
+### 4. Checkpoint progress
+
+After every 3-5 steps (or after a significant milestone):
+- Summarize what has been completed
+- Note any deviations from the plan
+- Ask the user if they want to continue, adjust, or stop
+
+### 5. 
Wrap up + +After all steps complete: +- Run a final full verification (`make check` or + `CGO_ENABLED=0 go build && go test ./...`) +- Summarize what was implemented +- Note any deviations from the original plan +- Suggest context to persist (decisions, learnings, tasks) + +## Step Verification Map + +| Change type | Verification command | +|--------------------|---------------------------------------------------| +| Go source code | `CGO_ENABLED=0 go build -o /dev/null ./cmd/ctx` | +| Test files | `CGO_ENABLED=0 go test ./...` | +| Templates/embeds | `CGO_ENABLED=0 go build -o /dev/null ./cmd/ctx` | +| Makefile | Run the new/changed target | +| Skill files | Build (to verify embed) + check live copy matches | +| Docs/markdown only | None required | +| Shell scripts | `bash -n script.sh` (syntax check) | + +## Handling Failures + +When a step fails verification: + +1. **Don't panic**: read the error output carefully +2. **Reason through** the failure step-by-step before attempting + a fix; understand the cause, not just the symptom +3. **Fix** the issue in the current step +4. **Re-verify** the fix +5. **Only then** move to the next step +6. If the fix changes the plan, note the deviation + +If a step fails repeatedly (3+ attempts), stop and ask the +user for guidance rather than thrashing. + +## Output Format + +Progress updates should be concise: + +``` +Step 1/6: Create ctx-next skill directory .......... OK +Step 2/6: Write SKILL.md template .................. OK +Step 3/6: Copy to live skill directory ............. OK +Step 4/6: Build to verify template embeds .......... OK +Step 5/6: Run tests ................................ OK +Step 6/6: Mark task in TASKS.md .................... OK + +All 6 steps complete. Build and tests pass. +``` + +## Examples + +### Good Implementation + +> **Step 3/8**: Add `check` target to Makefile +> Added `check: build audit` after the `audit` target. +> Verify: `make check` ... build OK, audit OK. 
+> **Result**: PASS + +### Bad Implementation + +> "I'll implement the whole plan now" +> *[makes all changes at once without verification]* +> "Done! Everything should work." + +(No step-by-step, no verification, no checkpoints: this +defeats the purpose of the skill.) + +## Quality Checklist + +Before starting, verify: +- [ ] Plan exists and is clear enough to execute +- [ ] Steps are broken down and presented to the user +- [ ] User confirmed readiness to proceed + +During execution, verify: +- [ ] Each step is verified before moving on +- [ ] Failures are fixed in place, not deferred +- [ ] Checkpoints happen every 3-5 steps + +After completion, verify: +- [ ] Final full verification passes +- [ ] Deviations from plan are noted +- [ ] Summary of what was implemented is presented +- [ ] Context persistence is suggested if warranted diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-import-plans/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-import-plans/SKILL.md new file mode 100644 index 000000000..7f72bb741 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-import-plans/SKILL.md @@ -0,0 +1,56 @@ +--- +name: ctx-import-plans +description: "Import plan files into project specs directory. Use to convert external plans into project-tracked specs." +tools: [bash, read, write] +--- + +Import plan files into the project's `specs/` directory. + +## When to Use + +- When plan files exist outside the project (e.g., from AI + tool plan modes) +- When converting external design docs to project specs +- When the user says "import that plan" + +## When NOT to Use + +- Plan is already in `specs/` +- Plan is too vague to be a spec (brainstorm first) + +## Process + +### 1. Locate the plan + +If path provided, read it. Otherwise, check common locations: +- Current conversation context +- Session workspace files + +### 2. 
Convert to spec format + +Map plan sections to the spec template structure: +- Problem → Problem +- Steps/Tasks → Implementation +- Goals → Happy Path +- Risks → Edge Cases + +### 3. Handle conflicts + +If `specs/{name}.md` already exists: +- Compare contents +- Offer to merge, replace, or rename + +### 4. Write the spec + +Write to `specs/{name}.md`. + +### 5. Create tasks (optional) + +Offer to break the spec into tasks in TASKS.md. + +## Quality Checklist + +- [ ] Spec follows project template structure +- [ ] No conflicts with existing specs +- [ ] File written to correct location +- [ ] Tasks offered if applicable diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-journal-enrich-all/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-journal-enrich-all/SKILL.md new file mode 100644 index 000000000..1155653ab --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-journal-enrich-all/SKILL.md @@ -0,0 +1,233 @@ +--- +name: ctx-journal-enrich-all +description: "Full journal pipeline: import unimported sessions, then batch-enrich all unenriched entries. Use when the user says 'process the journal' or to catch up on the backlog." +--- + +Full journal pipeline: import if needed, then batch-enrich. + +## When to Use + +- When the user says "enrich everything" or "process the journal" +- When there is a backlog of unenriched or unimported sessions +- Periodically to catch up on recent sessions +- After the `check-journal` hook reports unimported or unenriched entries + +## When NOT to Use + +- For a single specific session (use `/ctx-journal-enrich` instead) + +## Process + +### Step 0: Import If Needed + +Before enriching, check whether there are unimported sessions. If +the journal directory has no `.md` files at all, or if there are +`.jsonl` session files newer than the newest journal entry, import +them first. 
+ +```bash +CTX_DIR=$(ctx system bootstrap -q) +JOURNAL_DIR="$CTX_DIR/journal" + +# Check if any .md files exist +md_count=$(ls "$JOURNAL_DIR"/*.md 2>/dev/null | wc -l) + +if [ "$md_count" -eq 0 ]; then + echo "No journal entries found: importing all sessions." + ctx journal import --all --yes +else + # Compare newest .md mtime against .jsonl files + newest_md=$(stat -c %Y "$(ls -t "$JOURNAL_DIR"/*.md | head -1)") + unimported=$(find ~/.claude/projects -name "*.jsonl" -newermt "@${newest_md}" 2>/dev/null | wc -l) + if [ "$unimported" -gt 0 ]; then + echo "$unimported unimported session(s) found: importing first." + ctx journal import --all --yes + fi +fi +``` + +Report how many sessions were imported (or "none needed") before +moving to enrichment. + +### Step 1: Find Unenriched Entries + +List all journal entries that lack enrichment using the state file: + +```bash +# List .md files in journal dir and check state +CTX_DIR=$(ctx system bootstrap -q) +for f in "$CTX_DIR/journal/"*.md; do + name=$(basename "$f") + ctx system mark-journal --check "$name" enriched || echo "$f" +done +``` + +Or read `.state.json` in the journal directory directly and list +entries without an `enriched` date set. + +### Fallback: Detect Enrichment from Frontmatter + +If `mark-journal --check` is unavailable (no state file, command +fails), fall back to frontmatter inspection. An entry is considered +**already enriched** if its YAML frontmatter contains **both** `type` +and `outcome` fields: these are set exclusively by enrichment, never +by import. 
The enrichment-only fields are: + +| Field | Set by | +|----------------|----------------| +| `title` | Import | +| `date` | Import | +| `time` | Import | +| `model` | Import | +| `tokens_in` | Import | +| `tokens_out` | Import | +| `session_id` | Import | +| `project` | Import | +| `type` | **Enrichment** | +| `outcome` | **Enrichment** | +| `topics` | **Enrichment** | +| `technologies` | **Enrichment** | +| `summary` | **Enrichment** | + +If all entries already have enrichment recorded, report that and stop. + +### Step 2: Filter Out Noise + +Skip entries that are not worth enriching: + +- **Locked entries**: a file is locked if `.state.json` has a + `locked` date OR the frontmatter contains `locked: true`. Never + modify locked files: neither metadata nor body. Check via: + `ctx system mark-journal --check locked` + or look for `locked: true` in the YAML frontmatter. +- **Suggestion sessions**: files under ~20 lines or containing + only auto-complete fragments. Check with: + ```bash + wc -l + ``` +- **Multi-part continuations**: files ending in `-p2.md`, `-p3.md` + etc. Enrich only the first part; continuation parts inherit + the frontmatter topic. + +Report how many entries will be processed and how many were +filtered out. + +### Step 3: Process Each Entry + +For each entry, read the conversation and extract: + +1. **Title**: a short descriptive title for the session +2. **Type**: feature, bugfix, refactor, exploration, debugging, + or documentation +3. **Outcome**: completed, partial, abandoned, or blocked +4. **Topics**: 2-5 topic tags +5. **Technologies**: languages, frameworks, tools used +6. 
**Summary**: 2-3 sentences describing what was accomplished + +Apply YAML frontmatter to each file: + +```yaml +--- +title: "Session title" +date: 2026-01-27 +type: feature +outcome: completed +topics: + - authentication + - caching +technologies: + - go + - redis +--- +``` + +### Step 4: Mark Enriched + +After writing frontmatter to each file, update the state file: + +```bash +ctx system mark-journal enriched +``` + +### Step 5: Report + +After processing, report: + +- How many sessions were imported (or "none needed") +- How many entries were enriched +- How many were skipped (already enriched, too short, etc.) +- Remind the user to rebuild: `ctx journal site --build` + +## Confirmation Mode + +**Interactive** (default when user is present): show a summary +of proposed enrichments before applying. Group by type/outcome +so the user can scan quickly rather than reviewing one by one. + +**Unattended** (when running in a loop or explicitly told +"just do it"): apply enrichments directly and report results. + +## Large Backlogs (20+ entries) + +For large backlogs, use the heuristic enrichment script bundled +in `references/enrich-heuristic.py`. This script infers type, +outcome, topics, and technologies from the title and filename +patterns, then inserts frontmatter and marks state automatically. + +### How to use + +1. Build a file list of eligible entries (non-multipart, 20+ lines, + missing `type:` and `outcome:` fields): + ```bash + CTX_DIR=$(ctx system bootstrap -q) + for f in "$CTX_DIR"/journal/*.md; do + [ -f "$f" ] || continue + has_type=$(head -30 "$f" | grep -c '^type:' || true) + has_outcome=$(head -30 "$f" | grep -c '^outcome:' || true) + if [ "$has_type" -eq 0 ] || [ "$has_outcome" -eq 0 ]; then + name=$(basename "$f") + case "$name" in *-p[0-9].md|*-p[0-9][0-9].md) continue ;; esac + lines=$(wc -l < "$f") + [ "$lines" -ge 20 ] && echo "$f" + fi + done > /tmp/enrich-list.txt + ``` + +2. Run the heuristic enrichment script. 
The script path is relative + to this skill's directory: copy it to /tmp or reference it via + the full embedded path: + ```bash + python3 references/enrich-heuristic.py /tmp/enrich-list.txt + ``` + +3. The script handles everything: reads files, inserts frontmatter, + runs `ctx system mark-journal` for each, and reports counts. + +### When to use heuristic vs. per-file enrichment + +| Backlog size | Approach | +|--------------|---------------------------------------------------| +| 1-5 entries | Read each file, enrich manually with full context | +| 6-20 entries | Sequential processing in the main conversation | +| 20+ entries | Use `enrich-heuristic.py` for bulk processing | + +The heuristic script produces good-enough enrichment from titles +and filenames. For higher quality, follow up with manual review +of entries where the type or topics look wrong. + +Subagent parallelization is an alternative for 20+ entries, but +requires that subagents have Edit and Bash permissions granted. +If permissions are restricted, the heuristic script is faster +and more reliable. + +## Quality Checklist + +- [ ] Unimported sessions detected and imported before enrichment +- [ ] Suggestion sessions and multi-part continuations filtered +- [ ] Each enriched entry has all required frontmatter fields +- [ ] Summary is specific to the session, not generic +- [ ] User was shown a summary before applying (unless unattended) +- [ ] State file updated for each enriched entry diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-journal-enrich/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-journal-enrich/SKILL.md new file mode 100644 index 000000000..9d29aee5f --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-journal-enrich/SKILL.md @@ -0,0 +1,161 @@ +--- +name: ctx-journal-enrich +description: "Enrich journal entry with metadata. Use when journal entries lack frontmatter, tags, or summary for future reference." 
+--- + +Enrich a session journal entry with structured metadata. + +## Before Enriching + +1. **Check if locked**: a file is locked if `.state.json` has a + `locked` date OR the frontmatter contains `locked: true`. Locked + files must not be modified: skip them silently. Check via: + `ctx system mark-journal --check <file> locked` + or look for `locked: true` in the YAML frontmatter. +2. **Check if already enriched**: check the state file via + `ctx system mark-journal --check <file> enriched` or read + `.state.json` in the journal directory; confirm before overwriting + +## When to Use + +- When journal entries lack metadata for future reference +- After importing sessions that need categorization +- When building a searchable session archive + +## When NOT to Use + +- On entries that already have complete frontmatter (unless updating) +- Before normalizing entries with broken formatting +- On suggestion sessions (short auto-complete prompts; not worth enriching) + +## Input + +The user specifies a journal entry by partial match: +- `twinkly-stirring-kettle` (slug) +- `twinkly` (partial slug) +- `2026-01-24` (date) +- `76fe2ab9` (short ID) + +Find matching files in the journal directory: +```bash +ls "$(ctx system bootstrap -q)/journal/"*.md | grep -i "<pattern>" +``` + +If multiple matches, show them and ask which one. +If no argument given, show recent unenriched entries by reading +`.state.json` in the journal directory and listing entries without +an `enriched` date: + +```bash +# List unenriched entries using state file +CTX_DIR=$(ctx system bootstrap -q) +for f in "$CTX_DIR/journal/"*.md; do + name=$(basename "$f") + ctx system mark-journal --check "$name" enriched || echo "$f" +done | head -10 +``` + +## Usage Examples + +```text +/ctx-journal-enrich twinkly-stirring-kettle +/ctx-journal-enrich twinkly +/ctx-journal-enrich 2026-01-24 +/ctx-journal-enrich 76fe2ab9 +``` + +## Enrichment Tasks + +Read the journal entry and extract: + +### 1. 
Frontmatter (YAML at top of file) + +```yaml +--- +title: "Session title" +date: 2026-01-27 +model: claude-opus-4-6 # auto-populated at import +tokens_in: 234000 # auto-populated at import +tokens_out: 89000 # auto-populated at import +type: feature +outcome: completed +topics: + - authentication + - caching +technologies: + - go + - postgresql +libraries: + - cobra + - fatih/color +key_files: + - internal/auth/token.go + - internal/db/cache.go +--- +``` + +**Auto-populated fields** (set during `ctx journal import`, do NOT overwrite): +`date`, `time`, `project`, `session_id`, `model`, `tokens_in`, `tokens_out`, `branch` + +**Type values:** + +| Type | When to use | +|-----------------|---------------------------------------| +| `feature` | Building new functionality | +| `bugfix` | Fixing broken behavior | +| `refactor` | Restructuring without behavior change | +| `exploration` | Research, learning, experimentation | +| `debugging` | Investigating issues | +| `documentation` | Writing docs, comments, README | + +**Outcome values:** + +| Outcome | Meaning | +|-------------|------------------------------------| +| `completed` | Goal achieved | +| `partial` | Some progress, work continues | +| `abandoned` | Stopped pursuing this approach | +| `blocked` | Waiting on external dependency | + +### 2. Summary + +If `## Summary` says "[Add your summary...]", replace with 2-3 sentences +describing what was accomplished. + +### 3. 
Extracted Items + +Scan the conversation and extract: + +**Decisions made**: link to DECISIONS.md if persisted: +```markdown +## Decisions +- Used Redis for caching ([D12](../DECISIONS.md#d12)) +- Chose JWT over sessions (not yet persisted) +``` + +**Learnings discovered**: link to LEARNINGS.md if persisted: +```markdown +## Learnings +- Token refresh requires cache invalidation ([L8](../LEARNINGS.md#l8)) +- Go's defer runs LIFO (new insight) +``` + +**Tasks completed/created**: +```markdown +## Tasks +- [x] Implement caching layer +- [ ] Add cache metrics (created this session) +``` + +## Process + +1. Find and read the journal file +2. Analyze the conversation +3. Propose enrichment (type, topics, outcome) +4. Ask user for confirmation/adjustments +5. Show diff and write if approved +6. **Mark enriched** in the state file: + ```bash + ctx system mark-journal enriched + ``` +7. Remind user to rebuild: `ctx journal site --build` or `make journal` diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-journal-normalize/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-journal-normalize/SKILL.md new file mode 100644 index 000000000..298f5dc28 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-journal-normalize/SKILL.md @@ -0,0 +1,46 @@ +--- +name: ctx-journal-normalize +description: "Normalize journal source markdown for clean rendering. Use after journal site shows rendering issues: fence nesting, metadata formatting, broken lists." +tools: [bash, read, write, edit] +--- + +Reconstruct journal entries as clean markdown from stripped plain text. 
+ +## When to Use + +- After `ctx journal site` shows rendering issues +- When journal entries have fence nesting problems +- When metadata blocks render as raw `**Key**: value` +- Before running `ctx-journal-enrich` (clean markdown improves extraction) + +## When NOT to Use + +- On entries already normalized (check `.state.json`) +- When the site renders correctly +- On non-journal markdown files + +## Output Rules + +1. **Fences**: Always use backtick fences. Innermost code gets + 3 backticks. Each nesting level adds 1. +2. **Metadata**: `**Key**: value` blocks become collapsed `
<details>`. +3. **Tool output**: Collapse into `<details>
` when > 10 lines. +4. **Lists**: 2-space indent per level. +5. **No invented content**: Every word in output traces to input. + +## Process + +1. **Backup first**: copy journal directory to `.bak` sibling +2. Identify files to normalize (skip already-normalized via `.state.json`) +3. Process files turn-by-turn (not whole file at once) +4. Write back the fixed files +5. Mark normalized: `ctx system mark-journal normalized` +6. Regenerate site: `ctx journal site --build` +7. Report what changed + +## Quality Checklist + +- [ ] Backup created before modifying +- [ ] Already-normalized files skipped +- [ ] No content was invented or lost +- [ ] State file updated for processed entries diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-loop/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-loop/SKILL.md new file mode 100644 index 000000000..4730b4b0e --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-loop/SKILL.md @@ -0,0 +1,107 @@ +--- +name: ctx-loop +description: "Generate a shell script for running AI tools in autonomous iteration loops. Use when setting up unattended iteration, headless agent runs, or CI-driven AI workflows." +--- + +Generate a ready-to-use autonomous loop shell script. + +## Before Generating + +1. **Check for existing loop script**: look for `loop.sh` in the + project root; confirm before overwriting +2. **Verify PROMPT.md exists**: the generated script defaults to + reading `PROMPT.md`; if missing, ask the user what prompt file + to use +3. 
**Verify the context directory exists**: the loop pattern depends + on persistent context; run `ctx init` first if needed + +## When to Use + +- When setting up a project for autonomous iteration +- When the user wants to run unattended AI development +- When switching AI tools (e.g., Claude to Aider) and need a + new loop script +- When customizing loop parameters (max iterations, completion + signal, prompt file) + +## When NOT to Use + +- For interactive pair-programming sessions (just use the AI + tool directly) +- When the user already has a working loop script and has not + asked for changes +- When the project lacks a context directory and `PROMPT.md` (set + those up first with `ctx init --ralph`) + +## Usage Examples + +```text +/ctx-loop +/ctx-loop --tool aider +/ctx-loop --prompt TASKS.md --max-iterations 10 +/ctx-loop --completion SYSTEM_BLOCKED --output my-loop.sh +``` + +## Flags + +| Flag | Short | Default | Purpose | +|--------------------|-------|--------------------|---------------------------------| +| `--prompt` | `-p` | `PROMPT.md` | Prompt file the loop reads | +| `--tool` | `-t` | `claude` | AI tool: claude, aider, generic | +| `--max-iterations` | `-n` | `0` (unlimited) | Stop after N iterations | +| `--completion` | `-c` | `SYSTEM_CONVERGED` | Signal that ends the loop | +| `--output` | `-o` | `loop.sh` | Output script filename | + +## Supported Tools + +| Tool | Command generated | +|-----------|--------------------------------------| +| `claude` | `claude --print "$(cat )"` | +| `aider` | `aider --message-file ` | +| `generic` | Template stub for custom AI CLI | + +## Completion Signals + +The loop watches AI output for these signals: + +| Signal | Meaning | +|----------------------|--------------------------------------| +| `SYSTEM_CONVERGED` | All tasks complete; loop exits | +| `SYSTEM_BLOCKED` | Needs human input; loop exits | +| `BOOTSTRAP_COMPLETE` | Initial scaffolding done; loop exits | + +## Execution + +```bash +ctx loop 
$ARGUMENTS +``` + +The command writes a shell script (default `loop.sh`) and makes +it executable. Report the generated path and how to run it: + +```bash +chmod +x loop.sh # already done by ctx loop +./loop.sh +``` + +## Safety Notes + +- The generated script includes `set -e` and a 1-second sleep + between iterations to prevent runaway loops +- `--max-iterations` is strongly recommended for first runs; + suggest a reasonable default (e.g., 10) if the user omits it +- The script captures AI tool errors with `|| true` so one + failed iteration does not kill the loop +- Autonomous agents benefit from explicit reasoning prompts in + PROMPT.md: adding "think step-by-step before each change" + to the iteration prompt significantly improves accuracy and + reduces cascading mistakes in unattended runs + +## Quality Checklist + +Before reporting success, verify: +- [ ] Generated script exists at the output path +- [ ] Script is executable +- [ ] Prompt file referenced in the script actually exists +- [ ] If `--max-iterations 0`, user is aware it runs until + a completion signal (warn them) diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-pad/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-pad/SKILL.md new file mode 100644 index 000000000..5ff0aa9f0 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-pad/SKILL.md @@ -0,0 +1,162 @@ +--- +name: ctx-pad +description: "Manage encrypted scratchpad. Use for short, sensitive one-liners that travel with the project." +--- + +Manage the encrypted scratchpad via `ctx pad` commands using +natural language. Translate what the user says into the right +command. 
+ +## When to Use + +- User wants to jot down a quick note, reminder, or sensitive value +- User asks to see, add, remove, edit, or reorder scratchpad entries +- User mentions "scratchpad", "pad", "notes", or "sticky notes" +- User says "jot down", "remember this", "note to self" + +## When NOT to Use + +- For structured tasks (use `ctx add task` instead) +- For architectural decisions (use `ctx add decision` instead) +- For lessons learned (use `ctx add learning` instead) + +## Command Mapping + +| User intent | Command | +|------------------------------------------------------------|--------------------------------------------| +| "show my scratchpad" / "what's on my pad" | `ctx pad` | +| "show me entry 3" / "what's in entry 3" | `ctx pad show 3` | +| "add a note: check DNS" / "jot down: check DNS" | `ctx pad add "check DNS"` | +| "delete the third one" / "remove entry 3" | `ctx pad rm 3` | +| "change entry 2 to ..." / "replace entry 2 with ..." | `ctx pad edit 2 "new text"` | +| "append '-- important' to entry 3" / "add to entry 3: ..." 
| `ctx pad edit 3 --append "-- important"` | +| "prepend 'URGENT:' to entry 1" | `ctx pad edit 1 --prepend "URGENT:"` | +| "move entry 4 to the top" / "prioritize entry 4" | `ctx pad mv 4 1` | +| "move entry 1 to the bottom" | `ctx pad mv 1 N` (where N = last position) | +| "import my notes from notes.txt" | `ctx pad import notes.txt` | +| "import from stdin" / pipe into pad | `cmd \| ctx pad import -` | +| "export all blobs" / "extract blobs to DIR" | `ctx pad export [DIR]` | +| "export blobs, overwrite existing" | `ctx pad export --force [DIR]` | +| "merge entries from another pad" | `ctx pad merge FILE...` | +| "merge with a different key" | `ctx pad merge --key /path/to/key FILE` | +| "show entries tagged later" / "filter by #later" | `ctx pad --tag later` | +| "show everything except #later" | `ctx pad --tag ~later` | +| "what tags do I have" / "list my tags" | `ctx pad tags` | +| "tag entry 5 as urgent" | `ctx pad edit 5 --tag urgent` | + +## Execution + +**List entries:** +```bash +ctx pad +``` + +**Show a single entry (raw text, pipe-friendly):** +```bash +ctx pad show 3 +``` + +**Add an entry:** +```bash +ctx pad add "remember to check DNS config on staging" +``` + +**Remove an entry:** +```bash +ctx pad rm 2 +``` + +**Replace an entry:** +```bash +ctx pad edit 1 "updated note text" +``` + +**Append to an entry:** +```bash +ctx pad edit 3 --append " - this is important" +``` + +**Prepend to an entry:** +```bash +ctx pad edit 1 --prepend "URGENT: " +``` + +**Move an entry:** +```bash +ctx pad mv 3 1 # move entry 3 to position 1 +``` + +**Compose entries (pipe show into edit):** +```bash +ctx pad edit 1 --append "$(ctx pad show 3)" +``` + +**Import lines from a file:** +```bash +ctx pad import notes.txt +``` + +**Import from stdin:** +```bash +grep TODO *.go | ctx pad import - +``` + +**Export blobs to a directory:** +```bash +ctx pad export ./ideas +ctx pad export --dry-run # preview without writing +ctx pad export --force ./backup # overwrite existing 
files +``` + +**Merge entries from another scratchpad:** +```bash +ctx pad merge worktree/.context/scratchpad.enc +ctx pad merge --key /path/to/other.key foreign.enc +ctx pad merge --dry-run pad-a.enc pad-b.md +``` + +**Filter by tag:** +```bash +ctx pad --tag later # entries with #later +ctx pad --tag ~later # entries WITHOUT #later +ctx pad --tag later --tag ci # entries with both (AND) +``` + +**List all tags:** +```bash +ctx pad tags +ctx pad tags --json +``` + +**Tag an entry:** +```bash +ctx pad edit 5 --tag urgent +ctx pad edit 5 --append "checked" --tag done # combine with other ops +``` + +## Interpreting User Intent + +When the user's intent is ambiguous: + +- "update entry 2" with new text → **replace** (full rewrite) +- "add X to entry 2" → **append** (partial update) +- "put X before entry 2's text" → **prepend** +- "prioritize" / "bump up" / "move to top" → **mv N 1** +- "deprioritize" / "move to bottom" → **mv N last** + +When the user says "add": check context: +- "add a note" / "add to my pad" → `ctx pad add` (new entry) +- "add to entry 3" / "add this to the third one" → `ctx pad edit 3 --append` (modify existing) + +## Important Notes + +- Keep the encryption key path (`~/.ctx/.ctx.key`) internal to + `ctx pad` commands: exposing it grants full decryption access + to all pad entries +- Always use `ctx pad` to access entries: reading `scratchpad.enc` + directly yields unreadable ciphertext +- If the user gets a "no key" error, tell them to obtain the + key file from a teammate +- Entries are one-liners; do not add multi-line content +- After modifying, show the updated scratchpad so the user can + verify the change diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-pause/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-pause/SKILL.md new file mode 100644 index 000000000..78dd72955 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-pause/SKILL.md @@ -0,0 +1,47 @@ +--- +name: ctx-pause +description: 
"Pause context hooks for this session. Use when context nudges aren't needed for the current task." +--- + +Pause all context nudge and reminder hooks for the current session. +Security hooks (dangerous command blocking) still fire. + +## When to Use + +- User says "pause ctx", "pause context", "quiet mode" +- User says "stop the nudges", "too many reminders" +- Quick investigation or one-off task that doesn't need ceremonies +- User explicitly asks to reduce context overhead + +## When NOT to Use + +- User wants to silence a specific hook (use `ctx hook message edit` to + customize or silence individual hooks) +- User wants to permanently disable hooks (edit `.claude/settings.local.json`) +- Session involves real project work that benefits from persistence nudges + +## Execution + +Run the pause command: + +```bash +ctx hook pause +``` + +Then confirm to the user: + +> Context hooks paused for this session. Nudges, reminders, and ceremony +> prompts are silenced. Security hooks still fire. +> +> Resume anytime with `/ctx-resume`. 
+ +## Important Notes + +- **Session-scoped**: only affects the current session, not other terminals +- **Hooks still fire silently**: they check the pause flag and no-op +- **Graduated reminder**: a minimal `ctx:paused` indicator appears in hook + output so the state is never invisible +- **Resume before wrap-up**: if the session evolves into real work, resume + hooks before wrapping up to capture learnings and decisions +- **Initial context load is unaffected**: the ~8k token startup injection + happens before any command runs: pause only affects subsequent hooks diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-prompt-audit/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-prompt-audit/SKILL.md new file mode 100644 index 000000000..e8b0b972f --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-prompt-audit/SKILL.md @@ -0,0 +1,157 @@ +--- +name: ctx-prompt-audit +description: "Audit prompting patterns. Use periodically to help users improve prompt quality and reduce clarification cycles." +--- + +Analyze recent session transcripts to identify prompts that led to +unnecessary clarification back-and-forth. + +## Before Auditing + +1. **Check for session data**: look in the journal directory for + transcripts to analyze +2. **Need at least 3 sessions**: fewer than that gives too small a + sample; tell the user to try again later +3. 
**Confirm scope**: if the user specifies sessions or a date + range, use that; otherwise default to the 5 most recent + +## When to Use + +- Periodically to help users improve their prompting +- When the user asks for feedback on their prompting style +- After noticing many clarification cycles in recent sessions +- After a session with unusually high back-and-forth + +## When NOT to Use + +- Immediately after a user's first session (not enough data) +- When the user is frustrated; coaching lands poorly when someone + is already annoyed +- Unsolicited; only run when the user invokes it or explicitly + asks for feedback + +## Usage Examples + +```text +/ctx-prompt-audit +/ctx-prompt-audit --sessions 10 +/ctx-prompt-audit 2026-01-24 +``` + +## Data Sources + +Session transcripts are stored in the journal: + +| Source | Format | +|-------------------------|------------------------------------| +| Journal directory | Exported session journals (richer) | + +Journal entries contain full turn-by-turn conversation and are +the best source for pattern detection. + +## Process + +1. **Gather transcripts**: read 3-5 recent sessions from the + journal +2. **Extract user prompts**: isolate the human turns +3. **Identify vague prompts**: flag those that caused clarifying + questions (see criteria below) +4. **Cross-reference patterns**: look for repeated habits across + sessions, not one-off mistakes +5. **Generate coaching report**: use the output format below +6. 
**Present and discuss**: share the report, ask if the user + wants to dig into any example + +## What Makes a Prompt "Vague" + +Look for prompts where the agent asked clarifying questions +instead of acting: + +- **Missing file context**: "fix the bug" without specifying + which file or error +- **Ambiguous scope**: "optimize it" without what to optimize + or success criteria +- **Undefined targets**: "update the component" when multiple + components exist +- **Missing error details**: "it's not working" without symptoms +- **Vague action words**: "make it better", "clean this up" + +## Important Nuance + +Not every short prompt is vague. Consider context: +- "fix the bug" after discussing a specific error: **not vague** +- "fix the bug" as the first message: **vague** +- "same:" after a pattern is established: **not vague** (the + user set a convention and is being efficient) +- Shorthand that references shared context is good prompting, + not lazy prompting + +## Output Format + +```markdown +## Prompt Audit Report + +**Sessions analyzed**: 5 +**User prompts reviewed**: 47 +**Vague prompts found**: 4 (8.5%) + +--- + +### Example 1: Missing File Context + +**Your prompt**: "fix the bug" + +**What happened**: I had to ask which file and what error. + +**Better prompt**: "fix the authentication error in +src/auth/login.ts where JWT validation fails with 401" + +--- + +## Patterns to Watch + +Based on your sessions, you tend to: +1. Skip mentioning file paths (3 occurrences) +2. 
Use "it" without establishing what "it" refers to + (2 occurrences) + +## What You Do Well + +- You provide error output when debugging (4 of 5 sessions) +- You reference specific files by path in most prompts + +## Tips + +- Start prompts with the **file path** when discussing + specific code +- Include **error messages** when debugging +- Specify **success criteria** for optimization tasks +``` + +## Guidelines + +- **Constructive, not critical**: frame suggestions as + improvements, not corrections +- **Show actual prompts**: quote from their sessions so + examples are concrete, not hypothetical +- **Explain the consequence**: what happened because the prompt + was vague (extra round-trip, wrong file edited, etc.) +- **Provide rewrites**: show a concrete better alternative for + each example +- **Acknowledge strengths**: include a "What You Do Well" + section; people learn better when not purely criticized +- **Look for patterns**: one vague prompt is noise; three of the + same kind is a habit worth addressing +- **End with actionable tips**: 3-5 specific, memorable tips + +## Quality Checklist + +Before presenting the report, verify: +- [ ] At least 3 sessions were analyzed (not a tiny sample) +- [ ] Every "vague" example includes the actual quoted prompt +- [ ] Every example has a concrete rewrite (not just "be more + specific") +- [ ] Context was considered (short != vague) +- [ ] Report includes positive observations, not just criticism +- [ ] Tips are specific to this user's patterns, not generic + advice diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-prompt/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-prompt/SKILL.md new file mode 100644 index 000000000..3cd99bc47 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-prompt/SKILL.md @@ -0,0 +1,49 @@ +--- +name: ctx-prompt +description: "Apply, list, and manage saved prompt templates from .context/prompts/. 
Use when the user asks to apply, list, or create a reusable template like code-review or refactor." +tools: [bash, read, write] +--- + +Apply reusable prompt templates from `.context/prompts/`. + +## When to Use + +- User says "use the code-review prompt" or "apply the refactor template" +- User asks to list, create, or manage prompt templates +- User mentions "prompt template" or "reusable prompt" + +## When NOT to Use + +- For structured context entries (use `ctx add` instead) +- For full workflow automation (use a dedicated skill instead) +- For scratchpad notes (use `ctx pad` instead) + +## Command Mapping + +| User intent | Command | +|----------------------------------|---------------------------------| +| "list my prompts" | `ctx prompt list` | +| "show the code-review prompt" | `ctx prompt show code-review` | +| "create a new prompt" | `ctx prompt add --stdin` | +| "delete the debug prompt" | `ctx prompt rm debug` | + +## Execution + +**When no name is given:** +```bash +ctx prompt list +``` + +**When a name is given:** +```bash +ctx prompt show <name> +``` + +Read the prompt content, then follow the instructions in the +prompt, applying them to the user's current context. + +## Quality Checklist + +- [ ] Used correct subcommand for user intent +- [ ] Prompt content was applied, not just displayed +- [ ] If prompt not found, suggested `ctx prompt list` diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-recall/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-recall/SKILL.md index 5db979773..609bb20f9 100644 --- a/internal/assets/integrations/copilot-cli/skills/ctx-recall/SKILL.md +++ b/internal/assets/integrations/copilot-cli/skills/ctx-recall/SKILL.md @@ -1,42 +1,62 @@ --- name: ctx-recall description: "Browse session history. Use when referencing past discussions or finding context from previous work." +tools: [bash] --- -Browse, inspect, and export AI session history. +Browse, inspect, and import AI session history.
## When to Use - When the user asks "what did we do last time?" - When looking for context from previous work sessions -- When exporting sessions to the journal +- When importing sessions to the journal for enrichment - When searching for a specific session by topic or date ## When NOT to Use -- When the user just wants current context (use ctx-status instead) -For modifying session content (recall is read-only) +- When the user just wants current context (use `ctx-status` or + `ctx-agent` instead) +- For modifying session content (browsing is read-only) -## Execution +## Subcommands -List recent sessions: +### `ctx journal source` ```bash -ctx recall list --limit 5 +ctx journal source --limit 5 ``` -Show details of a specific session: +### `ctx journal source --show` / `--latest` ```bash -ctx recall show --latest -ctx recall show <session-id> +ctx journal source --show <session-id> +ctx journal source --latest ``` -Export sessions to journal markdown: +### `ctx journal import` ```bash -ctx recall export --all +ctx journal import --all # Import new sessions only +ctx journal import --all --regenerate # Re-import all ``` -After listing sessions, summarize relevant findings rather than -dumping raw output. +## Typical Workflows + +**"What did we work on recently?"** +```bash +ctx journal source --limit 5 +``` + +**"Import everything to the journal"** +```bash +ctx journal import --all +``` + +Then suggest `ctx-journal-enrich-all` for enrichment.
+ +## Quality Checklist + +- [ ] Used the right subcommand for user intent +- [ ] Applied filters if user mentioned project, date, or topic +- [ ] For import, mentioned the normalize/enrich pipeline diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-reflect/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-reflect/SKILL.md new file mode 100644 index 000000000..ce78db11c --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-reflect/SKILL.md @@ -0,0 +1,124 @@ +--- +name: ctx-reflect +description: "Reflect on session progress. Use at natural breakpoints, after unexpected behavior, or when shifting to a different task." +--- + +Pause and reflect on this session. Review what has been +accomplished and identify context worth persisting. + +## When to Use + +- At natural breakpoints (feature complete, bug fixed, task + done) +- After unexpected behavior or a debugging detour +- When shifting from one task to a different one +- When context is getting full and the session may end soon +- When the user explicitly asks to reflect or wrap up + +## When NOT to Use + +- At the very start of a session (nothing to reflect on yet) +- After trivial changes (a typo fix does not need reflection) +- When the user is in flow and has not paused; do not interrupt + with unsolicited reflection + +## Usage Examples + +```text +/ctx-reflect +/ctx-reflect (after fixing the auth bug) +``` + +## Reflection Checklist + +Before listing items, step back and reason through the session +as a whole: what was the arc, what surprised you, what would +you do differently? This framing surfaces insights that a +mechanical checklist misses. + +Work through each category. Skip categories with nothing +to report; do not force empty sections. + +### 1. Learnings + +- Did we discover any gotchas, bugs, or unexpected behavior? +- Did we learn something about the codebase, tools, or + patterns? +- Would this help a future session avoid problems? 
+- Is it specific to this project? (General knowledge does not + belong in LEARNINGS.md) + +### 2. Decisions + +- Did we make any architectural or design choices? +- Did we choose between alternatives? What was the trade-off? +- Should the rationale be captured for future sessions? + +### 3. Tasks + +- Did we complete any tasks? (Mark done in TASKS.md) +- Did we start any tasks that are not yet finished? +- Should new tasks be added for follow-up work discovered + during this session? + +### 4. Session Notes + +- Was this a significant session worth a full snapshot? +- Would a future session benefit from the discussion context? +- Are there open threads that a future session needs to pick + up? + +## Output Format + +After reflecting, provide: + +1. **Summary**: what was accomplished (2-3 sentences) +2. **Suggested persists**: list what should be saved, with + the specific command or file for each item +3. **Offer**: ask the user which items to persist + +### Good Example + +> This session implemented the cooldown mechanism for +> `ctx agent` and updated all related docs. We discovered +> that `$PPID` in hook context resolves to the Claude Code +> process PID, which is unique per session. +> +> I'd suggest persisting: +> - **Learning**: `$PPID` in PreToolUse hooks resolves to +> the Claude Code PID (unique per session) +> `ctx add learning "Title" --session-id ID --branch BR --commit HASH --context "..." --lesson "..." --application "..."` +> - **Task**: mark "Add cooldown to ctx agent" as done +> - **Decision**: tombstone-based cooldown with 10m default +> `ctx add decision "Title" --session-id ID --branch BR --commit HASH --context "..." --rationale "..." --consequence "..."` +> +> Want me to persist any of these? + +### Bad Examples + +- "We did some stuff. Want me to save it?" 
(too vague; + no specific items or commands) +- Listing 10 trivial learnings that are general knowledge + (only project-specific insights belong) +- Persisting without asking (always get user confirmation) + +## Persistence Commands + +| What to persist | Command | +|------------------|----------------------------------------------------------------------------------------------------------------------------| +| Learning | `ctx add learning "Title" --session-id ID --branch BR --commit HASH --context "..." --lesson "..." --application "..."` | +| Decision | `ctx add decision "Title" --session-id ID --branch BR --commit HASH --context "..." --rationale "..." --consequence "..."` | +| Task completed | Edit TASKS.md directly | +| New task | `ctx add task "Description" --session-id ID --branch BR --commit HASH` | + +## Quality Checklist + +Before presenting the reflection, verify: +- [ ] Every suggested persist has a concrete command or file + path (not just "save the learning") +- [ ] Learnings are project-specific, not general knowledge +- [ ] Decisions include the trade-off rationale, not just + the choice +- [ ] No empty checklist categories (skip what has nothing + to report) +- [ ] The user is asked before anything is persisted diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-remember/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-remember/SKILL.md new file mode 100644 index 000000000..245c7b8e3 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-remember/SKILL.md @@ -0,0 +1,150 @@ +--- +name: ctx-remember +description: "Recall project context and present structured readback. Use when the user asks 'do you remember?', at session start, or when context seems lost." +--- + +Recall project context and present a structured readback as if +remembering, not searching. + +## Before Recalling + +Check that the context directory exists. If it does not, tell the +user: "No context directory found. 
Run `ctx init` to set up context +tracking, then there will be something to remember." + +## When to Use + +- The user asks "do you remember?", "what were we working on?", + or any memory-related question +- At the start of a session when context is not yet loaded +- When context seems lost or stale mid-session +- When the user asks about previous work, decisions, or learnings + +## When NOT to Use + +- Context was already loaded this session via `/ctx-agent`: don't + re-fetch what you already have +- Mid-session when you are actively working on a task and context + is fresh: don't interrupt flow +- When the user is asking about a *specific* past session by name + or ID: use `/ctx-history` instead, which has list/show/export + subcommands + +## Process + +Do all of this **silently**: narrating the steps makes the readback +feel like a file search rather than genuine recall: + +1. **Load context packet**: + ```bash + ctx agent + ``` +2. **Read the files** listed in the packet's "Read These Files" + section (TASKS.md, DECISIONS.md, LEARNINGS.md, etc.) +3. **List recent sessions**: + ```bash + ctx journal source --limit 3 + ``` +4. **Present the structured readback** (see format below) + +## Readback Format + +Present your findings as a structured readback with these sections: + +**Last session**: Topic, date, and what was accomplished. Cite the +most recent session from the session list. + +**Active work**: Pending and in-progress tasks from TASKS.md. Use +a brief list: one line per task with its status. + +**Recent context**: 1-2 recent decisions or learnings that are +relevant. Pick the most recent or most impactful. + +**Next step**: Suggest what to work on next based on the active +tasks, or ask the user for direction if priorities are unclear. 
+ +## Readback Rules + +- Open directly with the readback: instead of "I don't have memory", + present what you found +- Skip preamble like "Let me check": go straight to the structured + readback +- Present findings as recall, not discovery: you are *remembering*, + not *searching* +- Be honest about the mechanism only if the user explicitly asks + *how* you remember (e.g., "It's stored in context files managed + by ctx") + +## Examples + +### Good Readback + +> **Last session** (2026-02-07): We implemented the cooldown +> mechanism for `ctx agent` to prevent redundant context loads. +> +> **Active work**: +> - [ ] Add `--format json` flag to `ctx status` (pending) +> - [x] Implement session cooldown (done) +> - [ ] Write integration tests for journal import (in progress) +> +> **Recent context**: +> - Decided to use file-based cooldown tokens instead of +> environment variables (simpler, works across shells) +> - Learned that Claude Code hooks run in a subprocess, so env +> vars set in hooks don't persist to the main session +> +> **Next step**: The integration tests for journal import are +> partially done. Want to continue those, or shift to the JSON +> status flag? + +### Bad Readback (Anti-patterns) + +> "I don't have persistent memory, but let me check if there +> are any context files..." + +> "Let me look at the context files to see what's there. +> I found TASKS.md, let me read it..." + +> "I found some session files. Here's what they contain..." + +## Companion Tool Check + +After presenting the readback, check companion tool availability. +Skip this section entirely if `companion_check: false` is set in +`.ctxrc`: check by running `ctx config status` and looking for +the field value. + +**Companion tools** enhance ctx skills with web search and code +intelligence. 
They are optional but recommended: + +| Tool | Purpose | Smoke test | +|---------------|--------------------------------------------------------|----------------------------------------------------------------------| +| Gemini Search | Grounded web search with citations | Call `mcp__gemini-search__search_with_grounding` with a simple query | +| GitNexus | Code knowledge graph (symbols, blast radius, clusters) | Call `mcp__gitnexus__list_repos` | + +**Check procedure:** + +1. Attempt each smoke test silently +2. For tools that respond: note as available (no output needed) +3. For tools that fail or are not connected: append a brief note + after the readback: + > "Companion tools: Gemini Search is not connected (web search + > will fall back to built-in). Install via MCP settings if + > needed." +4. For GitNexus specifically: if it responds but the current repo + is not indexed or the index is stale, suggest: + > "GitNexus index is stale: run `npx gitnexus analyze` to + > rehydrate." + +Present companion status as a one-line note after the readback, +not a separate section. If everything is healthy, say nothing. + +## Quality Checklist + +Before presenting the readback, verify: +- [ ] Context packet was loaded (not skipped) +- [ ] Files from the read order were actually read +- [ ] Structured readback has all four sections +- [ ] No narration of the discovery process leaked into output +- [ ] Readback feels like recall, not a file system tour +- [ ] Companion tool check ran (unless suppressed via .ctxrc) diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-remind/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-remind/SKILL.md new file mode 100644 index 000000000..d020799c6 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-remind/SKILL.md @@ -0,0 +1,87 @@ +--- +name: ctx-remind +description: "Manage session reminders. Use when the user says 'remind me to...' or asks about pending reminders." 
+--- + +Manage session-scoped reminders via `ctx remind` commands using +natural language. Translate what the user says into the right +command. + +## When to Use + +- User says "remind me to..." or "remind me about..." +- User asks "what reminders do I have?" +- User wants to dismiss or clear reminders +- User mentions reminders surfaced at session start + +## When NOT to Use + +- For structured tasks with status tracking (use `ctx add task`) +- For sensitive values or quick notes (use `ctx pad`) +- For architectural decisions (use `ctx add decision`) +- Create a reminder only when the user explicitly says "remind me": + for everything else, let the conversation proceed without creating records + +## Command Mapping + +| User intent | Command | +|--------------------------------------|-----------------------------------------------| +| "remind me to refactor swagger" | `ctx remind "refactor swagger"` | +| "remind me tomorrow to check CI" | `ctx remind "check CI" --after YYYY-MM-DD` | +| "remind me next week to review auth" | `ctx remind "review auth" --after YYYY-MM-DD` | +| "what reminders do I have?" | `ctx remind list` | +| "dismiss reminder 3" | `ctx remind dismiss 3` | +| "clear all reminders" | `ctx remind dismiss --all` | + +## Execution + +**Add a reminder:** +```bash +ctx remind "refactor the swagger definitions" +``` + +**Add with date gate:** +```bash +ctx remind "check CI after the deploy" --after 2026-02-25 +``` + +**List reminders:** +```bash +ctx remind list +``` + +**Dismiss by ID:** +```bash +ctx remind dismiss 3 +``` + +**Dismiss all:** +```bash +ctx remind dismiss --all +``` + +## Natural Language Date Handling + +The CLI only accepts `YYYY-MM-DD` for `--after`. You must convert +natural language dates to this format. + +| User says | You run | +|--------------------------|---------------------------------------------------------| +| "remind me next session" | `ctx remind "..."` (no `--after`) | +| "remind me tomorrow" | `ctx remind "..." 
--after YYYY-MM-DD` (tomorrow's date) | +| "remind me next week" | `ctx remind "..." --after YYYY-MM-DD` (7 days from now) | +| "remind me about X" | `ctx remind "X"` (no `--after`, immediate) | +| "remind me after Friday" | `ctx remind "..." --after YYYY-MM-DD` (next Saturday) | + +If the date is ambiguous (e.g., "after the release"), ask the user +for a specific date. + +## Important Notes + +- Reminders fire **every session** until dismissed: no throttle +- The `--after` flag gates when a reminder starts appearing, not when + it expires +- IDs are never reused: after dismissing ID 3, the next gets ID 4+ +- Reminders are stored in `.context/reminders.json` (committed to git) +- After creating or dismissing, show the command output so the user + can confirm the action diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-resume/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-resume/SKILL.md new file mode 100644 index 000000000..ce0862c35 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-resume/SKILL.md @@ -0,0 +1,37 @@ +--- +name: ctx-resume +description: "Resume context hooks after a pause. Use when the user says 'resume ctx', 'unpause', 'turn nudges back on', or when transitioning from a quick task back to project work." +--- + +Resume all context hooks after a `/ctx-pause`. Restores normal nudge, +reminder, and ceremony behavior. + +## When to Use + +- User says "resume ctx", "resume context", "unpause" +- User says "turn nudges back on" +- Session has evolved from a quick task into real project work +- Before running `/ctx-wrap-up` (wrap-up needs hooks active) + +## When NOT to Use + +- Session is not paused (resume is a silent no-op, but don't confuse the user) +- User wants to restart or reset the session (just start a new session) + +## Execution + +Run the resume command: + +```bash +ctx hook resume +``` + +Then confirm to the user: + +> Context hooks resumed. 
Nudges, reminders, and ceremonies are active again. + +## Important Notes + +- **Silent no-op if not paused**: safe to run even if hooks aren't paused +- **Turn counter resets**: the graduated reminder counter starts fresh if + you pause again later diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-sanitize-permissions/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-sanitize-permissions/SKILL.md new file mode 100644 index 000000000..69c24c0f1 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-sanitize-permissions/SKILL.md @@ -0,0 +1,67 @@ +--- +name: ctx-sanitize-permissions +description: "Audit tool permissions for dangerous or overly broad entries. Use to ensure safe agent configuration." +tools: [bash, read, write] +--- + +Audit agent permission configurations for dangerous patterns. + +## When to Use + +- After initial project setup +- When reviewing security posture +- When permissions seem overly broad +- Before sharing a project configuration + +## When NOT to Use + +- No permission config exists +- Already audited recently + +## Categories to Check + +### 1. Hook bypass permissions +Permissions that disable safety hooks entirely. + +### 2. Destructive command permissions +Allow patterns that cover `rm -rf`, `git push --force`, +`git reset --hard`, etc. + +### 3. Injection vectors +Overly broad shell permissions that could allow arbitrary +command execution. + +### 4. Overly broad wildcards +Permissions like `Bash(*)` or `Write(*)` that grant +unrestricted access. + +## Process + +1. Read the permission configuration file +2. Check each entry against the four categories +3. Flag dangerous entries with severity level +4. Propose safer alternatives +5. Apply fixes with user approval + +## Output Format + +``` +## Permission Audit Results + +### 🔴 Critical (N) +1. `Bash(*)`: unrestricted shell access + → Suggest: scope to specific commands + +### 🟡 Warning (N) +1. 
`Write(/etc/*)`: write access to system dirs + → Suggest: remove or scope to project + +### ✅ Clean (N entries passed) +``` + +## Quality Checklist + +- [ ] All permission entries reviewed +- [ ] Critical items flagged +- [ ] Safer alternatives proposed +- [ ] No changes made without user approval diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-skill-audit/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-skill-audit/SKILL.md new file mode 100644 index 000000000..540f3fd72 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-skill-audit/SKILL.md @@ -0,0 +1,236 @@ +--- +name: ctx-skill-audit +description: "Audit skills against Anthropic prompting best practices. Use when reviewing skill quality, after creating or modifying a skill, before releasing skills, or when a skill produces inconsistent results. Also use when the user says 'audit this skill', 'check skill quality', 'review the skills', or 'are our skills any good?'" +--- + +Audit one or more skills against Anthropic's prompting best +practices. The goal is to find patterns that degrade skill +effectiveness with current Claude models and suggest concrete +improvements. + +## When to Use + +- After creating or modifying a skill (quality gate) +- Reviewing all skills before a release (batch audit) +- When a skill produces inconsistent or poor results +- When skills were written for older models and may need + calibration for Claude 4.5/4.6 + +## Before Auditing + +1. Read `references/anthropic-best-practices.md` from this + skill's directory: it contains the condensed audit criteria. +2. Identify which skill(s) to audit. If the user names a + specific skill, audit that one. If they say "audit all + skills," plan a batch pass. +3. For bundled skills, read from + `internal/assets/claude/skills/*/SKILL.md`. + For live skills, read from `.claude/skills/*/SKILL.md`. + +## Audit Dimensions + +Apply these checks to each skill. 
Each dimension maps to a +section in the best practices reference. + +### 1. Positive Framing + +Scan for negative instructions ("don't", "never", "avoid", +"do not") that lack a positive counterpart. Every negative +should be paired with what the agent *should* do instead. + +**Pass:** negative instructions are supplements to clear +positive guidance. +**Fail:** primary instructions are negative, leaving the +agent to guess the desired behavior. + + + +Do not create new files. Do not modify tests. Do not add +comments. + + +Edit only the files specified in the task. Preserve existing +tests and comments: add new ones only when the user requests +them. + + + +### 2. Motivation Over Mandates + +Check for MUST, NEVER, ALWAYS, CRITICAL used as emphasis +without explaining *why* the rule matters. Claude 4.5/4.6 +responds better to reasoning than rigid directives. + +**Pass:** important instructions include motivation ("because +X" or "so that Y") that lets the model generalize. +**Fail:** instructions rely on emphasis alone to convey +importance. + + + +You MUST ALWAYS run tests before reporting completion. + + +Run tests before reporting completion: untested changes +create silent regressions that compound across sessions. + + + +### 3. XML Tag Structure + +Check whether the skill mixes instructions with variable +content (file paths, user input, injected code) without +clear delimiters. XML tags prevent the model from confusing +injected content with skill instructions. + +**Pass:** variable content is wrapped in descriptive tags, +or the skill doesn't inject variable content. +**Fail:** the skill templates in external content alongside +instructions without delimiters. + +### 4. Few-Shot Examples + +Check whether non-trivial behaviors (output formats, decision +logic, style requirements) are demonstrated with examples. +Skills that describe complex output without showing it drift +over time. 
+ +**Pass:** key behaviors have at least one good/bad example +pair, or the behavior is simple enough that examples would +be redundant. +**Fail:** the skill describes a specific output format or +decision process but provides no examples. + +### 5. Subagent Guard + +If the skill spawns or encourages spawning subagents (via the +Agent tool), check that it states when subagents are and +aren't warranted. Claude Opus 4.6 over-delegates to subagents +when a direct tool call would be faster. + +**Pass:** subagent usage has explicit scope (when to use, +when not to), or the skill doesn't involve subagents. +**Fail:** the skill defaults to subagent delegation without +stating when direct execution is preferable. + +### 6. Overtriggering Calibration + +Check for language written to combat undertriggering in older +models that may cause overtriggering in Claude 4.5/4.6: +excessive caps emphasis (CRITICAL, MUST), redundant capability +statements ("You are an expert"), or aggressive always/never +framing. + +**Pass:** instructions use natural language with emphasis +reserved for genuinely critical points. +**Fail:** the skill reads like it was written for a less +capable model that needed constant nudging. + +### 7. Phantom References + +Every file path, tool name, and command referenced in the +skill must exist. Broken references are a form of hallucination +in the skill itself. + +**Pass:** all references resolve to real files/tools. +**Fail:** the skill mentions files or commands that don't +exist. + +### 8. Scope Discipline + +Check whether the skill encourages work beyond what's +requested: "while you're in there" improvements, unsolicited +refactoring, or scope creep. Skills should state the minimum +viable outcome. + +**Pass:** the skill's scope matches its stated purpose. +**Fail:** the skill encourages additional work beyond its +core task. + +### 9. Description Trigger Quality + +The `description` field determines when the skill activates. 
+Check that it: +- Covers concrete trigger situations and user phrases +- Includes synonyms and related concepts +- Is specific enough to avoid false triggers +- Is "pushy" enough to avoid undertriggering + +**Pass:** reading the description alone, you'd know exactly +when to use this skill. +**Fail:** the description is vague ("use for general tasks") +or too narrow (misses common phrasings). + +## Process + +### Single Skill Audit + +1. Read the skill's SKILL.md. +2. Apply all 9 audit dimensions. +3. Report findings using the output format below. +4. Suggest specific rewrites for any failures: show the + current text and the proposed replacement. + +### Batch Audit + +1. List all skills to audit (bundled, live, or both). +2. Audit each skill directly in the main conversation: + spawning one subagent per skill adds latency and context + overhead that outweighs parallelism for typical batch sizes. +3. Report concisely: only dimensions that fail or have notable + findings. +4. Summarize with a scorecard at the end. + +## Output Format + +For each audited skill, report: + +``` +### /skill-name + +**Overall:** X/9 pass + +| # | Dimension | Result | Notes | +|---|------------------------|--------|--------------------------| +| 1 | Positive framing | pass | | +| 2 | Motivation over mandates | fail | 3 bare MUST/NEVER found | +| 3 | XML tag structure | pass | | +| 4 | Few-shot examples | fail | No output format example | +| 5 | Subagent guard | n/a | No subagent usage | +| 6 | Overtriggering | pass | | +| 7 | Phantom references | pass | | +| 8 | Scope discipline | pass | | +| 9 | Description quality | warn | Missing synonym coverage | + +**Suggested fixes:** +- [Dimension 2] Line "You MUST ALWAYS run tests" → + "Run tests before completion: untested changes create + silent regressions." +- [Dimension 4] Add example showing expected output format + after the "Report results" section. 
+``` + +For batch audits, end with a summary: + +``` +## Batch Summary + +| Skill | Score | Top Issue | +|--------------------|-------|--------------------------| +| ctx-commit | 8/9 | Missing example | +| ctx-drift | 7/9 | 2 bare mandates | +| ctx-verify | 9/9 | - | +``` + +## Quality Checklist + +Before reporting audit results: + +- [ ] Read the best practices reference before starting +- [ ] Applied all 9 dimensions (mark n/a where inapplicable) +- [ ] Every "fail" has a specific suggested rewrite, not just + a description of the problem +- [ ] Phantom reference check actually verified file existence + (used Glob/Read, not assumption) +- [ ] Description quality check considered real user phrases, + not hypothetical ones diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-skill-creator/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-skill-creator/SKILL.md new file mode 100644 index 000000000..f8b05ac9a --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-skill-creator/SKILL.md @@ -0,0 +1,76 @@ +--- +name: ctx-skill-creator +description: "Create, improve, test, and deploy skills. Full skill lifecycle from intent to working skill file." +tools: [bash, read, write, edit, glob, grep] +--- + +Create new skills or improve existing ones through a structured +workflow. + +## When to Use + +- Creating a new skill from scratch +- Improving an underperforming skill +- Porting a skill from one integration to another + +## When NOT to Use + +- Quick one-off automations (just script it) +- When the need is too vague (brainstorm first) + +## Process + +### 1. Intent capture + +Gather: +- What should this skill do? +- When should it trigger? +- What tools does it need? +- What's the expected output? + +### 2. Draft the SKILL.md + +Use the standard structure: + +```yaml +--- +name: ctx-{name} +description: "..." +tools: [bash, read, write, ...] +--- +``` + +Sections: When to Use, When NOT to Use, Process, Quality Checklist. + +### 3. 
Validate + +Check against skill audit dimensions: +- Positive framing +- Clear scope +- Good examples +- No phantom references +- Overtriggering guard + +### 4. Test + +If possible, do a dry run of the skill's workflow to verify +it works end-to-end. + +### 5. Deploy + +Write the file to the appropriate skills directory: +- Claude: `internal/assets/claude/skills/{name}/SKILL.md` +- Copilot CLI: `internal/assets/integrations/copilot-cli/skills/{name}/SKILL.md` + +### 6. Build + +Run `go build ./cmd/ctx/...` to verify the embed compiles. + +## Quality Checklist + +- [ ] Frontmatter is complete (name, description, tools) +- [ ] When to Use / When NOT to Use sections exist +- [ ] Process has numbered, actionable steps +- [ ] Quality Checklist at the end +- [ ] No phantom references +- [ ] Build passes with new skill embedded diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-spec/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-spec/SKILL.md new file mode 100644 index 000000000..d5fb8cadc --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-spec/SKILL.md @@ -0,0 +1,109 @@ +--- +name: ctx-spec +description: "Scaffold a feature spec from the project template. Use when planning a new feature, writing a design document, or when a task references a missing spec." +--- + +Scaffold a new spec from `specs/tpl/spec-template.md` and walk through +each section with the user to produce a complete design document. + +## When to Use + +- Before implementing a non-trivial feature +- When a task says "Spec: `specs/X.md`" and the file does not exist +- When `/ctx-brainstorm` has produced a validated design that needs + a written artifact +- When the user says "let's spec this out" or "write a spec for..." 
+ +## When NOT to Use + +- Bug fixes or small changes (just do them) +- When a spec already exists (read it instead) +- When the design is still vague (use `/ctx-brainstorm` first) + +## Usage Examples + +```text +/ctx-spec +/ctx-spec (session checkpointing) +/ctx-spec (rss feed generation) +``` + +## Process + +### 1. Gather the Feature Name + +If not provided as an argument, ask: +> "What feature should this spec cover?" + +Derive the filename: lowercase, hyphens, no spaces. +Target path: `specs/{feature-name}.md` + +If the file already exists, warn and offer to review it instead. + +### 2. Read the Template + +Read `specs/tpl/spec-template.md` to get the current structure. + +### 3. Walk Through Sections + +Work through each section **one at a time**. For each section: + +1. Explain what belongs there (one sentence) +2. Ask the user for input or propose content based on context +3. Write their answer into the section +4. Move to the next section + +**Section order and prompts:** + +| Section | Prompt | +|----------------------|----------------------------------------------------------------------------------------------------| +| **Problem** | "What user-visible problem does this solve? Why now?" | +| **Approach** | "High-level: how does this work? Where does it fit?" | +| **Happy Path** | "Walk me through what happens when everything goes right." | +| **Edge Cases** | "What could go wrong? Think: empty input, partial failure, duplicates, concurrency, missing deps." | +| **Validation Rules** | "What input constraints are enforced? Where?" | +| **Error Handling** | "For each error condition: what message does the user see? How do they recover?" | +| **Interface** | "CLI command? Skill? Both? What flags?" | +| **Implementation** | "Which files change? Key functions? Existing helpers to reuse?" | +| **Configuration** | "Any .ctxrc keys, env vars, or settings?" | +| **Testing** | "Unit, integration, edge case tests?" 
| +| **Non-Goals** | "What does this intentionally NOT do?" | + +**Spend extra time on Edge Cases and Error Handling.** These are +where specs earn their value. Push for at least 3 edge cases and +their expected behaviors. Do not accept "none" without challenge. + +### 4. Open Questions + +After all sections, ask: +> "Anything unresolved? If not, I'll remove the Open Questions +> section." + +### 5. Write the Spec + +Write the completed spec to `specs/{feature-name}.md`. + +### 6. Cross-Reference + +- If a Phase exists in TASKS.md referencing this spec, confirm + the path matches +- If no tasks exist yet, offer to create them: + > "Want me to break this into tasks in TASKS.md?" + +## Skipping Sections + +Not every spec needs every section. If a section clearly does not +apply (e.g., no CLI for an internal refactor), the user can say +"skip" and the section is omitted entirely: not left with +placeholder text. + +## Quality Checklist + +Before writing the file, verify: + +- [ ] Problem section explains *why*, not just *what* +- [ ] At least 3 edge cases enumerated with expected behavior +- [ ] Error handling has user-facing messages and recovery steps +- [ ] Non-goals are explicit (prevents scope creep later) +- [ ] No placeholder `...` text remains +- [ ] Filename matches the convention: `specs/{feature-name}.md` diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-verify/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-verify/SKILL.md new file mode 100644 index 000000000..c0101db64 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-verify/SKILL.md @@ -0,0 +1,52 @@ +--- +name: ctx-verify +description: "Verify before claiming completion. Use before saying work is done, tests pass, or builds succeed." +tools: [bash, read, glob, grep] +--- + +Run the relevant verification command before claiming a result. 
+ +## When to Use + +- Before saying "tests pass", "build succeeds", or "bug fixed" +- Before reporting completion of any task with a testable outcome +- When the user asks "does it work?" or "is it done?" + +## When NOT to Use + +- For documentation-only changes with no testable outcome +- When the user explicitly says "skip verification" +- For exploratory work with no pass/fail criterion + +## Workflow + +1. **Identify** what command proves the claim +2. **Think through** what passing looks like (and false positives) +3. **Run** the command (fresh, not a previous run) +4. **Read** full output; check exit code, count failures +5. **Report** actual results with evidence + +## Claim-to-Evidence Map + +| Claim | Required Evidence | +|-------------------|--------------------------------------------| +| Tests pass | Test command output showing 0 failures | +| Linter clean | `golangci-lint run` showing 0 errors | +| Build succeeds | `go build` exit 0 | +| Bug fixed | Original symptom no longer reproduces | +| All checks pass | `make audit` showing all steps pass | + +## Self-Audit Questions + +Before presenting any artifact as complete: +- What assumptions did I make? +- What did I NOT check? +- Where am I least confident? +- What would a reviewer question first? + +## Quality Checklist + +- [ ] Verification command was run fresh (not reused) +- [ ] Exit code was checked +- [ ] Claim matches evidence (build ≠ tests) +- [ ] If multiple claims, each has its own evidence diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-worktree/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-worktree/SKILL.md new file mode 100644 index 000000000..87db0f567 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-worktree/SKILL.md @@ -0,0 +1,170 @@ +--- +name: ctx-worktree +description: "Manage git worktrees for parallel agent development. Use when splitting work across independent task tracks." 
+---
+
+Manage git worktrees to parallelize agent work across independent
+task tracks. Supports creating, listing, and tearing down worktrees
+with ctx-aware guardrails.
+
+## When to Use
+
+- User wants to parallelize a backlog across multiple agents
+- Multiple independent task tracks with non-overlapping files
+- User says "create worktree", "let's parallelize", "split the work"
+- 3+ independent tasks that can be worked concurrently
+
+## When NOT to Use
+
+- Single task or tightly coupled tasks
+- Tasks that touch overlapping files (high merge conflict risk)
+- Fewer than 3 independent tasks (overhead exceeds benefit)
+- Already inside a worktree (manage from the main checkout only)
+- User just wants concurrent Claude Code sessions in the same tree
+
+## Operations
+
+### `create <name>`
+
+Create a new worktree as a sibling directory with a `work/` branch.
+
+**Process:**
+
+1. **Check count**: refuse if 4 worktrees already exist:
+   ```bash
+   git worktree list
+   ```
+   Count lines. If >= 5 (1 main + 4 worktrees), stop and explain
+   the limit.
+
+2. **Determine project name** from the current directory basename:
+   ```bash
+   basename "$(git rev-parse --show-toplevel)"
+   ```
+
+3. **Create the worktree** as a sibling directory:
+   ```bash
+   git worktree add "../<project>-<name>" -b "work/<name>"
+   ```
+
+4. **Verify** the worktree was created:
+   ```bash
+   ls "../<project>-<name>"
+   ```
+
+5. **Remind the user**:
+   > Do NOT run `ctx init` in the worktree. The context
+   > directory is already tracked in git and will be present.
+   > Launch a separate Claude Code session there and work
+   > normally.
+
+### `list`
+
+Show all active worktrees:
+
+```bash
+git worktree list
+```
+
+### `teardown <name>`
+
+Merge a completed worktree back and clean up.
+
+**Process:**
+
+1. **Check for uncommitted changes** in the worktree:
+   ```bash
+   git -C "../<project>-<name>" status --porcelain
+   ```
+   If output is non-empty, warn and stop. The user must commit or
+   discard changes first.
+
+2. 
**Merge the work branch** into the current branch:
+   ```bash
+   git merge "work/<name>"
+   ```
+   If there are conflicts, stop and help the user resolve them.
+   TASKS.md conflicts are common: see guidance below.
+
+3. **Remove the worktree**:
+   ```bash
+   git worktree remove "../<project>-<name>"
+   ```
+
+4. **Delete the branch**:
+   ```bash
+   git branch -d "work/<name>"
+   ```
+
+5. **Verify** cleanup:
+   ```bash
+   git worktree list
+   git branch | grep "work/"
+   ```
+
+## Guardrails
+
+- **Max 4 worktrees**: more than 4 parallel tracks makes merge
+  complexity outweigh productivity gains
+- **Sibling directories only**: worktrees go in `../<project>-<name>`,
+  never inside the project tree
+- **`work/` branch prefix**: all worktree branches use `work/`
+  for easy identification and cleanup
+- **No `ctx init` in worktrees**: the context directory is tracked
+  in git; running init would overwrite shared context files
+- **Manage from main checkout only**: create and teardown worktrees
+  from the main working tree, not from inside a worktree
+- **TASKS.md conflict resolution**: when merging, TASKS.md will
+  often conflict because multiple agents marked different tasks as
+  complete. Resolution: accept all `[x]` completions from both sides.
+  No task should go from `[x]` back to `[ ]`.
+
+## What Works Differently in Worktrees
+
+The encryption key lives at `~/.ctx/.ctx.key` (user-level, outside
+the project). All worktrees on the same machine share this path, so
+**`ctx pad` and `ctx hook notify` work in worktrees automatically**.
+
+One thing to watch:
+
+- **Journal enrichment**: `ctx journal import` and journal enrichment
+  resolve paths relative to the current working directory. Files
+  created in a worktree stay in that worktree and are discarded on
+  teardown. Enrich journals on the main branch after merging: the
+  JSONL session logs are intact regardless.
+
+## Task Grouping Guidance
+
+Before creating worktrees, analyze the backlog to group tasks into
+non-overlapping tracks:
+
+1. 
**Read TASKS.md** and identify all pending tasks +2. **Estimate blast radius**: which files/directories does each + task touch? +3. **Group by non-overlapping directories**: tasks that touch the + same package or file must go in the same track +4. **Present the grouping** to the user before creating worktrees: + +```text +Proposed worktree groups: + + work/docs : recipe updates, blog post, getting started guide + (touches: docs/) + work/crypto : P3.1-P3.3 encrypted scratchpad infra + (touches: internal/crypto/, internal/config/) + work/pad-cli : P3.4-P3.9 pad CLI commands + (touches: internal/cli/pad/) +``` + +Let the user approve or adjust before proceeding. + +## Quality Checklist + +Before any operation, verify: +- [ ] Worktree count checked (max 4) +- [ ] Branch uses `work/` prefix +- [ ] Worktree is a sibling directory (`../`) +- [ ] User reminded not to run `ctx init` in worktree +- [ ] Uncommitted changes checked before teardown +- [ ] Merge completed before worktree removal +- [ ] Branch deleted after worktree removal diff --git a/internal/assets/integrations/copilot-cli/skills/ctx-wrap-up/SKILL.md b/internal/assets/integrations/copilot-cli/skills/ctx-wrap-up/SKILL.md new file mode 100644 index 000000000..6c9a21a51 --- /dev/null +++ b/internal/assets/integrations/copilot-cli/skills/ctx-wrap-up/SKILL.md @@ -0,0 +1,180 @@ +--- +name: ctx-wrap-up +description: "End-of-session context persistence ceremony. Use when wrapping up a session to capture learnings, decisions, conventions, and tasks." +--- + +Guide end-of-session context persistence. Gather signal from the +session, propose candidates worth persisting, and persist approved +items via `ctx add`. + +This is a **ceremony skill**: invoke it explicitly as `/ctx-wrap-up` +at session end, not conversationally. It pairs with `/ctx-remember` +at session start. + +## Before Starting + +Check that the context directory exists. If it does not, tell the user: +"No context directory found. 
Run `ctx init` to set up context +tracking, then there will be something to wrap up." + +## When to Use + +- At the end of a session, before the user quits +- When the user says "let's wrap up", "save context", "end of + session" +- When the `check-persistence` hook suggests it + +## When NOT to Use + +- Nothing meaningful happened (only read files, quick lookup) +- The user already persisted everything manually with `ctx add` +- Mid-session when the user is still in flow: use `/ctx-reflect` + instead for mid-session checkpoints + +## Process + +### Phase 1: Gather signal + +Do this **silently**: do not narrate the steps: + +1. Check what changed in the working tree: + ```bash + git diff --stat + ``` +2. Check commits made this session: + ```bash + git log --oneline @{upstream}..HEAD 2>/dev/null || git log --oneline -5 + ``` +3. Scan the conversation history for: + - Architectural choices or design trade-offs discussed + - Gotchas, bugs, or unexpected behavior encountered + - Patterns established or conventions agreed upon + - Follow-up work identified but not yet started + - Tasks completed or progressed + +### Phase 2: Propose candidates + +Think step-by-step about what is worth persisting. For each +potential candidate, ask yourself: +- Is this project-specific or general knowledge? (Only persist + project-specific insights) +- Would a future session benefit from knowing this? +- Is this already captured in the context files? +- Is this substantial enough to record, or is it trivial? + +Present candidates in a structured list, grouped by type. +Skip categories with no candidates: do not show empty sections. + +``` +## Session Wrap-Up + +### Learnings (N candidates) +1. **Title of learning** + - Context: What prompted this + - Lesson: The key insight + - Application: How to apply it going forward + +### Decisions (N candidates) +1. 
**Title of decision** + - Context: What prompted this + - Rationale: Why this choice + - Consequence: What changes as a result + +### Conventions (N candidates) +1. **Convention description** + +### Tasks (N candidates) +1. **Task description** (new | completed | updated) + +Persist all? Or select which to keep? +``` + +### Phase 3: Persist approved candidates + +Wait for the user to approve, select, or modify candidates. +Wait for the user to approve each item before persisting: +candidates proposed by the agent may be incomplete or +mischaracterized, and the user is the final authority on what +belongs in their context. + +For each approved candidate, run the appropriate command: + +| Type | Command | +|-------------|--------------------------------------------------------------------------------------------------------------------------------| +| Learning | `ctx add learning "Title" --session-id ID --branch BR --commit HASH --context "..." --lesson "..." --application "..."` | +| Decision | `ctx add decision "Title" --session-id ID --branch BR --commit HASH --context "..." --rationale "..." --consequence "..."` | +| Convention | `ctx add convention "Description"` | +| Task (new) | `ctx add task "Description" --session-id ID --branch BR --commit HASH` | +| Task (done) | Edit TASKS.md to mark complete | + +Report the result of each command. If any fail, report the error +and continue with the remaining items. + +### Phase 3.5: Suppress post-wrap-up nudges + +After persisting, mark the session as wrapped up so checkpoint +nudges are suppressed for the remainder of the session: + +```bash +ctx system mark-wrapped-up +``` + +### Phase 4: Commit (optional) + +After persisting, check for uncommitted changes: + +```bash +git status --short +``` + +If there are uncommitted changes, offer: + +> There are uncommitted changes. Want me to run `/ctx-commit` +> to commit with context capture? + +Do not auto-commit. The user decides. 
+ +## Candidate Quality Guide + +### Good candidates + +- "PyMdownx `details` extension wraps content in `
<details>`
+  tags, breaking `<pre>` rendering in MkDocs": specific
+  gotcha, actionable for future sessions
+- "Decision: use file-based cooldown tokens instead of env vars
+  because hooks run in subprocesses": real trade-off with
+  rationale
+- "Convention: all skill descriptions use imperative mood":
+  codifies a pattern for consistency
+
+### Weak candidates (do not propose)
+
+- "Go has good error handling": general knowledge, not
+  project-specific
+- "We edited main.go": obvious from the diff, not an insight
+- "Tests should pass before committing": too generic to be
+  useful
+- Anything already present in LEARNINGS.md or DECISIONS.md
+
+## Relationship to /ctx-reflect
+
+`/ctx-reflect` is for mid-session checkpoints at natural
+breakpoints. `/ctx-wrap-up` is for end-of-session: it's more
+thorough, covers the full session arc, and includes the commit
+offer. If the user already ran `/ctx-reflect` recently, avoid
+proposing the same candidates again.
+
+## Quality Checklist
+
+Before presenting candidates, verify:
+- [ ] Signal was gathered (git diff, git log, conversation scan)
+- [ ] Every candidate has complete fields (not just a title)
+- [ ] Candidates are project-specific, not general knowledge
+- [ ] No duplicates with existing context files
+- [ ] Empty categories are omitted, not shown as "(none)"
+- [ ] User is asked before anything is persisted
+
+After persisting, verify:
+- [ ] Each `ctx add` command succeeded
+- [ ] Uncommitted changes were surfaced (if any)
+- [ ] User was offered `/ctx-commit` (if applicable)
diff --git a/internal/assets/integrations/copilot/copilot-instructions.md b/internal/assets/integrations/copilot/copilot-instructions.md
index 0caf71efb..0d63db10b 100644
--- a/internal/assets/integrations/copilot/copilot-instructions.md
+++ b/internal/assets/integrations/copilot/copilot-instructions.md
@@ -126,7 +126,7 @@ validation, session tracking, and boundary checks automatically.
 **Rule**: Do NOT run `ctx` in the terminal when the equivalent MCP tool
 exists. MCP tools enforce boundary validation and track session state.
 Terminal fallback is only for commands without an MCP equivalent (e.g.,
-`ctx agent`, `ctx recall list`).
+`ctx agent`, `ctx journal source`).
 
 ## Governance: When to Call Tools
 
@@ -143,17 +143,17 @@ responses when governance actions are overdue. Follow this protocol:
 ### During Work
 
 - **After making a decision or discovering a gotcha**: call `ctx_add()`
-  to persist it immediately — not at session end.
+  to persist it immediately, not at session end.
 - **After completing a task**: call `ctx_complete()` or
   `ctx_check_task_completion()`.
-- **Every 10–15 tool calls or 15 minutes**: call `ctx_drift()` to
+- **Every 10-15 tool calls or 15 minutes**: call `ctx_drift()` to
   check for stale context.
 - **Before git commit**: call `ctx_status()` to verify context health.
 
 ### Responding to Warnings
 
 When a tool response contains a `⚠` warning, act on it in your next
-action. Do not ignore governance warnings — they indicate context
+action. Do not ignore governance warnings; they indicate context
 hygiene actions that are overdue.
 
 When a tool response contains a `🚨 CRITICAL` warning, **stop current
@@ -169,7 +169,7 @@ real time. The following actions are flagged as violations:
 
 - **Dangerous commands**: `sudo`, `rm -rf /`, `git push`, `git reset
   --hard`, `curl`, `wget`, `chmod 777`
-- **hack/ scripts**: Direct execution of `hack/*.sh` — use `make`
+- **hack/ scripts**: Direct execution of `hack/*.sh`; use `make`
   targets instead
 - **Sensitive files**: Editing `.env`, `.pem`, `.key`, or files
   matching `credentials` or `secret`
diff --git a/internal/assets/read/agent/agent.go b/internal/assets/read/agent/agent.go
index 2122220c7..6259f2f75 100644
--- a/internal/assets/read/agent/agent.go
+++ b/internal/assets/read/agent/agent.go
@@ -4,8 +4,6 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package agent provides access to agent integration files embedded
-// in the assets filesystem.
 package agent
 
 import (
diff --git a/internal/assets/read/agent/doc.go b/internal/assets/read/agent/doc.go
index bcbc655bb..81c944a8b 100644
--- a/internal/assets/read/agent/doc.go
+++ b/internal/assets/read/agent/doc.go
@@ -4,9 +4,33 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package agent provides access to agent integration files
-// embedded in the assets filesystem.
+// Package agent provides access to agent integration
+// files embedded in the assets filesystem.
 //
-// [CopilotInstructions] returns the embedded copilot-instructions.md
-// template, deployed by ctx init for GitHub Copilot integration.
+// # Copilot Integration
+//
+// CopilotInstructions returns the embedded
+// copilot-instructions.md template deployed by
+// ctx init for GitHub Copilot integration.
+//
+// CopilotCLIHooksJSON returns the hooks config JSON
+// for the Copilot CLI integration.
+//
+// CopilotCLIScripts returns a map of filename to
+// content for all embedded hook scripts in the
+// Copilot CLI scripts directory.
+//
+// CopilotCLISkills returns a map of skill name to
+// SKILL.md content for embedded Copilot CLI skills.
+//
+// # GitHub Agents
+//
+// AgentsMd returns the AGENTS.md template for
+// repository-level agent configuration.
+//
+// AgentsCtxMd returns the .github/agents/ctx.md
+// template for the ctx agent definition.
+//
+// InstructionsCtxMd returns the path-specific
+// instructions template for the context directory.
 package agent
diff --git a/internal/assets/read/catalog/doc.go b/internal/assets/read/catalog/doc.go
index d7d8e8ea3..d5c1c8f24 100644
--- a/internal/assets/read/catalog/doc.go
+++ b/internal/assets/read/catalog/doc.go
@@ -4,11 +4,26 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package catalog lists available context template files from the
-// embedded assets.
+// Package catalog lists available context template
+// files from the embedded assets.
 //
-// [List] returns the names of all .context/ template files
-// (TASKS.md, DECISIONS.md, etc.) available for deployment by
-// ctx init. The list is derived from the embedded filesystem
-// at compile time.
+// # Listing Templates
+//
+// List returns the names of all .context/ template
+// files (TASKS.md, DECISIONS.md, CONVENTIONS.md, etc.)
+// available for deployment by ctx init. The list is
+// derived from the embedded filesystem at compile
+// time by reading the context/ asset directory.
+//
+//	names, err := catalog.List()
+//	for _, name := range names {
+//	    fmt.Println(name) // "TASKS.md", etc.
+//	}
+//
+// # Usage
+//
+// The init command uses this list to determine which
+// template files to deploy into a new .context/
+// directory. Each name corresponds to a file that
+// can be read via the template package.
 package catalog
diff --git a/internal/assets/read/claude/claude.go b/internal/assets/read/claude/claude.go
index 147bb44ed..68a54cb7c 100644
--- a/internal/assets/read/claude/claude.go
+++ b/internal/assets/read/claude/claude.go
@@ -4,8 +4,6 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package claude provides access to Claude Code integration files
-// embedded in the assets filesystem.
 package claude
 
 import (
diff --git a/internal/assets/read/claude/doc.go b/internal/assets/read/claude/doc.go
index f347cd996..a845337d1 100644
--- a/internal/assets/read/claude/doc.go
+++ b/internal/assets/read/claude/doc.go
@@ -4,10 +4,32 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package claude provides access to Claude Code integration files
-// embedded in the assets filesystem.
+// Package claude provides access to Claude Code
+// integration files embedded in the assets filesystem.
 //
-// [Md] returns the embedded CLAUDE.md template for project-level
-// instructions. [PluginVersion] extracts the semver string from
-// the embedded plugin.json manifest.
+// # CLAUDE.md Template
+//
+// Md returns the embedded CLAUDE.md template for
+// project-level Claude Code instructions. This file
+// is deployed to the project root during ctx init,
+// separate from the .context/ template files.
+//
+//	content, err := claude.Md()
+//
+// # Plugin Version
+//
+// PluginVersion extracts the semver version string
+// from the embedded plugin.json manifest. This is
+// used to report the ctx plugin version without
+// parsing the manifest at the call site.
+//
+//	version, err := claude.PluginVersion()
+//	// => "0.8.1"
+//
+// # Plugin Manifest
+//
+// The plugin.json file follows the Claude Code plugin
+// specification and declares the ctx plugin identity,
+// version, and capabilities. It is embedded alongside
+// CLAUDE.md under the claude/ asset directory.
 package claude
diff --git a/internal/assets/read/desc/desc.go b/internal/assets/read/desc/desc.go
index ff6c7a8f7..105d84e68 100644
--- a/internal/assets/read/desc/desc.go
+++ b/internal/assets/read/desc/desc.go
@@ -29,7 +29,7 @@ func Command(key string) (short, long string) {
 
 // Flag returns the description for a flag.
 //
-// Keys use dot notation: "add.file", "context-dir".
+// Keys use dot notation: "add.file", "compact.archive".
 // Returns an empty string if the key is not found.
 //
 // Parameters:
diff --git a/internal/assets/read/desc/doc.go b/internal/assets/read/desc/doc.go
index 89d6c0fad..7860ba1f1 100644
--- a/internal/assets/read/desc/doc.go
+++ b/internal/assets/read/desc/doc.go
@@ -4,12 +4,42 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package desc provides command, flag, text, and example description
-// lookups backed by embedded YAML.
-//
-// All user-facing strings are externalized to YAML files loaded at
-// init time via [lookup.Init]. The four accessors — [Command],
-// [Flag], [Example], and [Text] — resolve DescKey constants to
-// their localized values. Missing keys return the key itself as
-// a fallback, making gaps visible without crashing.
+// Package desc provides command, flag, text, and
+// example description lookups backed by embedded YAML.
+//
+// All user-facing strings are externalized to YAML
+// files loaded at init time via lookup.Init. The four
+// accessors resolve DescKey constants to their
+// localized values. Missing keys return the key
+// itself as a fallback, making gaps visible without
+// crashing.
+//
+// # Command Descriptions
+//
+// Command returns Short and Long descriptions for a
+// CLI command by dot-notation key.
+//
+//	short, long := desc.Command("pad.show")
+//
+// # Flag Descriptions
+//
+// Flag returns the description for a CLI flag by
+// dot-notation key.
+//
+//	d := desc.Flag("add.file")
+//
+// # Example Text
+//
+// Example returns usage example text for an entry
+// type key (decision, learning, task, convention).
+//
+//	ex := desc.Example("decision")
+//
+// # General Text
+//
+// Text returns a user-facing text string by
+// dot-notation key. This is used throughout ctx for
+// error messages, labels, and formatted output.
+//
+//	msg := desc.Text("backup.run-hint")
 package desc
diff --git a/internal/assets/read/entry/doc.go b/internal/assets/read/entry/doc.go
index e471fa8b1..79521ab63 100644
--- a/internal/assets/read/entry/doc.go
+++ b/internal/assets/read/entry/doc.go
@@ -4,11 +4,34 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package entry provides access to entry template files embedded in
-// the assets filesystem.
+// Package entry provides access to entry template
+// files embedded in the assets filesystem.
 //
-// Entry templates are Markdown scaffolds used when adding new
-// decisions, learnings, tasks, or conventions via ctx add.
-// [List] returns available template names and [ForName] reads
-// a specific template by name.
+// Entry templates are Markdown scaffolds used when
+// adding new decisions, learnings, tasks, or
+// conventions via ctx add. Each template defines the
+// structure and required fields for its entry type.
+//
+// # Listing Templates
+//
+// List returns the file names of all available entry
+// templates from the entry-templates/ asset directory.
+//
+//	names, err := entry.List()
+//	// => ["decision.md", "learning.md", ...]
+//
+// # Reading Templates
+//
+// ForName reads a specific template by filename. The
+// returned bytes contain the Markdown scaffold ready
+// for field substitution.
+//
+//	content, err := entry.ForName("decision.md")
+//
+// # Usage
+//
+// The add command reads the appropriate template,
+// substitutes user-provided values into the scaffold,
+// and appends the formatted entry to the matching
+// context file.
 package entry
diff --git a/internal/assets/read/hook/doc.go b/internal/assets/read/hook/doc.go
index fe3c9c664..c8eff3815 100644
--- a/internal/assets/read/hook/doc.go
+++ b/internal/assets/read/hook/doc.go
@@ -4,10 +4,34 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package hook provides access to hook message templates and the
-// hook registry from embedded assets.
+// Package hook provides access to hook message
+// templates and the hook registry from embedded
+// assets.
 //
-// [Message] reads a specific template file by hook name and
-// filename. [MessageRegistry] returns the raw registry.yaml.
-// [TraceScript] reads an embedded trace git hook script.
+// # Message Templates
+//
+// Message reads a specific template file by hook
+// name and filename. Templates are stored under
+// hooks/messages/<hook-name>/ in the embedded
+// filesystem.
+//
+//	data, err := hook.Message("qa-reminder", "gate.txt")
+//
+// # Registry
+//
+// MessageRegistry returns the raw registry.yaml that
+// describes all hook message templates, their
+// categories, and template variables. The registry is
+// parsed by the hooks/messages package for structured
+// access.
+//
+//	raw, err := hook.MessageRegistry()
+//
+// # Trace Scripts
+//
+// TraceScript reads an embedded git hook script by
+// filename. These scripts are installed into
+// .git/hooks/ by ctx init to enable commit tracing.
+//
+//	script, err := hook.TraceScript("prepare-commit-msg.sh")
 package hook
diff --git a/internal/assets/read/journal/doc.go b/internal/assets/read/journal/doc.go
index 594e6331e..8bfc33058 100644
--- a/internal/assets/read/journal/doc.go
+++ b/internal/assets/read/journal/doc.go
@@ -4,9 +4,24 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package journal provides access to journal site assets such as
-// extra CSS embedded in the binary.
+// Package journal provides access to journal site
+// assets embedded in the binary.
 //
-// [ExtraCSS] returns the CSS content injected into the generated
-// journal site for styling beyond what zensical provides by default.
+// # Extra CSS
+//
+// ExtraCSS returns the CSS content injected into the
+// generated journal site for styling beyond what the
+// zensical static site generator provides by default.
+// This CSS customizes the appearance of session
+// entries, timestamps, and other journal-specific
+// elements.
+//
+//	css, err := journal.ExtraCSS()
+//
+// # Journal Site Generation
+//
+// The journal site is built by the zensical package
+// from Markdown session entries. The extra CSS is
+// written alongside the generated site to override
+// default theme styles.
 package journal
diff --git a/internal/assets/read/lookup/doc.go b/internal/assets/read/lookup/doc.go
index c6de44ccb..79faa0049 100644
--- a/internal/assets/read/lookup/doc.go
+++ b/internal/assets/read/lookup/doc.go
@@ -1,16 +1,55 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package lookup owns the YAML description maps and eager
-// initialization for all embedded text lookups.
+// Package lookup is the **eager-init in-memory cache** for
+// every embedded YAML asset ctx ships: command-help text,
+// flag-help text, prompt templates, examples, stop-word
+// lists, glob patterns, default permission lists.
 //
-// [Init] loads all YAML files (commands, flags, text, examples)
-// into in-memory maps. [TextDesc] resolves a text DescKey.
-// [StopWords] returns the stop word set for relevance scoring.
-// [ConfigPatterns] returns glob patterns for config file detection.
-// [PermAllowListDefault] and [PermDenyListDefault] return the
-// default permission lists for Claude Code settings.
+// The package is what sits between
+// [internal/assets/read/desc] (the typed lookup helpers
+// every CLI command calls) and the embedded YAML files
+// (slow to parse on the hot path). Loading once at process
+// start trades a few milliseconds at boot for fast lookups
+// every time a hook fires.
+//
+// # Public Surface
+//
+//   - **[Init]**: loads every embedded YAML map into
+//     memory. Called exactly once from `main()` before
+//     the CLI starts dispatching. Idempotent: repeat
+//     calls are noops (the [sync.Once] guard short-
+//     circuits).
+//   - **[TextDesc](key)**: resolves a
+//     [internal/config/embed/text].DescKey to its
+//     rendered string.
+//   - **[StopWords]**: returns the embedded English
+//     stop-word set used by
+//     [internal/cli/agent/core/score] for relevance
+//     scoring.
+//   - **[ConfigPatterns]**: returns the embedded glob
+//     pattern list used to detect "this file is a
+//     config" in drift checks and skill heuristics.
+//   - **[PermAllowListDefault]** /
+//     **[PermDenyListDefault]**: return the default
+//     allow/deny entries for Claude Code permissions
+//     (used by `ctx init` and the
+//     `_ctx-permission-sanitize` skill).
+//
+// # Why Eager Loading
+//
+// Lazy parsing per call would dominate the time budget
+// for fast-fire hooks (some run on every tool call).
+// One up-front parse means the per-call cost is just
+// a map lookup. The maps are read-only after [Init];
+// concurrent readers never race.
+//
+// # Concurrency
+//
+// All readers are safe for concurrent use after [Init]
+// returns. The single-init guard ensures no race
+// between concurrent first-callers.
 package lookup
diff --git a/internal/assets/read/makefile/doc.go b/internal/assets/read/makefile/doc.go
index c8c7556e3..00172e434 100644
--- a/internal/assets/read/makefile/doc.go
+++ b/internal/assets/read/makefile/doc.go
@@ -4,10 +4,23 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package makefile provides access to the embedded Makefile.ctx
-// template.
+// Package makefile provides access to the embedded
+// Makefile.ctx template.
 //
-// [Ctx] returns the Makefile.ctx content deployed by ctx init,
-// providing common make targets (build, test, lint, audit) for
-// projects using ctx.
+// # Template
+//
+// Ctx returns the Makefile.ctx content deployed by
+// ctx init. This file provides common make targets
+// (build, test, lint, audit) for projects using ctx.
+// It is designed to be included from the project's
+// main Makefile via an include directive.
+//
+//	content, err := makefile.Ctx()
+//
+// # Deployment
+//
+// During ctx init, the Makefile.ctx template is
+// written to the project root alongside the .context/
+// directory. Projects can include it from their main
+// Makefile to inherit standard ctx targets.
 package makefile
diff --git a/internal/assets/read/philosophy/doc.go b/internal/assets/read/philosophy/doc.go
index 2acff44f0..4b9fb7ecb 100644
--- a/internal/assets/read/philosophy/doc.go
+++ b/internal/assets/read/philosophy/doc.go
@@ -4,10 +4,24 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package philosophy provides access to embedded why-documents
-// (manifesto, about, design invariants).
+// Package philosophy provides access to embedded
+// why-documents (manifesto, about, design invariants).
 //
-// [WhyDoc] reads a philosophy document by name. These are the
-// source files for the ctx why interactive reader, synced from
-// docs/ into the binary via make sync-why.
+// # Reading Documents
+//
+// WhyDoc reads a philosophy document by name from the
+// why/ directory in the embedded filesystem. The .md
+// extension is appended automatically.
+//
+//	doc, err := philosophy.WhyDoc("manifesto")
+//	doc, err := philosophy.WhyDoc("about")
+//	doc, err := philosophy.WhyDoc("design-invariants")
+//
+// # Purpose
+//
+// These documents are the source files for the
+// "ctx why" interactive reader. They explain the
+// rationale, design philosophy, and invariants behind
+// ctx. The documents are synced from docs/ into the
+// binary via make sync-why during the build process.
 package philosophy
diff --git a/internal/assets/read/project/doc.go b/internal/assets/read/project/doc.go
index e200f17eb..e934abf0b 100644
--- a/internal/assets/read/project/doc.go
+++ b/internal/assets/read/project/doc.go
@@ -4,9 +4,24 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package project provides access to project-root files and
-// directory README templates from embedded assets.
+// Package project provides access to project-root
+// files and directory README templates from embedded
+// assets.
 //
-// [Readme] reads the README template for a specific
-// subdirectory (e.g. specs/README.md).
+// # README Templates
+//
+// Readme reads a directory-specific README template
+// by directory name. Templates are stored as
+// project/<dir>-README.md in the embedded filesystem.
+//
+//	content, err := project.Readme("specs")
+//	content, err := project.Readme("ideas")
+//
+// # Deployment
+//
+// During ctx init, README templates are deployed into
+// project subdirectories (specs/, ideas/, etc.) to
+// provide guidance on how each directory should be
+// used. The directory name is sanitized via path.Base
+// before constructing the asset path.
 package project
diff --git a/internal/assets/read/schema/doc.go b/internal/assets/read/schema/doc.go
index c11d397d4..6b047e704 100644
--- a/internal/assets/read/schema/doc.go
+++ b/internal/assets/read/schema/doc.go
@@ -4,11 +4,29 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package schema provides access to the embedded JSON Schema for
-// .ctxrc validation.
+// Package schema provides access to the embedded JSON
+// Schema for .ctxrc validation.
 //
-// [Schema] returns the raw ctxrc.schema.json bytes. The schema
-// documents all configuration fields, their types, defaults, and
-// constraints. It is kept in sync with the CtxRC struct via
+// # Schema Access
+//
+// Schema returns the raw ctxrc.schema.json bytes from
+// the embedded filesystem. The schema documents all
+// configuration fields, their types, defaults, and
+// constraints for the .ctxrc configuration file.
+//
+//	data, err := schema.Schema()
+//
+// # Validation
+//
+// The returned JSON Schema can be used by editors and
+// tools to validate .ctxrc files. It covers all
+// fields in the CtxRC struct and is kept in sync via
 // TestSchemaCoversCtxRC.
+//
+// # Sync Guarantee
+//
+// The schema is tested against the CtxRC struct to
+// ensure every field is documented. If a new field is
+// added to the struct without updating the schema,
+// the test fails, preventing drift.
 package schema
diff --git a/internal/assets/read/skill/doc.go b/internal/assets/read/skill/doc.go
index a42e135d2..3e0202122 100644
--- a/internal/assets/read/skill/doc.go
+++ b/internal/assets/read/skill/doc.go
@@ -4,11 +4,36 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package skill provides access to embedded skill directories,
-// SKILL.md files, and reference documents.
+// Package skill provides access to embedded skill
+// directories, SKILL.md files, and reference
+// documents.
 //
-// [List] returns the names of all bundled skills deployed by
-// ctx init. [Content] reads a specific skill's SKILL.md file
-// by name. Skills are the primary agent instruction mechanism
-// in the ctx plugin.
+// # Listing Skills
+//
+// List returns the names of all bundled skill
+// directories deployed by ctx init. Each skill is a
+// directory containing a SKILL.md file following the
+// Agent Skills specification.
+//
+//	names, err := skill.List()
+//	// => ["ctx-status", "ctx-reflect", ...]
+//
+// # Reading Skill Content
+//
+// Content reads a specific skill's SKILL.md file by
+// directory name. The returned bytes contain the full
+// skill definition including triggers, instructions,
+// and examples.
+//
+//	data, err := skill.Content("ctx-status")
+//
+// # Skill Structure
+//
+// Each skill directory under claude/skills/ contains:
+//
+//   - SKILL.md: the skill definition file with
+//     frontmatter (name, triggers) and body
+//     (instructions, examples)
+//   - references/: optional reference documents
+//     that provide additional context
 package skill
diff --git a/internal/assets/read/template/doc.go b/internal/assets/read/template/doc.go
index c4975e662..b58cb6fc1 100644
--- a/internal/assets/read/template/doc.go
+++ b/internal/assets/read/template/doc.go
@@ -4,10 +4,30 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package template provides access to context template files
-// (TASKS.md, DECISIONS.md, etc.) from embedded assets.
+// Package template provides access to context
+// template files from embedded assets.
 //
-// [Template] reads a context file template by name. These templates
-// are stamped into .context/ by ctx init and include comment-header
-// guidance for when and how to update each file.
+// # Reading Templates
+//
+// Template reads a context file template by name from
+// the context/ asset directory. These templates are
+// stamped into .context/ by ctx init and include
+// comment-header guidance for when and how to update
+// each file.
+//
+//	content, err := template.Template("TASKS.md")
+//	content, err := template.Template("DECISIONS.md")
+//
+// # Available Templates
+//
+// The following context files have templates:
+//
+//   - TASKS.md: work items and progress tracking
+//   - DECISIONS.md: architectural decisions
+//   - CONVENTIONS.md: code patterns and standards
+//   - LEARNINGS.md: gotchas and lessons learned
+//   - CONSTITUTION.md: hard rules and invariants
+//   - ARCHITECTURE.md: system design overview
+//   - GLOSSARY.md: project-specific terminology
+//   - AGENT_PLAYBOOK.md: agent instruction guide
 package template
diff --git a/internal/assets/schema/ctxrc.schema.json b/internal/assets/schema/ctxrc.schema.json
index 7b191c44f..8e2dae1f3 100644
--- a/internal/assets/schema/ctxrc.schema.json
+++ b/internal/assets/schema/ctxrc.schema.json
@@ -10,10 +10,6 @@
       "type": "string",
       "description": "Active configuration profile name."
     },
-    "context_dir": {
-      "type": "string",
-      "description": "Name of the context directory. Default: .context."
-    },
     "token_budget": {
       "type": "integer",
       "description": "Default token budget for context assembly. Default: 8000.",
@@ -39,10 +35,6 @@
       "type": "boolean",
       "description": "Whether to encrypt the scratchpad. Default: true."
     },
-    "allow_outside_cwd": {
-      "type": "boolean",
-      "description": "Skip boundary validation for external context dirs. Default: false."
-    },
     "entry_count_learnings": {
       "type": "integer",
       "description": "Entry count threshold for LEARNINGS.md drift warning. Default: 30. 0 disables.",
diff --git a/internal/assets/tpl/README.md b/internal/assets/tpl/README.md
index f053a1b12..a287be6c5 100644
--- a/internal/assets/tpl/README.md
+++ b/internal/assets/tpl/README.md
@@ -21,9 +21,9 @@ Move it to a YAML text entry instead.
 | `tpl_entry.go`    | 15          | ctx add entry templates (decision, learning, convention, task) |
 | `tpl_journal.go`  | 26          | Journal markdown rendering |
 | `tpl_loop.go`     | 15          | Shell script generation for autonomous loops |
-| `tpl_obsidian.go` | 1           | Obsidian vault README (borderline — could migrate) |
+| `tpl_obsidian.go` | 1           | Obsidian vault README (borderline; could migrate) |
 | `tpl_recall.go`   | 21          | Recall output rendering |
-| `tpl_trigger.go`  | 2           | Trigger script scaffold (borderline — could migrate) |
+| `tpl_trigger.go`  | 2           | Trigger script scaffold (borderline; could migrate) |
 
 ## How they will be replaced
 
diff --git a/internal/assets/tpl/doc.go b/internal/assets/tpl/doc.go
index a6dd17a59..4125b1845 100644
--- a/internal/assets/tpl/doc.go
+++ b/internal/assets/tpl/doc.go
@@ -1,14 +1,62 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package tpl holds Sprintf-based format string constants that cannot
-// be expressed in the YAML text system (multi-line templates, structured
-// output with conditional sections).
+// Package tpl holds **Sprintf-based format string constants**
+// for output that is too structurally rich to live in the
+// flat YAML text store ([internal/config/embed/text]).
 //
-// Templates cover: Obsidian vault pages, loop script generation, recall
-// session formatting, journal entry scaffolding, and add entry headers.
-// See TASKS.md for the migration plan to Go text/template files.
+// The line ctx draws is: simple substitution → YAML; multi-
+// line templated content with conditional sections,
+// indentation rules, or per-call escaping → here.
+//
+// # What Lives Here
+//
+// Each `tpl_*.go` file owns one rendering domain:
+//
+//   - **`tpl_entry.go`**: the canonical TASKS.md task
+//     line and its inline tags (`#priority:`,
+//     `#session:`, `#branch:`, `#commit:`, `#added:`).
+//     Used by `ctx add task`.
+//   - **`tpl_hub_entry.go`**: markdown rendering of one
+//     hub entry (date header + origin tag + content
+//     body + horizontal rule). Consumed by
+//     `ctx connection sync` when materializing entries
+//     into `.context/hub/`.
+//   - **`tpl_journal.go`**: the journal entry skeleton:
+//     YAML frontmatter + body shell that the importer
+//     fills in.
+//   - **`tpl_loop.go`**: the autonomous-loop shell
+//     script template (`ctx loop` output).
+//   - **`tpl_obsidian.go`**: the Obsidian vault page
+//     templates (note frontmatter + wikilink section).
+//   - **`tpl_recall.go`**: the format the legacy
+//     `ctx recall` command used; kept here while the
+//     journal-merge transition completes.
+//   - **`tpl_trigger.go`**: the empty trigger script
+//     scaffold installed by `ctx trigger add`.
+//
+// # Naming Convention
+//
+// Constants are named for what they render, not how:
+// [HubEntryMarkdown], [Task], [TaskPriority], etc.
+// Each carries a doc comment listing the Sprintf args
+// in order so callers cannot accidentally pass the
+// wrong argument order.
+//
+// # Migration Note
+//
+// Several templates here are migration candidates for
+// Go `text/template`; Sprintf with many positional
+// arguments is fragile. The migration is tracked in
+// TASKS.md; until then, contributors should add new
+// templates here only when the YAML text store cannot
+// represent the structure.
+//
+// # Concurrency
+//
+// All exports are immutable string constants. Safe
+// for any access pattern.
 package tpl
diff --git a/internal/assets/tpl/tpl_hub_entry.go b/internal/assets/tpl/tpl_hub_entry.go
index 72fb00739..4972b1db8 100644
--- a/internal/assets/tpl/tpl_hub_entry.go
+++ b/internal/assets/tpl/tpl_hub_entry.go
@@ -8,7 +8,7 @@ package tpl
 
 // Hub entry markdown rendering template.
 const (
-	// TplEntryMarkdown formats a single hub entry as markdown
+	// HubEntryMarkdown formats a single hub entry as markdown
 	// with a date header, origin tag, and horizontal rule.
 	//
 	// Args (in order):
@@ -16,5 +16,5 @@ const (
 	//   - title: first line of content (used as heading)
 	//   - origin: entry origin identifier
 	//   - content: full entry content
-	TplEntryMarkdown = "## [%s] %s\n\n**Origin**: %s\n\n%s\n\n---\n\n"
+	HubEntryMarkdown = "## [%s] %s\n\n**Origin**: %s\n\n%s\n\n---\n\n"
 )
diff --git a/internal/assets/tpl/tpl_obsidian.go b/internal/assets/tpl/tpl_obsidian.go
index 65c6e165e..7b42a5e1c 100644
--- a/internal/assets/tpl/tpl_obsidian.go
+++ b/internal/assets/tpl/tpl_obsidian.go
@@ -15,7 +15,7 @@ package tpl
 // template rendering pipeline is implemented (see TASKS.md).
 const ObsidianReadme = `# journal-obsidian (generated)
 
-Generated by ` + "`ctx journal obsidian`" + ` — read-only.
+Generated by ` + "`ctx journal obsidian`" + `, read-only.
 Do not edit files here - changes will be overwritten on the next run.
 
 ## To update
diff --git a/internal/assets/tpl/tpl_trigger.go b/internal/assets/tpl/tpl_trigger.go
index 35b62ed51..47c3f9b1f 100644
--- a/internal/assets/tpl/tpl_trigger.go
+++ b/internal/assets/tpl/tpl_trigger.go
@@ -16,7 +16,7 @@ const (
 	//   - name: trigger script base name (without .sh)
 	//   - type: trigger type (e.g. pre-tool-use, session-start)
 	//
-	// The generated script has no executable bit — users
+	// The generated script has no executable bit; users
 	// must run `ctx trigger enable <name>` after review, so
 	// unreviewed code never fires on real events.
 	TriggerScript = `#!/usr/bin/env bash
diff --git a/internal/assets/why/design-invariants.md b/internal/assets/why/design-invariants.md
index c8a2d4cd4..9c23691aa 100644
--- a/internal/assets/why/design-invariants.md
+++ b/internal/assets/why/design-invariants.md
@@ -162,7 +162,7 @@ permanent assets.
 
 ---
 
-## 11. Policies Are Encoded, not Remembered
+## 11. Policies Are Encoded, Not Remembered
 
 Alignment **must not** depend on recall or goodwill.
 
diff --git a/internal/assets/why/manifesto.md b/internal/assets/why/manifesto.md
index 31fab1939..1b922530b 100644
--- a/internal/assets/why/manifesto.md
+++ b/internal/assets/why/manifesto.md
@@ -5,13 +5,13 @@
 #   \    Copyright 2026-present Context contributors.
 #                 SPDX-License-Identifier: Apache-2.0
 
-title: The ctx Manifesto
+title: Manifesto
 icon: lucide/flame
 ---
 
 ![ctx](images/ctx-banner.png)
 
-# `ctx` Manifesto
+# The `ctx` Manifesto
 
 **Creation, not code**.
 
@@ -101,7 +101,7 @@ Vision, goals, and direction are **human responsibilities**.
 
 **Nothing** critical should depend on recall.
 
-!!! danger "Oral Tradition Does not Scale"
+!!! danger "Oral Tradition Does Not Scale"
     If intent cannot be inspected, it cannot be enforced.
 
 ---
@@ -189,7 +189,7 @@ Memory heuristics **drift**.
 
 ## Verified Reality Is the Scoreboard
 
-!!! danger "Activity is a False Proxy"
+!!! danger "Activity Is a False Proxy"
     Output volume correlates *poorly* with impact.
 
     * *Code* is **not** progress.
@@ -238,7 +238,7 @@ We build to:
 
 ## Failures Are Assets
 
-!!! important "Failure Without Capture is Waste"
+!!! important "Failure without Capture Is Waste"
     **Pain** that does not teach is pure *loss*.
 
     **Failures** are *not* erased: They are **preserved**.
@@ -280,9 +280,9 @@ A repeated mistake is a missing `ctx` artifact.
 
 ---
 
-## Encode Intent Into the Environment
+## Encode Intent into the Environment
 
-!!! danger "Goodwill Does not Belong to the Table"
+!!! danger "Goodwill Does Not Belong to the Table"
     *Alignment* that depends on memory will **drift**.
 
     *Alignment* **cannot depend on** *memory* or *goodwill*.
@@ -364,7 +364,7 @@ Transparent `ctx` **compounds** understanding.
 
 ## Continuously Verify the System
 
-!!! warning "Stability is Temporary"
+!!! warning "Stability Is Temporary"
     Every assumption has a half-life:
 
     * Models drift.
@@ -387,7 +387,7 @@ Transparent `ctx` **compounds** understanding.
 
 ## `ctx` Is Leverage
 
-!!! note "Humans are Decision Engines"
+!!! note "Humans Are Decision Engines"
     *Execution* should **not** consume *judgment*.
 
     Humans **must not be** typists.
diff --git a/internal/audit/README.md b/internal/audit/README.md
index c19ce04fe..0a092aa86 100644
--- a/internal/audit/README.md
+++ b/internal/audit/README.md
@@ -40,4 +40,4 @@ directory.**
 
 If a test fails, the fix belongs in the code under test, not here.
 If you believe an exception is truly warranted, surface it to the
-user and let them decide — do not silently widen a check.
+user and let them decide; do not silently widen a check.
diff --git a/internal/audit/cmd_fprint_test.go b/internal/audit/cmd_fprint_test.go
new file mode 100644
index 000000000..751c46208
--- /dev/null
+++ b/internal/audit/cmd_fprint_test.go
@@ -0,0 +1,171 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+// ================================================================
+// STOP — Read internal/audit/README.md before editing this file.
+//
+// These tests enforce project conventions. The codebase is clean:
+// all checks pass with zero violations, zero exceptions.
+//
+// If a test fails after your change, fix the code under test.
+// Do NOT add allowlist entries, bump grandfathered counters, or
+// weaken checks. Exceptions require a dedicated PR with
+// justification for every entry. See README.md for the full policy.
+// ================================================================
+
+package audit
+
+import (
+	"go/ast"
+	"strings"
+	"testing"
+)
+
+// fmtFprintMethods lists fmt.Fprint-family helpers that, when
+// pointed at a user-facing stream (cmd.OutOrStdout / cmd.OutOrStderr
+// / os.Stdout / os.Stderr), bypass the internal/write/ formatting
+// pipeline. The cmd_print and printf_calls tests catch the direct
+// `cmd.Print*(...)` form; this test closes the indirect form
+// `fmt.Fprint*(stream, ...)`.
+var fmtFprintMethods = map[string]bool{
+	"Fprint":   true,
+	"Fprintf":  true,
+	"Fprintln": true,
+}
+
+// TestNoFmtFprintToUserStream catches `fmt.Fprint*(stream, ...)`
+// calls where stream is a user-facing destination
+// (cmd.OutOrStdout / cmd.OutOrStderr / os.Stdout / os.Stderr) made
+// outside internal/write/. Same intent as TestNoCmdPrintOutsideWrite:
+// every user-visible write must route through write/ so output
+// formatting stays consistent and template-driven.
+//
+// Calls writing to in-memory destinations (strings.Builder,
+// bytes.Buffer, json.Encoder targets, etc.) are unaffected because
+// those arguments are neither cmd.OutOr* calls nor os.Std* idents.
+//
+// Test files are exempt.
+//
+// See specs/ast-audit-tests.md for rationale.
+func TestNoFmtFprintToUserStream(t *testing.T) {
+	pkgs := loadPackages(t)
+	var violations []string
+
+	for _, pkg := range pkgs {
+		// Allow calls inside internal/write/ — that is precisely
+		// where these patterns belong.
+		if strings.Contains(pkg.PkgPath, "internal/write/") ||
+			strings.HasSuffix(pkg.PkgPath, "internal/write") {
+			continue
+		}
+
+		for _, file := range pkg.Syntax {
+			fpath := pkg.Fset.Position(file.Pos()).Filename
+			if isTestFile(fpath) {
+				continue
+			}
+
+			ast.Inspect(file, func(n ast.Node) bool {
+				call, ok := n.(*ast.CallExpr)
+				if !ok {
+					return true
+				}
+
+				sel, ok := call.Fun.(*ast.SelectorExpr)
+				if !ok {
+					return true
+				}
+
+				// Must be the fmt package.
+				pkgIdent, ok := sel.X.(*ast.Ident)
+				if !ok || pkgIdent.Name != "fmt" {
+					return true
+				}
+
+				if !fmtFprintMethods[sel.Sel.Name] {
+					return true
+				}
+
+				if len(call.Args) == 0 {
+					return true
+				}
+
+				if !isUserFacingStream(call.Args[0]) {
+					return true
+				}
+
+				violations = append(violations,
+					posString(pkg.Fset, call.Pos())+
+						": fmt."+sel.Sel.Name+
+						"(, ...) — must go through internal/write/",
+				)
+				return true
+			})
+		}
+	}
+
+	for _, v := range violations {
+		t.Error(v)
+	}
+}
+
+// isUserFacingStream reports whether expr is one of the
+// user-visible writers we forbid bypassing.
+//
+// Recognized shapes:
+//   - cmd.OutOrStdout() — cobra's stdout writer (SetOut or
+//     os.Stdout fallback).
+//   - cmd.OutOrStderr() — confusingly-named cobra accessor that
+//     returns the SetOut writer with **stderr** as fallback. Still
+//     a user-visible stream; route through internal/write/.
+//   - cmd.ErrOrStderr() — cobra's stderr writer (SetErr or
+//     os.Stderr fallback). The actual "write to stderr"
+//     accessor; covered here to keep the rule total.
+//   - os.Stdout / os.Stderr — direct *os.File globals.
+//
+// The receiver name "cmd" is the project convention; a non-"cmd"
+// receiver is allowed as a calculated escape hatch (rare and
+// would show up in review).
+//
+// Anything else (strings.Builder, bytes.Buffer, json.Encoder
+// targets, custom io.Writer, etc.) is in-memory string assembly
+// and is not a concern of this test.
+//
+// Parameters:
+//   - expr: AST expression in the first-argument slot of a
+//     fmt.Fprint*-family call.
+//
+// Returns:
+//   - bool: true when expr is one of the recognized user streams.
+func isUserFacingStream(expr ast.Expr) bool {
+	switch e := expr.(type) {
+	case *ast.CallExpr:
+		sel, ok := e.Fun.(*ast.SelectorExpr)
+		if !ok {
+			return false
+		}
+		if sel.Sel.Name != "OutOrStdout" &&
+			sel.Sel.Name != "OutOrStderr" &&
+			sel.Sel.Name != "ErrOrStderr" {
+			return false
+		}
+		ident, ok := sel.X.(*ast.Ident)
+		if !ok {
+			return false
+		}
+		return ident.Name == "cmd"
+	case *ast.SelectorExpr:
+		ident, ok := e.X.(*ast.Ident)
+		if !ok {
+			return false
+		}
+		if ident.Name != "os" {
+			return false
+		}
+		return e.Sel.Name == "Stdout" || e.Sel.Name == "Stderr"
+	}
+	return false
+}
diff --git a/internal/audit/doc.go b/internal/audit/doc.go
index d67b84bf5..8628d9141 100644
--- a/internal/audit/doc.go
+++ b/internal/audit/doc.go
@@ -4,21 +4,52 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package audit contains AST-based codebase invariant tests.
+// Package audit contains AST-based codebase invariant
+// tests that enforce project conventions at the syntax
+// tree level.
 //
-// Unlike internal/compliance (which uses file-level grep and shell tool
-// checks), audit tests use go/ast and go/packages to walk parsed syntax
-// trees. This gives type-aware, context-sensitive detection that cannot
-// be achieved with regex.
+// Unlike [internal/compliance] (which uses file-level
+// grep and shell tool checks), audit tests use go/ast
+// and go/packages to walk parsed syntax trees. This
+// gives type-aware, context-sensitive detection that
+// cannot be achieved with regex.
 //
-// Every file in this package is a _test.go file except this doc.go.
-// The package produces no binary output and is not importable.
+// Every file in this package is a _test.go file except
+// this doc.go. The package produces no binary output
+// and is not importable.
 //
-// Shared helpers live in helpers_test.go:
-//   - [loadPackages] loads and caches parsed packages via sync.Once.
+// # Shared Helpers
+//
+// helpers_test.go provides:
+//
+//   - [loadPackages] loads and caches parsed packages
+//     via sync.Once.
 //   - [isTestFile] filters _test.go files.
-//   - [posString] formats file:line for error messages.
+//   - [posString] formats file:line positions for
+//     error messages.
+//
+// # Check Catalog
+//
+// Each check lives in its own _test.go file, one test
+// function per file. Categories include:
+//
+//   - **Naming**: stuttery function names, descKey
+//     namespace alignment, mixed-visibility files.
+//   - **Error handling**: naked errors, errors.As
+//     usage, unchecked fmt returns, printf calls.
+//   - **Code hygiene**: magic strings, magic values,
+//     raw file I/O, raw logging, raw time formats,
+//     string-concat paths, literal whitespace.
+//   - **Structure**: CLI command structure, core
+//     structure, cross-package types, dead exports,
+//     type file conventions.
+//   - **Documentation**: doc comment alignment, doc
+//     comments, doc structure, package doc quality.
+//   - **Assets**: YAML content drift, YAML examples
+//     registry, YAML linkage.
+//   - **Permissions**: flagbind usage, import shadow,
+//     variable shadowing.
 //
-// Each check lives in its own _test.go file, one test function per
-// file. See specs/ast-audit-tests.md for the full check catalog.
+// See specs/ast-audit-tests.md for the full check
+// catalog and rationale.
 package audit
diff --git a/internal/bootstrap/bootstrap.go b/internal/bootstrap/bootstrap.go
index 3a37362a6..5f7574e1d 100644
--- a/internal/bootstrap/bootstrap.go
+++ b/internal/bootstrap/bootstrap.go
@@ -61,7 +61,7 @@ func Initialize(cmd *cobra.Command) *cobra.Command {
 
 	// Route Cobra's built-in help and completion into the
 	// narrow Shell group. These are the only commands that
-	// live there — everything else has a domain group.
+	// live there; everything else has a domain group.
 	cmd.SetHelpCommandGroupID(embedCmd.GroupShell)
 	cmd.SetCompletionCommandGroupID(embedCmd.GroupShell)
 
diff --git a/internal/bootstrap/bootstrap_test.go b/internal/bootstrap/bootstrap_test.go
index 6f6280229..31caded06 100644
--- a/internal/bootstrap/bootstrap_test.go
+++ b/internal/bootstrap/bootstrap_test.go
@@ -9,11 +9,14 @@ package bootstrap
 import (
 	"os"
 	"path/filepath"
+	"strings"
 	"testing"
 
 	"github.com/ActiveMemory/ctx/internal/cli/resolve"
 	"github.com/ActiveMemory/ctx/internal/config/cli"
 	"github.com/ActiveMemory/ctx/internal/config/ctx"
+	"github.com/ActiveMemory/ctx/internal/config/dir"
+	"github.com/ActiveMemory/ctx/internal/config/env"
 	"github.com/ActiveMemory/ctx/internal/config/flag"
 	"github.com/spf13/cobra"
 
@@ -43,13 +46,25 @@ func TestRootCmd(t *testing.T) {
 	if cmd.Long == "" {
 		t.Error("RootCmd().Long is empty")
 	}
+}
 
-	// Check global flags exist
-	contextDirFlag := cmd.PersistentFlags().Lookup(flag.ContextDir)
-	if contextDirFlag == nil {
-		t.Error("--context-dir flag not found")
-	}
+// TestRoot_NoContextDirFlag is the regression guard for the
+// removed --context-dir flag (spec:
+// specs/single-source-context-anchor.md). Cobra must reject
+// the flag with its standard "unknown flag" error.
+func TestRoot_NoContextDirFlag(t *testing.T) {
+	cmd := RootCmd()
+	cmd.SetOut(&discardWriter{})
+	cmd.SetErr(&discardWriter{})
+	cmd.SetArgs([]string{"--context-dir=/tmp", "status"})
 
+	err := cmd.Execute()
+	if err == nil {
+		t.Fatal("expected error for removed --context-dir flag")
+	}
+	if !strings.Contains(err.Error(), "unknown flag") {
+		t.Errorf("error = %q, want cobra unknown-flag error", err.Error())
+	}
 }
 
 func TestInitialize(t *testing.T) {
@@ -107,19 +122,19 @@ func TestRootCmdVersion(t *testing.T) {
 	}
 }
 
-func TestRootCmdAllowOutsideCwdFlag(t *testing.T) {
-	cmd := RootCmd()
-
-	flag := cmd.PersistentFlags().Lookup(flag.AllowOutsideCwd)
-	if flag == nil {
-		t.Fatal("--allow-outside-cwd flag not found")
-	}
-	if flag.DefValue != "false" {
-		t.Errorf("--allow-outside-cwd default = %q, want %q", flag.DefValue, "false")
+// TestRootCmdPersistentPreRun_CtxDirEnv: CTX_DIR env declares the
+// context directory; non-init annotated dummy bypasses the
+// initialized check.
+func TestRootCmdPersistentPreRun_CtxDirEnv(t *testing.T) {
+	tmp := t.TempDir()
+	ctxDir := filepath.Join(tmp, dir.Context)
+	if err := os.MkdirAll(ctxDir, 0o700); err != nil {
+		t.Fatal(err)
 	}
-}
+	t.Setenv(env.CtxDir, ctxDir)
+	rc.Reset()
+	t.Cleanup(rc.Reset)
 
-func TestRootCmdPersistentPreRun_ContextDir(t *testing.T) {
 	cmd := RootCmd()
 
 	dummy := &cobra.Command{
@@ -128,25 +143,26 @@ func TestRootCmdPersistentPreRun_ContextDir(t *testing.T) {
 		Run:         func(cmd *cobra.Command, args []string) {},
 	}
 	cmd.AddCommand(dummy)
-	cmd.SetArgs([]string{
-		"--context-dir", "/tmp/test-ctx",
-		"--allow-outside-cwd", "dummy",
-	})
+	cmd.SetArgs([]string{"dummy"})
 
 	err := cmd.Execute()
 	if err != nil {
 		t.Fatalf("Execute() error: %v", err)
 	}
 
-	got := rc.ContextDir()
-	if got != "/tmp/test-ctx" {
-		t.Errorf("ContextDir() = %q, want %q", got, "/tmp/test-ctx")
+	got, ctxErr := rc.ContextDir()
+	if ctxErr != nil {
+		t.Fatalf("ContextDir: %v", ctxErr)
+	}
+	if got != ctxDir {
+		t.Errorf("ContextDir() = %q, want %q", got, ctxDir)
 	}
 }
 
 func TestRootCmdPersistentPreRun_DefaultFlags(t *testing.T) {
-	// Test PersistentPreRun with default flags (no --context-dir, no --no-color)
-	// --allow-outside-cwd needed since test cwd may not have .context
+	// Test PersistentPreRun with default flags.
+	// The dummy command carries AnnotationSkipInit, so PersistentPreRunE
+	// skips the context-dir declaration gate and returns immediately.
 	cmd := RootCmd()
 
 	dummy := &cobra.Command{
@@ -155,7 +171,7 @@ func TestRootCmdPersistentPreRun_DefaultFlags(t *testing.T) {
 		Run:         func(cmd *cobra.Command, args []string) {},
 	}
 	cmd.AddCommand(dummy)
-	cmd.SetArgs([]string{"--allow-outside-cwd", "dummy"})
+	cmd.SetArgs([]string{"dummy"})
 
 	err := cmd.Execute()
 	if err != nil {
@@ -182,27 +198,15 @@ func TestInitializeSubcommandCount(t *testing.T) {
 	}
 }
 
-// TestRootCmdPersistentPreRun_BoundaryViolation tests that boundary validation
-// returns an error when --context-dir is outside cwd and --allow-outside-cwd
-// is not set.
-func TestRootCmdPersistentPreRun_BoundaryViolation(t *testing.T) {
-	cmd := RootCmd()
-	dummy := &cobra.Command{
-		Use:         "dummy",
-		Annotations: map[string]string{cli.AnnotationSkipInit: "true"},
-		Run:         func(cmd *cobra.Command, args []string) {},
-	}
-	cmd.AddCommand(dummy)
-	cmd.SetArgs([]string{"--context-dir", "/etc/not-inside-cwd", "dummy"})
-
-	execErr := cmd.Execute()
-	if execErr == nil {
-		t.Fatal("expected error from boundary violation")
-	}
-}
-
 func TestInitGuard_BlocksUninitializedCommand(t *testing.T) {
 	tmp := t.TempDir()
+	ctxDir := filepath.Join(tmp, dir.Context)
+	if err := os.MkdirAll(ctxDir, 0o700); err != nil {
+		t.Fatal(err)
+	}
+	t.Setenv(env.CtxDir, ctxDir)
+	rc.Reset()
+	t.Cleanup(rc.Reset)
 
 	cmd := RootCmd()
 	dummy := &cobra.Command{
@@ -210,7 +214,7 @@ func TestInitGuard_BlocksUninitializedCommand(t *testing.T) {
 		Run: func(cmd *cobra.Command, args []string) {},
 	}
 	cmd.AddCommand(dummy)
-	cmd.SetArgs([]string{"--context-dir", tmp, "--allow-outside-cwd", "dummy"})
+	cmd.SetArgs([]string{"dummy"})
 
 	execErr := cmd.Execute()
 	if execErr == nil {
@@ -224,6 +228,13 @@ func TestInitGuard_BlocksUninitializedCommand(t *testing.T) {
 
 func TestInitGuard_AllowsAnnotatedCommand(t *testing.T) {
 	tmp := t.TempDir() // empty - not initialized
+	ctxDir := filepath.Join(tmp, dir.Context)
+	if err := os.MkdirAll(ctxDir, 0o700); err != nil {
+		t.Fatal(err)
+	}
+	t.Setenv(env.CtxDir, ctxDir)
+	rc.Reset()
+	t.Cleanup(rc.Reset)
 
 	cmd := RootCmd()
 	dummy := &cobra.Command{
@@ -232,7 +243,7 @@ func TestInitGuard_AllowsAnnotatedCommand(t *testing.T) {
 		Run:         func(cmd *cobra.Command, args []string) {},
 	}
 	cmd.AddCommand(dummy)
-	cmd.SetArgs([]string{"--context-dir", tmp, "--allow-outside-cwd", "dummy"})
+	cmd.SetArgs([]string{"dummy"})
 
 	if execErr := cmd.Execute(); execErr != nil {
 		t.Fatalf("annotated command should succeed: %v", execErr)
@@ -241,6 +252,13 @@ func TestInitGuard_AllowsAnnotatedCommand(t *testing.T) {
 
 func TestInitGuard_AllowsHiddenCommand(t *testing.T) {
 	tmp := t.TempDir() // empty - not initialized
+	ctxDir := filepath.Join(tmp, dir.Context)
+	if err := os.MkdirAll(ctxDir, 0o700); err != nil {
+		t.Fatal(err)
+	}
+	t.Setenv(env.CtxDir, ctxDir)
+	rc.Reset()
+	t.Cleanup(rc.Reset)
 
 	cmd := RootCmd()
 	dummy := &cobra.Command{
@@ -249,7 +267,7 @@ func TestInitGuard_AllowsHiddenCommand(t *testing.T) {
 		Run:    func(cmd *cobra.Command, args []string) {},
 	}
 	cmd.AddCommand(dummy)
-	cmd.SetArgs([]string{"--context-dir", tmp, "--allow-outside-cwd", "dummy"})
+	cmd.SetArgs([]string{"dummy"})
 
 	if execErr := cmd.Execute(); execErr != nil {
 		t.Fatalf("hidden command should succeed: %v", execErr)
@@ -264,7 +282,7 @@ func TestInitGuard_AllowsGroupingCommand(t *testing.T) {
 		Short: "A grouping command",
 	}
 	cmd.AddCommand(group)
-	cmd.SetArgs([]string{"--allow-outside-cwd", "group"})
+	cmd.SetArgs([]string{"group"})
 
 	if execErr := cmd.Execute(); execErr != nil {
 		t.Fatalf("grouping command should succeed: %v", execErr)
@@ -301,23 +319,31 @@ func TestInitGuard_AllowsCompletionSubcommand(t *testing.T) {
 
 func TestInitGuard_AllowsInitializedCommand(t *testing.T) {
 	tmp := t.TempDir()
+	ctxDir := filepath.Join(tmp, dir.Context)
+	if mkErr := os.MkdirAll(ctxDir, 0o700); mkErr != nil {
+		t.Fatal(mkErr)
+	}
 
 	// Create required context files so Initialized() returns true.
 	for _, f := range ctx.FilesRequired {
-		path := filepath.Join(tmp, f)
+		path := filepath.Join(ctxDir, f)
 		content := []byte("# " + f + "\n")
 		if writeErr := os.WriteFile(path, content, 0o600); writeErr != nil {
 			t.Fatalf("setup: %v", writeErr)
 		}
 	}
 
+	t.Setenv(env.CtxDir, ctxDir)
+	rc.Reset()
+	t.Cleanup(rc.Reset)
+
 	cmd := RootCmd()
 	dummy := &cobra.Command{
 		Use: "dummy",
 		Run: func(cmd *cobra.Command, args []string) {},
 	}
 	cmd.AddCommand(dummy)
-	cmd.SetArgs([]string{"--context-dir", tmp, "--allow-outside-cwd", "dummy"})
+	cmd.SetArgs([]string{"dummy"})
 
 	if execErr := cmd.Execute(); execErr != nil {
 		t.Fatalf("initialized command should succeed: %v", execErr)
@@ -356,7 +382,7 @@ func TestResolveTool_FlagOverridesRC(t *testing.T) {
 		},
 	}
 	cmd.AddCommand(dummy)
-	cmd.SetArgs([]string{"--allow-outside-cwd", "--tool", "cursor", "dummy"})
+	cmd.SetArgs([]string{"--tool", "cursor", "dummy"})
 
 	if err := cmd.Execute(); err != nil {
 		t.Fatalf("Execute() error: %v", err)
@@ -383,7 +409,7 @@ func TestResolveTool_FallsBackToRC(t *testing.T) {
 		},
 	}
 	cmd.AddCommand(dummy)
-	cmd.SetArgs([]string{"--allow-outside-cwd", "dummy"})
+	cmd.SetArgs([]string{"dummy"})
 
 	if err := cmd.Execute(); err != nil {
 		t.Fatalf("Execute() error: %v", err)
@@ -411,7 +437,7 @@ func TestResolveTool_ErrorMessage(t *testing.T) {
 		},
 	}
 	cmd.AddCommand(dummy)
-	cmd.SetArgs([]string{"--allow-outside-cwd", "dummy"})
+	cmd.SetArgs([]string{"dummy"})
 
 	if err := cmd.Execute(); err != nil {
 		t.Fatalf("Execute() error: %v", err)
diff --git a/internal/bootstrap/cmd.go b/internal/bootstrap/cmd.go
index 4daf4b693..d673fd8e0 100644
--- a/internal/bootstrap/cmd.go
+++ b/internal/bootstrap/cmd.go
@@ -20,11 +20,8 @@ import (
 	embedFlag "github.com/ActiveMemory/ctx/internal/config/embed/flag"
 	"github.com/ActiveMemory/ctx/internal/config/flag"
 	ctxContext "github.com/ActiveMemory/ctx/internal/context/validate"
-	"github.com/ActiveMemory/ctx/internal/err/fs"
 	errInit "github.com/ActiveMemory/ctx/internal/err/initialize"
-	"github.com/ActiveMemory/ctx/internal/flagbind"
 	"github.com/ActiveMemory/ctx/internal/rc"
-	"github.com/ActiveMemory/ctx/internal/validate"
 	writeBootstrap "github.com/ActiveMemory/ctx/internal/write/bootstrap"
 )
 
@@ -38,16 +35,9 @@ var version = cfgBootstrap.DefaultVersion
 // The root command provides the entry point for all ctx subcommands and
 // displays help information when invoked without arguments.
 //
-// Global flags:
-//   - --context-dir: Override the context directory path (default: .context)
-//   - --allow-outside-cwd: Allow context directory outside project root
-//
 // Returns:
 //   - *cobra.Command: The configured root command with usage and version info
 func RootCmd() *cobra.Command {
-	var contextDir string
-	var allowOutsideCwd bool
-
 	short, long := desc.Command(cmd.DescKeyCtx)
 
 	c := &cobra.Command{
@@ -56,43 +46,59 @@ func RootCmd() *cobra.Command {
 		Long:    long,
 		Example: desc.Example(cmd.DescKeyCtx),
 		Version: version,
+		// Cobra auto-prints returned errors to stderr by default;
+		// main.go also prints them via writeErr.With, producing a
+		// double-printed error. Silence cobra's path so writeErr is
+		// the sole printer. (SilenceUsage stays per-return so
+		// genuine cobra parse errors keep their help dump.)
+		SilenceErrors: true,
 		PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
-			// Apply global flag values
-			if contextDir != "" {
-				rc.OverrideContextDir(contextDir)
-			}
-			// Validate that the context directory stays within the project root.
-			// Skip if the CLI flag is set or .ctxrc has allow_outside_cwd: true.
-			if !allowOutsideCwd && !rc.AllowOutsideCwd() {
-				if validateErr := validate.Boundary(
-					rc.ContextDir(),
-				); validateErr != nil {
-					return fs.BoundaryViolation(validateErr)
-				}
-			}
-
-			// Skip init check for hidden commands (hooks have their own guards)
-			// and cobra's built-in completion subcommands (bash, zsh, fish,
-			// PowerShell) which must work in any directory.
+			// Skip every downstream check for administrative commands
+			// that must run without a declared or initialized context:
+			//   - Hidden commands (e.g. ctx system bootstrap; hooks
+			//     supply their own guards).
+			//   - Cobra's built-in shell-completion subcommands.
+			//   - Commands annotated with AnnotationSkipInit (init,
+			//     activate, deactivate, guide, why, doctor, config
+			//     switch/status, hub *).
+			//   - Grouping commands without a Run / RunE of their own
+			//     (they just print help for their subtree).
 			if cmd.Hidden {
 				return nil
 			}
 			if p := cmd.Parent(); p != nil && p.Name() == cli.CmdCompletion {
 				return nil
 			}
-
-			// Skip init check for annotated commands.
 			if _, ok := cmd.Annotations[cli.AnnotationSkipInit]; ok {
 				return nil
 			}
-
-			// Skip init check for grouping commands (no Run/RunE = just shows help).
 			if cmd.RunE == nil && cmd.Run == nil {
 				return nil
 			}
 
-			// Require initialization.
-			if !ctxContext.Initialized(rc.ContextDir()) {
+			// Under the single-source-anchor model, every non-exempt
+			// command requires CTX_DIR to be declared and to point at
+			// an existing .context/ directory. RequireContextDir
+			// returns a tailored error (with a next-step hint based on
+			// how many .context/ candidates are visible from CWD) when
+			// the declaration is missing or broken. The parent of the
+			// declared directory is the project root by contract; CWD
+			// has no say in project identity.
+			ctxDir, reqErr := rc.RequireContextDir()
+			if reqErr != nil {
+				// Actionable error, not a usage problem. Suppress
+				// cobra's help dump so the call-to-action stays
+				// the only thing on stderr. Genuine cobra errors
+				// (unknown subcommand, bad flag) still print usage
+				// because they happen before PreRunE runs.
+				cmd.SilenceUsage = true
+				return reqErr
+			}
+
+			// Require initialization: the declared directory must
+			// have been initialized before other commands operate.
+			if !ctxContext.Initialized(ctxDir) {
+				cmd.SilenceUsage = true
 				return errInit.NotInitialized()
 			}
 
@@ -114,16 +120,6 @@ func RootCmd() *cobra.Command {
 		}
 	})
 
-	// Global flags available to all subcommands
-	flagbind.PersistentStringFlag(
-		c, &contextDir,
-		flag.ContextDir, embedFlag.DescKeyContextDir,
-	)
-	flagbind.PersistentBoolFlag(
-		c, &allowOutsideCwd,
-		flag.AllowOutsideCwd,
-		embedFlag.DescKeyAllowOutsideCwd,
-	)
 	c.PersistentFlags().String(
 		flag.Tool,
 		"",
diff --git a/internal/bootstrap/doc.go b/internal/bootstrap/doc.go
index a6e290d4e..8ea4437f0 100644
--- a/internal/bootstrap/doc.go
+++ b/internal/bootstrap/doc.go
@@ -4,12 +4,70 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package bootstrap initializes the ctx CLI application.
+// Package bootstrap is the **CLI assembly layer** for ctx:
+// the place where every cobra command in the binary gets
+// constructed, grouped, and wired into the root command
+// before `cmd.Execute()` runs.
 //
-// It provides functions to create the root command and register all
-// subcommands. The typical usage pattern is:
+// `cmd/ctx/main.go` is intentionally tiny:
 //
 //	cmd := bootstrap.Initialize(bootstrap.RootCmd())
-//	if err := cmd.Execute(); err != nil {
-//	    // handle error
+//	if err := cmd.Execute(); err != nil { ... }
+//
+// All the actual command registration happens here so the
+// command tree is in one auditable place and so the audit
+// suite (`cli_cmd_structure_test`) can verify invariants
+// like "every command has a non-empty Use", "every command
+// has a Short", and "every group has at least one
+// command".
+//
+// # The Root Command
+//
+// [RootCmd] returns the bare root cobra command with the
+// banner, version flag, the `--tool` global flag, and
+// the persistent error formatter. It is intentionally
+// devoid of subcommands; [Initialize] adds them.
+//
+// # Group-Based Registration
+//
+// [Initialize] does the wiring through small grouped
+// helpers ([gettingStarted], [contextCmds], [artifacts],
+// [sessions], [runtimeCmds], [integrations],
+// [diagnostics], [hiddenCmds]), each of which returns a
+// `[]registration` that pairs a constructor with a
+// [Group] tag. The result is the cobra command tree the
+// user sees in `ctx --help`, organized into the same
+// sections documented in `docs/cli/index.md`.
+//
+// New commands plug in by:
+//
+//  1. Implementing a `Cmd() *cobra.Command` factory in
+//     `internal/cli/`.
+//  2. Adding the constructor to the right group helper
+//     in [group.go] under the matching `embedCmd.Group*`
+//     constant.
+//  3. Adding the `Use` and `DescKey` constants to
+//     [internal/config/embed/cmd] and the matching YAML
+//     entries to [internal/assets/commands].
+//
+// # Hidden Commands
+//
+// [hiddenCmds] keeps `ctx site` and `ctx system` out of
+// `ctx --help` because they are agent-/automation-facing
+// rather than user-facing. They still execute when
+// invoked directly. The criterion for "hidden" is "no
+// human is expected to type this".
+//
+// # Version Stamping
+//
+// The build embeds the version string into the package
+// at link time via `-ldflags` (see Makefile `build`
+// target); the value is exposed through [Version] and
+// surfaced by `ctx --version`.
+//
+// # Concurrency
+//
+// Bootstrap runs once at process start. Concurrent
+// execution is not a concern; cobra serializes
+// subcommand dispatch.
 package bootstrap
diff --git a/internal/bootstrap/group.go b/internal/bootstrap/group.go
index c64398f8d..3a29f6fa1 100644
--- a/internal/bootstrap/group.go
+++ b/internal/bootstrap/group.go
@@ -7,16 +7,15 @@
 package bootstrap
 
 import (
+	"github.com/ActiveMemory/ctx/internal/cli/activate"
 	"github.com/ActiveMemory/ctx/internal/cli/add"
 	"github.com/ActiveMemory/ctx/internal/cli/agent"
-	"github.com/ActiveMemory/ctx/internal/cli/backup"
-
 	"github.com/ActiveMemory/ctx/internal/cli/change"
 	"github.com/ActiveMemory/ctx/internal/cli/compact"
 	"github.com/ActiveMemory/ctx/internal/cli/config"
 	"github.com/ActiveMemory/ctx/internal/cli/connection"
+	"github.com/ActiveMemory/ctx/internal/cli/deactivate"
 	"github.com/ActiveMemory/ctx/internal/cli/decision"
-
 	"github.com/ActiveMemory/ctx/internal/cli/doctor"
 	"github.com/ActiveMemory/ctx/internal/cli/drift"
 	ctxFmt "github.com/ActiveMemory/ctx/internal/cli/fmt"
@@ -60,6 +59,8 @@ import (
 func gettingStarted() []registration {
 	return []registration{
 		{initialize.Cmd, embedCmd.GroupGettingStarted},
+		{activate.Cmd, embedCmd.GroupGettingStarted},
+		{deactivate.Cmd, embedCmd.GroupGettingStarted},
 		{status.Cmd, embedCmd.GroupGettingStarted},
 		{guide.Cmd, embedCmd.GroupGettingStarted},
 	}
@@ -69,7 +70,7 @@ func gettingStarted() []registration {
 // management group.
 //
 // These commands operate on the full set of context source-of-truth
-// files (TASKS.md, DECISIONS.md, LEARNINGS.md, CONVENTIONS.md) —
+// files (TASKS.md, DECISIONS.md, LEARNINGS.md, CONVENTIONS.md):
 // adding entries, loading for agents, formatting, reconciling with
 // the codebase, detecting drift, and archiving completed work.
 //
@@ -92,7 +93,7 @@ func contextCmds() []registration {
 // artifacts returns command registrations for the artifacts group.
 //
 // These commands operate on specific artifact files inside
-// .context/ — the DECISIONS.md, LEARNINGS.md, and TASKS.md
+// .context/: the DECISIONS.md, LEARNINGS.md, and TASKS.md
 // stores, plus the `reindex` shortcut that rebuilds the
 // decision/learning index tables in a single call.
 //
@@ -125,13 +126,12 @@ func sessions() []registration {
 // runtime configuration group.
 //
 // Returns:
-//   - []registration: Config, permission, hook, backup, and prune commands
+//   - []registration: Config, permission, hook, and prune commands
 func runtimeCmds() []registration {
 	return []registration{
 		{config.Cmd, embedCmd.GroupRuntime},
 		{permission.Cmd, embedCmd.GroupRuntime},
 		{hook.Cmd, embedCmd.GroupRuntime},
-		{backup.Cmd, embedCmd.GroupRuntime},
 		{prune.Cmd, embedCmd.GroupRuntime},
 	}
 }
diff --git a/internal/claude/doc.go b/internal/claude/doc.go
index df45a2ef8..5aaae9cd0 100644
--- a/internal/claude/doc.go
+++ b/internal/claude/doc.go
@@ -4,14 +4,46 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package claude provides Claude Code integration types and utilities.
+// Package claude provides Claude Code integration types
+// and utilities for ctx.
 //
-// It provides configuration types for reading/writing Claude Code's
-// settings.local.json (permissions) and embedded skill definitions.
+// # Configuration Types
 //
-// Hook logic has been moved to the internal/cli/system package as native
-// Go subcommands, deployed via the ctx Claude Code plugin.
+// The package defines Go structs that mirror Claude
+// Code's settings.local.json format:
 //
-// Embedded assets:
-//   - skills/*/SKILL.md: Agent skill definitions for Claude Code
+//   - [Settings] is the top-level structure containing
+//     hooks and permissions.
+//   - [HookConfig] maps lifecycle events (PreToolUse,
+//     PostToolUse, UserPromptSubmit, SessionEnd) to
+//     lists of [HookMatcher] entries.
+//   - [PermissionsConfig] holds allow/deny lists for
+//     tool patterns (e.g., "Bash(ctx status:*)").
+//
+// These types are used by ctx init to generate and
+// update the project-level Claude Code configuration
+// file.
+//
+// # Embedded Skills
+//
+// The package exposes embedded Agent Skill definitions
+// via two functions:
+//
+//   - [SkillList] returns the names of all embedded
+//     skill directories (e.g., "ctx-status",
+//     "ctx-reflect").
+//   - [SkillContent] returns the raw SKILL.md bytes
+//     for a given skill name.
+//
+// Skills follow the Agent Skills specification with
+// SKILL.md files containing YAML frontmatter (name,
+// description) and autonomy-focused instructions.
+// They are installed to .claude/skills/ via ctx init.
+//
+// # Hook Migration
+//
+// Hook logic has been moved to internal/cli/system as
+// native Go subcommands deployed via the ctx Claude
+// Code plugin. The types here remain for reading and
+// writing the settings.local.json file.
 package claude
diff --git a/internal/cli/activate/activate.go b/internal/cli/activate/activate.go
new file mode 100644
index 000000000..61d18d8a0
--- /dev/null
+++ b/internal/cli/activate/activate.go
@@ -0,0 +1,22 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package activate
+
+import (
+	"github.com/spf13/cobra"
+
+	activateRoot "github.com/ActiveMemory/ctx/internal/cli/activate/cmd/root"
+)
+
+// Cmd returns the `ctx activate` command for registration on the
+// root ctx command. See cmd/root for the full command definition.
+//
+// Returns:
+//   - *cobra.Command: the activate command.
+func Cmd() *cobra.Command {
+	return activateRoot.Cmd()
+}
diff --git a/internal/cli/activate/activate_test.go b/internal/cli/activate/activate_test.go
new file mode 100644
index 000000000..1ecb540c6
--- /dev/null
+++ b/internal/cli/activate/activate_test.go
@@ -0,0 +1,275 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package activate_test
+
+import (
+	"bytes"
+	"fmt"
+	"os"
+	"path/filepath"
+	"strings"
+	"testing"
+
+	"github.com/ActiveMemory/ctx/internal/cli/activate"
+	"github.com/ActiveMemory/ctx/internal/config/dir"
+	"github.com/ActiveMemory/ctx/internal/config/env"
+)
+
+// runActivate invokes `ctx activate` with the given args and returns
+// (stdout, stderr, error) as separate buffers. Stream separation is
+// load-bearing: the eval-bindable shell content goes to stdout, the
+// human-readable advisories ("ctx: activated at:", "ctx: also
+// visible upward:") go to stderr. Tests that conflate the two miss
+// regressions where an advisory leaks into the eval stream.
+//
+// The command inherits the test process's env; use t.Setenv /
+// t.Chdir to scope state.
+func runActivate(t *testing.T, args []string) (stdout, stderr string, err error) {
+	t.Helper()
+	c := activate.Cmd()
+	c.SetArgs(args)
+	var so, se bytes.Buffer
+	c.SetOut(&so)
+	c.SetErr(&se)
+	err = c.Execute()
+	return so.String(), se.String(), err
+}
+
+// TestActivate_NoArgs_NoCandidates: cwd with no .context/ anywhere →
+// NoCandidates error, no stdout emit, no advisory either.
+func TestActivate_NoArgs_NoCandidates(t *testing.T) {
+	t.Setenv(env.CtxDir, "")
+	t.Chdir(t.TempDir())
+
+	stdout, _, err := runActivate(t, nil)
+	if err == nil {
+		t.Fatalf("expected NoCandidates error, got nil (stdout=%q)", stdout)
+	}
+	if stdout != "" {
+		t.Errorf("stdout must be empty on error path: %q", stdout)
+	}
+}
+
+// TestActivate_NoArgs_OneCandidate: exactly one .context/ upward →
+// stdout carries the export, stderr carries the
+// `ctx: activated at:` advisory.
+func TestActivate_NoArgs_OneCandidate(t *testing.T) {
+	t.Setenv(env.CtxDir, "")
+
+	projectRoot := t.TempDir()
+	ctxPath := filepath.Join(projectRoot, dir.Context)
+	if err := os.MkdirAll(ctxPath, 0700); err != nil {
+		t.Fatalf("mkdir: %v", err)
+	}
+	t.Chdir(projectRoot)
+	t.Setenv("SHELL", "/bin/bash")
+
+	stdout, stderr, err := runActivate(t, nil)
+	if err != nil {
+		t.Fatalf("expected success, got err=%v", err)
+	}
+	if !strings.HasPrefix(stdout, "export CTX_DIR=") {
+		t.Errorf("stdout must start with export, got %q", stdout)
+	}
+	if !strings.Contains(stdout, ctxPath) {
+		t.Errorf("stdout missing path %q: %q", ctxPath, stdout)
+	}
+	// Activated-at advisory always announces the bind, even in
+	// the single-candidate case.
+	wantActivated := "ctx: activated at: " + ctxPath
+	if !strings.Contains(stderr, wantActivated) {
+		t.Errorf("stderr missing activated-at advisory %q: %q",
+			wantActivated, stderr)
+	}
+}
+
+// TestActivate_ErrorPath_StdoutEmpty guards the eval-recursion
+// trap surfaced by the smoke test: if any error path lets cobra
+// print Usage / Flags / Examples to stdout, `eval "$(ctx
+// activate)"` captures the Examples block (which literally
+// contains `eval "$(ctx activate)"`) and re-executes activate,
+// looping until the captured text mangles past the parser.
+//
+// Stdout MUST stay empty on every error path. Stderr can carry
+// the human-readable error; the eval shell never sees stderr.
+//
+// Uses the no-candidates case (zero `.context/` visible upward)
+// since multi-candidate is no longer an error case under the
+// innermost-wins policy.
+func TestActivate_ErrorPath_StdoutEmpty(t *testing.T) {
+	t.Setenv(env.CtxDir, "")
+	t.Chdir(t.TempDir())
+
+	stdout, stderr, err := runActivate(t, nil)
+	if err == nil {
+		t.Fatalf("expected NoCandidates error, got nil")
+	}
+	if stdout != "" {
+		t.Errorf("stdout must be empty on error path, got %q", stdout)
+	}
+	if !strings.Contains(stderr, "no .context/ directory found") {
+		t.Errorf("stderr should describe the error, got %q", stderr)
+	}
+}
+
+// TestActivate_NoArgs_ManyCandidates: two `.context/` dirs on the
+// upward path → innermost wins on stdout (eval-bindable),
+// stderr carries both the `ctx activated at:` line and one
+// `ctx: also visible upward:` line per other candidate. Matches
+// git/make innermost-project semantics.
+//
+// The split-stream assertion is load-bearing: putting any
+// advisory on stdout (the eval-captured stream) makes it
+// invisible to anyone running `eval "$(ctx activate)"`.
+func TestActivate_NoArgs_ManyCandidates(t *testing.T) {
+	t.Setenv(env.CtxDir, "")
+	t.Setenv("SHELL", "/bin/bash")
+
+	tempDir := t.TempDir()
+	outerCtx := filepath.Join(tempDir, dir.Context)
+	innerDir := filepath.Join(tempDir, "inner")
+	innerCtx := filepath.Join(innerDir, dir.Context)
+	startDir := filepath.Join(innerDir, "deep")
+
+	for _, d := range []string{outerCtx, innerCtx, startDir} {
+		if err := os.MkdirAll(d, 0700); err != nil {
+			t.Fatalf("mkdir %s: %v", d, err)
+		}
+	}
+	t.Chdir(startDir)
+
+	stdout, stderr, err := runActivate(t, nil)
+	if err != nil {
+		t.Fatalf("expected success (innermost wins), got err=%v", err)
+	}
+
+	// stdout: only the export line for the innermost candidate.
+	if !strings.HasPrefix(stdout, "export CTX_DIR=") {
+		t.Errorf("stdout must start with export, got %q", stdout)
+	}
+	if !strings.Contains(stdout, innerCtx) {
+		t.Errorf("export should bind the inner candidate %q: %q",
+			innerCtx, stdout)
+	}
+	if strings.Contains(stdout, "also visible") ||
+		strings.Contains(stdout, "activated at") {
+		t.Errorf("stdout must NOT carry advisories (eval invisibility): %q",
+			stdout)
+	}
+
+	// stderr: activated-at line for the inner, also-visible line for
+	// the outer.
+	wantActivated := "ctx: activated at: " + innerCtx
+	if !strings.Contains(stderr, wantActivated) {
+		t.Errorf("stderr missing %q: %q", wantActivated, stderr)
+	}
+	wantAdvisory := "ctx: also visible upward: " + outerCtx
+	if !strings.Contains(stderr, wantAdvisory) {
+		t.Errorf("stderr missing %q: %q", wantAdvisory, stderr)
+	}
+}
+
+// TestActivate_RejectsArgs guards the spec contract: `ctx activate
+// <path>` is removed under the single-source-anchor model. Any
+// positional argument must be rejected (either as cobra's
+// "accepts 0 arg(s)" or "unknown command", whichever cobra picks
+// for the literal value) and emit nothing on stdout.
+func TestActivate_RejectsArgs(t *testing.T) {
+	t.Setenv(env.CtxDir, "")
+	t.Chdir(t.TempDir())
+
+	stdout, _, err := runActivate(t, []string{"some-explicit-path"})
+	if err == nil {
+		t.Fatalf("expected cobra args rejection, got nil (stdout=%q)", stdout)
+	}
+	if strings.Contains(stdout, "export CTX_DIR") {
+		t.Errorf("stdout should not contain export on error: %q", stdout)
+	}
+}
+
+// TestActivate_StaleReplacementComment: parent shell has a stale
+// CTX_DIR pointing at a different project; activate emits a
+// `# ctx: replacing stale CTX_DIR=` comment before the export
+// so the user can see the change in `eval` output.
+func TestActivate_StaleReplacementComment(t *testing.T) {
+	stale := filepath.Join(t.TempDir(), "old", dir.Context)
+	if err := os.MkdirAll(stale, 0700); err != nil {
+		t.Fatalf("mkdir stale: %v", err)
+	}
+	t.Setenv(env.CtxDir, stale)
+
+	projectRoot := t.TempDir()
+	ctxPath := filepath.Join(projectRoot, dir.Context)
+	if err := os.MkdirAll(ctxPath, 0700); err != nil {
+		t.Fatalf("mkdir new: %v", err)
+	}
+	t.Chdir(projectRoot)
+	t.Setenv("SHELL", "/bin/bash")
+
+	stdout, _, err := runActivate(t, nil)
+	if err != nil {
+		t.Fatalf("expected success, got err=%v", err)
+	}
+	wantPrefix := fmt.Sprintf("# ctx: replacing stale %s=%s\n",
+		env.CtxDir, stale)
+	if !strings.HasPrefix(stdout, wantPrefix) {
+		t.Errorf("stdout missing stale-replacement comment.\n got: %q\nwant prefix: %q",
+			stdout, wantPrefix)
+	}
+	if !strings.Contains(stdout, "export CTX_DIR=") {
+		t.Errorf("stdout missing export: %q", stdout)
+	}
+	if !strings.Contains(stdout, ctxPath) {
+		t.Errorf("stdout missing new path %q: %q", ctxPath, stdout)
+	}
+}
+
+// TestActivate_NoStaleCommentOnFirstActivate: when CTX_DIR is unset
+// or matches the resolved value, the comment is suppressed.
+func TestActivate_NoStaleCommentOnFirstActivate(t *testing.T) {
+	t.Setenv(env.CtxDir, "")
+
+	projectRoot := t.TempDir()
+	ctxPath := filepath.Join(projectRoot, dir.Context)
+	if err := os.MkdirAll(ctxPath, 0700); err != nil {
+		t.Fatalf("mkdir: %v", err)
+	}
+	t.Chdir(projectRoot)
+	t.Setenv("SHELL", "/bin/bash")
+
+	stdout, _, err := runActivate(t, nil)
+	if err != nil {
+		t.Fatalf("expected success, got err=%v", err)
+	}
+	if strings.Contains(stdout, "replacing stale") {
+		t.Errorf("stdout should not contain stale comment: %q", stdout)
+	}
+}
+
+// TestActivate_ShellFlag: --shell zsh uses POSIX export syntax
+// (same output shape as bash; flag is just a dispatch key).
+func TestActivate_ShellFlag(t *testing.T) {
+	t.Setenv(env.CtxDir, "")
+
+	projectRoot := t.TempDir()
+	ctxPath := filepath.Join(projectRoot, dir.Context)
+	if err := os.MkdirAll(ctxPath, 0700); err != nil {
+		t.Fatalf("mkdir: %v", err)
+	}
+	t.Chdir(projectRoot)
+
+	stdout, _, err := runActivate(t, []string{"--shell", "zsh"})
+	if err != nil {
+		t.Fatalf("expected success, got err=%v", err)
+	}
+	if !strings.HasPrefix(stdout, "export CTX_DIR=") {
+		t.Errorf("expected export prefix, got %q", stdout)
+	}
+	if !strings.HasSuffix(strings.TrimSpace(stdout), "'") {
+		t.Errorf("expected trailing single quote (shell quoting), got %q", stdout)
+	}
+}
diff --git a/internal/cli/activate/cmd/root/cmd.go b/internal/cli/activate/cmd/root/cmd.go
new file mode 100644
index 000000000..cbd564453
--- /dev/null
+++ b/internal/cli/activate/cmd/root/cmd.go
@@ -0,0 +1,77 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package root
+
+import (
+	"github.com/spf13/cobra"
+
+	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
+	"github.com/ActiveMemory/ctx/internal/config/cli"
+	"github.com/ActiveMemory/ctx/internal/config/embed/cmd"
+	embedFlag "github.com/ActiveMemory/ctx/internal/config/embed/flag"
+	cFlag "github.com/ActiveMemory/ctx/internal/config/flag"
+)
+
+// Cmd returns the `ctx activate` cobra command.
+//
+// Args-free under the single-source-anchor model
+// (specs/single-source-context-anchor.md). Activation is always
+// project-local discovery via [rc.ScanCandidates] from CWD; the
+// explicit-path mode that previously accepted an argument was
+// removed because hub-client / hub-server scenarios store at
+// `~/.ctx/hub-data/` and never read `.context/` directly, so they
+// activate from the project root like everyone else.
+//
+// One flag remains:
+//
+//	--shell    override auto-detection (defaults to $SHELL).
+//
+// # Stdout discipline (critical)
+//
+// Activate's stdout is consumed by `eval "$(ctx activate)"`. Every
+// byte must be either valid shell or empty. Usage / Flags /
+// Examples blocks must NEVER reach stdout, because cobra's
+// Examples for this command literally contain
+// `eval "$(ctx activate)"`, which would re-execute activate inside
+// the eval and trigger an infinite loop on any error path.
+//
+// SilenceUsage is therefore set unconditionally below (rather than
+// only after a return) so cobra renders only the error to stderr
+// when something fails. SilenceErrors stays at the root level so
+// errors keep going to stderr (visible to the user) without being
+// captured by the eval.
+//
+// Returns:
+//   - *cobra.Command: configured activate command.
+func Cmd() *cobra.Command {
+	short, long := desc.Command(cmd.DescKeyActivate)
+	c := &cobra.Command{
+		Use:     cmd.UseActivate,
+		Short:   short,
+		Long:    long,
+		Example: desc.Example(cmd.DescKeyActivate),
+		Args:    cobra.NoArgs,
+		// Exempt from the global init / require-context-dir checks:
+		// activate's whole purpose is to help the user declare the
+		// context directory in the first place.
+		Annotations: map[string]string{cli.AnnotationSkipInit: cli.AnnotationTrue},
+		// See the Stdout discipline note above. Without this, an
+		// error path (multi-candidate, no-candidates, etc.) prints
+		// Usage+Examples to stdout, gets captured by `$(...)`, and
+		// the embedded `eval "$(ctx activate)"` example re-runs the
+		// command. Loop.
+		SilenceUsage: true,
+		RunE: func(cmd *cobra.Command, args []string) error {
+			shell, _ := cmd.Flags().GetString(cFlag.Shell)
+			return Run(cmd, shell)
+		},
+	}
+	c.Flags().String(cFlag.Shell, "",
+		desc.Flag(embedFlag.DescKeyActivateShell),
+	)
+	return c
+}
diff --git a/internal/cli/activate/cmd/root/doc.go b/internal/cli/activate/cmd/root/doc.go
new file mode 100644
index 000000000..3e8708457
--- /dev/null
+++ b/internal/cli/activate/cmd/root/doc.go
@@ -0,0 +1,33 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+// Package root implements the `ctx activate` cobra command.
+//
+// Activate is the shell-integration entry point under the
+// explicit-context-dir model (spec: specs/explicit-context-dir.md).
+// Its single job is to emit an `export CTX_DIR=...` line to stdout so
+// that callers can bind the context directory for their shell via
+// `eval "$(ctx activate)"`.
+//
+// Unlike most commands in the CLI, `activate` is in the exempt
+// allowlist: it does not call rc.RequireContextDir because
+// activate's reason for existing is precisely to help users declare
+// CTX_DIR in the first place.
+//
+// Resolution:
+//
+//   - The command takes no arguments (cobra.NoArgs): activation is
+//     always a project-local scan upward from CWD using
+//     rc.ScanCandidates. The explicit-path mode was removed under
+//     the single-source-anchor model.
+//   - The innermost visible candidate is selected (innermost wins,
+//     matching git/make nested-project semantics). Additional
+//     candidates further up are reported to stderr as advisories.
+//     Zero candidates → NoCandidates error.
+//
+// This is the only command in the CLI that walks. All other
+// resolution flows through rc.ContextDir / rc.RequireContextDir.
+package root
diff --git a/internal/cli/activate/cmd/root/run.go b/internal/cli/activate/cmd/root/run.go
new file mode 100644
index 000000000..b7f506008
--- /dev/null
+++ b/internal/cli/activate/cmd/root/run.go
@@ -0,0 +1,76 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package root
+
+import (
+	"fmt"
+	"os"
+
+	"github.com/spf13/cobra"
+
+	"github.com/ActiveMemory/ctx/internal/cli/activate/core/emit"
+	"github.com/ActiveMemory/ctx/internal/cli/activate/core/resolve"
+	"github.com/ActiveMemory/ctx/internal/config/env"
+	cfgShell "github.com/ActiveMemory/ctx/internal/config/shell"
+	writeActivate "github.com/ActiveMemory/ctx/internal/write/activate"
+)
+
+// Run executes the `ctx activate` command.
+//
+// Resolves the target .context/ directory via [resolve.Selected]
+// (always project-local scan from CWD under the single-source-anchor
+// model), then prints the shell-specific export statement for
+// CTX_DIR to stdout.
+//
+// # Output shape
+//
+// Two channels:
+//
+//  1. **stdout**: consumed by `eval "$(ctx activate)"`. Every
+//     byte must be valid POSIX shell. Composed in order:
+//     (a) zero or one `# ctx: replacing stale CTX_DIR=\n`
+//     comment line when the parent shell already has [env.CtxDir]
+//     set to a different value than the resolved target;
+//     (b) the shell-specific `export CTX_DIR=\n` line.
+//
+//  2. **stderr**: informational advisories for the user. Always
+//     carries a `ctx activated at: ` line announcing the
+//     bound directory (single-candidate case included), and
+//     additionally one `ctx: also visible upward: ` line
+//     per other `.context/` candidate when more than one is
+//     visible upward. `eval` does not capture stderr, so these
+//     lines pass through to the terminal where the user sees
+//     them. Innermost wins (matches git/make nested-project
+//     semantics); the additional candidates are reported, not
+//     refused. The comment-on-stdout approach considered
+//     earlier was invisible to the only documented invocation
+//     form (`eval`), so it informed nobody.
+//
+// Parameters:
+//   - cmd:   cobra command providing stdout / stderr. Nil is a
+//     no-op via [writeActivate.Emit] / [writeActivate.AlsoVisible].
+//   - shell: value of the --shell flag; empty means auto-detect
+//     from $SHELL via [emit.DetectShell].
+//
+// Returns:
+//   - error: non-nil on resolution failure (no `.context/` visible
+//     from CWD upward); nil on successful emit.
+func Run(cmd *cobra.Command, shell string) error {
+	selected, others, err := resolve.Selected()
+	if err != nil {
+		return err
+	}
+	out := emit.Set(emit.DetectShell(shell), selected)
+	if existing := os.Getenv(env.CtxDir); existing != "" && existing != selected {
+		out = fmt.Sprintf(cfgShell.FormatStaleReplaceComment,
+			env.CtxDir, existing, out)
+	}
+	writeActivate.ActivatedAt(cmd, selected)
+	writeActivate.AlsoVisible(cmd, others)
+	writeActivate.Emit(cmd, out)
+	return nil
+}
diff --git a/internal/cli/activate/core/emit/doc.go b/internal/cli/activate/core/emit/doc.go
new file mode 100644
index 000000000..bc26d8b29
--- /dev/null
+++ b/internal/cli/activate/core/emit/doc.go
@@ -0,0 +1,28 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+// Package emit produces the shell-specific strings used by
+// `ctx activate` and `ctx deactivate` to bind or clear CTX_DIR
+// for the current shell via `eval "$(ctx activate)"`.
+//
+// v1 supports bash, zsh, and POSIX sh. All three share identical
+// `export` / `unset` syntax. Fish / nushell / powershell can be
+// added later by extending [Set] and [Unset] without touching the
+// call sites. That extensibility is the only reason this lives in
+// its own package rather than inline in the command's Run.
+//
+// # Supported Shells
+//
+//	bash, zsh, sh: POSIX export / unset
+//	fish:          deferred (see specs/explicit-context-dir.md).
+//
+// # Detection
+//
+// [DetectShell] returns the first non-empty value of, in order:
+// the explicit --shell flag, the basename of $SHELL, and a bash
+// fallback. Users who want deterministic output in scripts should
+// pass --shell explicitly.
+package emit
diff --git a/internal/cli/activate/core/emit/emit.go b/internal/cli/activate/core/emit/emit.go
new file mode 100644
index 000000000..3e73dda95
--- /dev/null
+++ b/internal/cli/activate/core/emit/emit.go
@@ -0,0 +1,90 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package emit
+
+import (
+	"os"
+	"path/filepath"
+	"strings"
+
+	"github.com/ActiveMemory/ctx/internal/config/env"
+	cfgShell "github.com/ActiveMemory/ctx/internal/config/shell"
+)
+
+// emitters maps supported shell identifiers to their set-emitter.
+// Unknown shells fall back to POSIX export semantics via the
+// default branch in Set.
+var emitters = map[string]emitter{
+	cfgShell.Bash: posixSet,
+	cfgShell.Zsh:  posixSet,
+	cfgShell.Sh:   posixSet,
+}
+
+// unsetters maps supported shell identifiers to their unset-emitter.
+// Unknown shells fall back to POSIX unset semantics via the default
+// branch in Unset.
+var unsetters = map[string]emitter{
+	cfgShell.Bash: posixUnset,
+	cfgShell.Zsh:  posixUnset,
+	cfgShell.Sh:   posixUnset,
+}
+
+// DetectShell returns the shell identifier to emit for.
+//
+// Priority: explicit override > basename of $SHELL > bash fallback.
+// The returned value is always lowercase and suitable as a key into
+// the [emitters] / [unsetters] tables.
+//
+// Parameters:
+//   - override: explicit --shell flag value ("" to auto-detect).
+//
+// Returns:
+//   - string: one of [cfgShell.Bash], [cfgShell.Zsh], [cfgShell.Sh],
+//     or the original override (callers treat unknowns as POSIX).
+func DetectShell(override string) string {
+	if override != "" {
+		return strings.ToLower(override)
+	}
+	if s := os.Getenv(env.Shell); s != "" {
+		return strings.ToLower(filepath.Base(s))
+	}
+	return cfgShell.Bash
+}
+
+// Set returns the shell command that exports CTX_DIR=path, ending
+// with a newline so the output is directly consumable by
+// `eval "$(ctx activate)"`.
+//
+// Parameters:
+//   - shell: result of [DetectShell].
+//   - path:  absolute path to the selected context directory.
+//
+// Returns:
+//   - string: one-line export statement with trailing newline.
+func Set(shell, path string) string {
+	fn, ok := emitters[shell]
+	if !ok {
+		fn = posixSet
+	}
+	return fn(env.CtxDir, shellQuote(path))
+}
+
+// Unset returns the shell command that clears CTX_DIR for the
+// current shell, ending with a newline.
+//
+// Parameters:
+//   - shell: result of [DetectShell].
+//
+// Returns:
+//   - string: one-line unset statement with trailing newline.
+func Unset(shell string) string {
+	fn, ok := unsetters[shell]
+	if !ok {
+		fn = posixUnset
+	}
+	return fn(env.CtxDir, "")
+}
diff --git a/internal/cli/activate/core/emit/posix.go b/internal/cli/activate/core/emit/posix.go
new file mode 100644
index 000000000..9f547d774
--- /dev/null
+++ b/internal/cli/activate/core/emit/posix.go
@@ -0,0 +1,57 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package emit
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/ActiveMemory/ctx/internal/config/shell"
+)
+
+// posixSet emits `export KEY=VALUE\n` for bash/zsh/sh. Used as the
+// map value in the emitters table for all POSIX-family shells.
+//
+// Parameters:
+//   - key:         environment variable name (already well-formed).
+//   - quotedValue: value wrapped by shellQuote.
+//
+// Returns:
+//   - string: one-line export statement with trailing newline.
+func posixSet(key, quotedValue string) string {
+	return fmt.Sprintf(shell.FormatPOSIXExport, key, quotedValue)
+}
+
+// posixUnset emits `unset KEY\n` for bash/zsh/sh. The value
+// argument is ignored (unset has no payload) but kept in the
+// signature to match the emitter type.
+//
+// Parameters:
+//   - key: environment variable name to clear.
+//   - _:   unused; kept for emitter-signature compatibility.
+//
+// Returns:
+//   - string: one-line unset statement with trailing newline.
+func posixUnset(key, _ string) string {
+	return fmt.Sprintf(shell.FormatPOSIXUnset, key)
+}
+
+// shellQuote wraps s in single quotes, escaping any embedded single
+// quote as close-escape-reopen (`'` followed by `\'` followed by `'`).
+// The resulting string is safe to paste into any POSIX-compatible
+// shell regardless of s's contents.
+//
+// Parameters:
+//   - s: raw value (typically a filesystem path).
+//
+// Returns:
+//   - string: single-quoted, escape-safe shell literal.
+func shellQuote(s string) string {
+	return shell.SingleQuote +
+		strings.ReplaceAll(s, shell.SingleQuote, shell.SingleQuoteEscaped) +
+		shell.SingleQuote
+}
diff --git a/internal/cli/activate/core/emit/types.go b/internal/cli/activate/core/emit/types.go
new file mode 100644
index 000000000..8c2102b67
--- /dev/null
+++ b/internal/cli/activate/core/emit/types.go
@@ -0,0 +1,12 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package emit
+
+// emitter produces a shell-specific one-line statement for the given
+// key and pre-quoted value, terminated by a newline. Concrete
+// emitters live in posix.go; the dispatch table is in emit.go.
+type emitter func(key, quotedValue string) string
diff --git a/internal/cli/activate/core/resolve/doc.go b/internal/cli/activate/core/resolve/doc.go
new file mode 100644
index 000000000..b8e4be886
--- /dev/null
+++ b/internal/cli/activate/core/resolve/doc.go
@@ -0,0 +1,26 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+// Package resolve picks the `.context/` directory that `ctx
+// activate` should emit a shell export for. It is the ONE place
+// in the CLI that walks the filesystem during context resolution;
+// all other commands honor `CTX_DIR` or error via
+// [rc.RequireContextDir].
+//
+// [Selected] is the single entry point. It walks upward from CWD
+// via [rc.ScanCandidates] and returns:
+//
+//   - the **innermost** visible `.context/` (selected),
+//   - any **additional** candidates further up the path,
+//   - or [errActivate.NoCandidates] when the walk finds none.
+//
+// Multi-candidate is not an error: workspace-level shared
+// `.context/` dirs alongside per-project ones are a legitimate
+// nested-project layout. Innermost wins (matching git / make
+// behavior in nested layouts), and the additional candidates are
+// surfaced so callers can include them as informational comments
+// in eval-able output.
+package resolve
diff --git a/internal/cli/activate/core/resolve/internal.go b/internal/cli/activate/core/resolve/internal.go
new file mode 100644
index 000000000..a437a716f
--- /dev/null
+++ b/internal/cli/activate/core/resolve/internal.go
@@ -0,0 +1,49 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package resolve
+
+import (
+	"os"
+
+	errActivate "github.com/ActiveMemory/ctx/internal/err/activate"
+	"github.com/ActiveMemory/ctx/internal/rc"
+)
+
+// scan returns the innermost visible .context/ candidate from CWD
+// alongside any additional candidates further up the path. The
+// scan walks via [rc.ScanCandidates] (innermost-first); resolution
+// itself never walks outside this function.
+//
+// Multi-candidate behavior is "innermost wins, the rest are
+// reported." This matches what `git` and `make` do for nested
+// project layouts (innermost project owns the working directory)
+// and supports legitimate workspace-level shared `.context/` dirs
+// next to per-project ones; the previous "refuse on multi" rule
+// was overly conservative for that workflow. Callers receive the
+// full list of additional candidates so they can surface them as
+// informational comments in eval-able output without overriding
+// the bind.
+//
+// Returns:
+//   - string: absolute path of the innermost (selected) candidate.
+//   - []string: zero-or-more additional candidates further up the
+//     path, in the order [rc.ScanCandidates] returned them
+//     (closest-first). Empty when only one candidate is visible.
+//   - error: [errActivate.NoCandidates] when the upward walk finds
+//     no `.context/` directory at all. Other errors are surfaced
+//     for I/O failures (e.g., os.Getwd).
+func scan() (string, []string, error) {
+	cwd, cwdErr := os.Getwd()
+	if cwdErr != nil {
+		return "", nil, cwdErr
+	}
+	candidates := rc.ScanCandidates(cwd)
+	if len(candidates) == 0 {
+		return "", nil, errActivate.NoCandidates()
+	}
+	return candidates[0], candidates[1:], nil
+}
diff --git a/internal/cli/activate/core/resolve/resolve.go b/internal/cli/activate/core/resolve/resolve.go
new file mode 100644
index 000000000..67d53c937
--- /dev/null
+++ b/internal/cli/activate/core/resolve/resolve.go
@@ -0,0 +1,32 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package resolve
+
+// Selected returns the innermost visible .context/ directory
+// alongside any additional candidates further up the path.
+//
+// Single-source-anchor model
+// (specs/single-source-context-anchor.md): activation is always
+// project-local scan from CWD. The explicit-path mode that used
+// to accept an argument was removed.
+//
+// Multi-candidate is no longer an error: workspace-level shared
+// `.context/` dirs alongside per-project ones are a legitimate
+// nested-project layout. Innermost wins (matching `git` / `make`
+// behavior in nested layouts), and the additional candidates are
+// surfaced so callers can include them as informational comments
+// in eval-able output.
+//
+// Returns:
+//   - string: absolute path of the resolved .context/ directory.
+//   - []string: additional candidates further up the path, empty
+//     when only one is visible.
+//   - error: [errActivate.NoCandidates] when no `.context/` is
+//     visible from CWD upward.
+func Selected() (string, []string, error) {
+	return scan()
+}
diff --git a/internal/cli/activate/doc.go b/internal/cli/activate/doc.go
new file mode 100644
index 000000000..c505a109b
--- /dev/null
+++ b/internal/cli/activate/doc.go
@@ -0,0 +1,36 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+// Package activate implements the `ctx activate` command.
+//
+// Activate is the shell-integration entry point under the
+// explicit-context-dir resolution model introduced in
+// specs/explicit-context-dir.md. The command scans upward from CWD
+// via rc.ScanCandidates (the explicit-path argument was removed
+// under the single-source-anchor model) and emits a shell-specific
+// `export CTX_DIR=...` statement for `eval "$(ctx activate)"`.
+//
+// Activate is the ONLY command in the CLI that walks the filesystem
+// during resolution. Every other command reads the declared
+// CTX_DIR / --context-dir or calls [rc.RequireContextDir] and errors
+// loudly when neither is set. Centralizing walk-up in activate keeps
+// silent-inference bugs confined to a single supervised entry point.
+//
+// # Subpackages
+//
+//	cmd/root    : cobra command definition and entry point.
+//	core/resolve: candidate selection; core/emit: shell emitters.
+//
+// # Behavior Summary
+//
+// Activation is always project-local discovery from CWD: the
+// innermost visible `.context/` wins (matching git/make nested
+// semantics), additional candidates further up the path are
+// reported to stderr as informational advisories, and zero
+// candidates yields a NoCandidates error. There is no
+// explicit-path mode and no --force escape hatch; all other
+// commands resolve via rc.ContextDir / rc.RequireContextDir.
+package activate
diff --git a/internal/cli/activate/testmain_test.go b/internal/cli/activate/testmain_test.go
new file mode 100644
index 000000000..a41bdbb63
--- /dev/null
+++ b/internal/cli/activate/testmain_test.go
@@ -0,0 +1,22 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package activate_test
+
+import (
+	"os"
+	"testing"
+
+	"github.com/ActiveMemory/ctx/internal/assets/read/lookup"
+)
+
+// TestMain initializes the embedded text-asset lookup so activate's
+// error factories (internal/err/activate.*) resolve their DescKey
+// messages instead of returning empty strings.
+func TestMain(m *testing.M) {
+	lookup.Init()
+	os.Exit(m.Run())
+}
diff --git a/internal/cli/add/add_test.go b/internal/cli/add/add_test.go
index 2a9cdc0f6..cb47d3ac7 100644
--- a/internal/cli/add/add_test.go
+++ b/internal/cli/add/add_test.go
@@ -13,6 +13,7 @@ import (
 	"testing"
 
 	"github.com/ActiveMemory/ctx/internal/cli/initialize"
+	"github.com/ActiveMemory/ctx/internal/testutil/testctx"
 )
 
 // TestAddCommand tests the add command.
@@ -29,6 +30,8 @@ func TestAddCommand(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 
+	testctx.Declare(t, tmpDir)
+
 	// First init
 	initCmd := initialize.Cmd()
 	initCmd.SetArgs([]string{})
@@ -69,6 +72,8 @@ func TestAddDecisionAndLearning(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 
+	testctx.Declare(t, tmpDir)
+
 	// First init
 	initCmd := initialize.Cmd()
 	initCmd.SetArgs([]string{})
@@ -216,6 +221,8 @@ func TestPrependOrder(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 
+	testctx.Declare(t, tmpDir)
+
 	// First init
 	initCmd := initialize.Cmd()
 	initCmd.SetArgs([]string{})
@@ -336,6 +343,8 @@ func TestAddFromFile(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 
+	testctx.Declare(t, tmpDir)
+
 	// First init
 	initCmd := initialize.Cmd()
 	initCmd.SetArgs([]string{})
diff --git a/internal/cli/add/cmd/coverage_test.go b/internal/cli/add/cmd/coverage_test.go
index 6a493892b..a826a743b 100644
--- a/internal/cli/add/cmd/coverage_test.go
+++ b/internal/cli/add/cmd/coverage_test.go
@@ -25,12 +25,14 @@ import (
 	errAdd "github.com/ActiveMemory/ctx/internal/err/add"
 	errFs "github.com/ActiveMemory/ctx/internal/err/fs"
 	"github.com/ActiveMemory/ctx/internal/inspect"
+	"github.com/ActiveMemory/ctx/internal/rc"
 	"github.com/spf13/cobra"
 
 	"github.com/ActiveMemory/ctx/internal/cli/initialize"
 	entryType "github.com/ActiveMemory/ctx/internal/config/entry"
 	"github.com/ActiveMemory/ctx/internal/entity"
 	"github.com/ActiveMemory/ctx/internal/entry"
+	"github.com/ActiveMemory/ctx/internal/testutil/testctx"
 )
 
 // ---------------------------------------------------------------------------
@@ -659,7 +661,12 @@ func TestWriteEntry_FileNotFound(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 
-	// No .context/ directory, so files won't exist
+	// Declare a non-existent context dir so we hit "file not found"
+	// rather than "context directory not declared".
+	t.Setenv("CTX_DIR", filepath.Join(tmpDir, ".context"))
+	rc.Reset()
+	t.Cleanup(rc.Reset)
+
 	err := entry.Write(entity.EntryParams{
 		Type:    "task",
 		Content: "something",
@@ -684,6 +691,8 @@ func TestRun_UnknownType(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 
+	testctx.Declare(t, tmpDir)
+
 	initCmd := initialize.Cmd()
 	initCmd.SetArgs([]string{})
 	if err := initCmd.Execute(); err != nil {
@@ -715,6 +724,8 @@ func TestRun_NoContent(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 
+	testctx.Declare(t, tmpDir)
+
 	initCmd := initialize.Cmd()
 	initCmd.SetArgs([]string{})
 	if err := initCmd.Execute(); err != nil {
@@ -745,6 +756,8 @@ func TestRun_TaskWithPriority(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 
+	testctx.Declare(t, tmpDir)
+
 	initCmd := initialize.Cmd()
 	initCmd.SetArgs([]string{})
 	if err := initCmd.Execute(); err != nil {
@@ -784,6 +797,8 @@ func TestRun_TaskWithSection(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 
+	testctx.Declare(t, tmpDir)
+
 	initCmd := initialize.Cmd()
 	initCmd.SetArgs([]string{})
 	if err := initCmd.Execute(); err != nil {
diff --git a/internal/cli/add/cmd/doc.go b/internal/cli/add/cmd/doc.go
index d762d10fb..8cd925bdc 100644
--- a/internal/cli/add/cmd/doc.go
+++ b/internal/cli/add/cmd/doc.go
@@ -4,10 +4,38 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package cmd wires the cobra subcommands for ctx add.
+// Package cmd wires the cobra subcommands for the
+// "ctx add" command tree.
 //
-// It registers decision, learning, convention, and task subcommands
-// under the add parent, following the cmd/root + core taxonomy.
-// Each subcommand delegates to the shared [root.Run] function
-// with type-specific flags.
+// # Purpose
+//
+// This package registers the root add command and its
+// type-specific subcommands under a single parent.
+// Each subcommand delegates to [root.Run] with flags
+// that control which context file receives the entry.
+//
+// # Subcommand Registration
+//
+// The package imports and wires the root subcommand
+// from the root/ child package. The root command
+// accepts a positional type argument (task, decision,
+// learning, convention) and dispatches accordingly.
+//
+// # Entry Types
+//
+// Supported entry types and their target files:
+//
+//   - task       -> TASKS.md
+//   - decision   -> DECISIONS.md
+//   - learning   -> LEARNINGS.md
+//   - convention -> CONVENTIONS.md
+//
+// Both singular and plural forms are accepted.
+//
+// # Output
+//
+// On success the command prints a confirmation line
+// naming the file that was updated. When --share is
+// set it also publishes the entry to a connected
+// ctx Hub instance.
 package cmd
diff --git a/internal/cli/add/cmd/root/doc.go b/internal/cli/add/cmd/root/doc.go
index 320685b49..d7440208b 100644
--- a/internal/cli/add/cmd/root/doc.go
+++ b/internal/cli/add/cmd/root/doc.go
@@ -1,13 +1,39 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package root implements the ctx add command.
+// Package root implements **`ctx add`**, the command
+// that adds a new entry (task / decision / learning /
+// convention) to the corresponding `.context/` file with
+// validated provenance, canonical formatting, and an
+// auto-updated index table.
 //
-// [Cmd] builds the cobra.Command with type-specific flags.
-// [Run] validates arguments, extracts content from args or
-// --from-file, formats the entry using core/format, and inserts
-// it into the target context file using core/insert.
+// # Public Surface
+//
+//   - **[Cmd]**: cobra command with the type
+//     selector (`-t task|decision|learning|convention`)
+//     plus type-specific flags (`--priority`,
+//     `--rationale`, `--consequence`, `--lesson`,
+//     `--branch`, `--commit`, `--session-id`,
+//     `--from-file`, `--application`, etc.).
+//   - **[Run]**: validates the supplied flags
+//     against the type's required-fields list,
+//     extracts content from positional args or
+//     `--from-file`, formats the entry via the
+//     `core/format` siblings, and inserts it via
+//     [internal/cli/add/core/insert].
+//
+// # Validation Boundaries
+//
+// All hard checks (required fields, secret patterns,
+// length limits, provenance requirements per
+// `.ctxrc`) live in [internal/entry] so the rules
+// are identical regardless of caller (CLI here, MCP
+// `ctx_add` tool elsewhere).
+//
+// # Concurrency
+//
+// Single-process, sequential.
 package root
diff --git a/internal/cli/add/cmd/root/run.go b/internal/cli/add/cmd/root/run.go
index 737724efc..cf588fd6e 100644
--- a/internal/cli/add/cmd/root/run.go
+++ b/internal/cli/add/cmd/root/run.go
@@ -23,6 +23,7 @@ import (
 	"github.com/ActiveMemory/ctx/internal/entry"
 	errAdd "github.com/ActiveMemory/ctx/internal/err/add"
 	"github.com/ActiveMemory/ctx/internal/hub"
+	"github.com/ActiveMemory/ctx/internal/rc"
 	"github.com/ActiveMemory/ctx/internal/trace"
 	writeAdd "github.com/ActiveMemory/ctx/internal/write/add"
 	writeConnect "github.com/ActiveMemory/ctx/internal/write/connect"
@@ -42,6 +43,10 @@ import (
 //   - error: Non-nil if content is missing, type is invalid, required flags
 //     are missing, or file operations fail
 func Run(cmd *cobra.Command, args []string, flags entity.AddConfig) error {
+	if _, ctxErr := rc.RequireContextDir(); ctxErr != nil {
+		cmd.SilenceUsage = true
+		return ctxErr
+	}
 	fType := strings.ToLower(args[0])
 
 	content, extractErr := extract.Content(args, flags)
@@ -81,12 +86,17 @@ func Run(cmd *cobra.Command, args []string, flags entity.AddConfig) error {
 
 	writeAdd.Added(cmd, fName)
 
+	stateDir, dirErr := state.Dir()
+	if dirErr != nil {
+		return dirErr
+	}
+
 	// Best-effort: publish to ctx Hub if --share is set.
 	if flags.Share {
 		pubEntry := hub.PublishEntry{
 			Type:    fType,
 			Content: content,
-			Origin:  filepath.Base(state.Dir()),
+			Origin:  filepath.Base(stateDir),
 		}
 		if pubErr := corePub.Run(
 			cmd, []hub.PublishEntry{pubEntry},
@@ -104,7 +114,7 @@ func Run(cmd *cobra.Command, args []string, flags entity.AddConfig) error {
 	// so the new entry is always #1 in file order. This coupling is
 	// intentional: if the prepend logic changes, this must be updated.
 	if fType == cfgEntry.Decision || fType == cfgEntry.Learning {
-		_ = trace.Record(fType+cfgTrace.RefFirstEntry, state.Dir())
+		_ = trace.Record(fType+cfgTrace.RefFirstEntry, stateDir)
 	}
 
 	return nil
diff --git a/internal/cli/add/core/doc.go b/internal/cli/add/core/doc.go
index 999ecf4cc..72a3e4e84 100644
--- a/internal/cli/add/core/doc.go
+++ b/internal/cli/add/core/doc.go
@@ -4,12 +4,61 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package core contains shared helpers for the add command:
-// entry formatting, content extraction, section insertion, and
-// input normalization.
-//
-// Subpackages: entry (predicates), example (type-specific examples),
-// extract (content from args/file), format (Markdown formatting),
-// insert (section-aware file insertion), normalize (section name
-// resolution).
+// Package core contains business logic for the add command.
+//
+// This package is an umbrella for the subpackages that power
+// ctx add. It does not export functions itself; instead it
+// coordinates five concerns through its child packages:
+//
+// # Entry Classification
+//
+// The entry subpackage classifies user input into context
+// file types (task, decision, learning, convention) using
+// predicate functions such as FileTypeIsTask. It also
+// detects when a task description is complex enough to
+// warrant a spec nudge via NeedsSpec.
+//
+// # Content Extraction
+//
+// The extract subpackage resolves the entry body from one
+// of three sources in priority order: the --file flag, CLI
+// positional arguments, or piped stdin. It returns an error
+// when no source provides content.
+//
+// # Markdown Formatting
+//
+// The format subpackage renders each entry type into its
+// Markdown representation. Tasks become checkbox items with
+// provenance tags (session, branch, commit, timestamp).
+// Decisions and learnings become structured ADR-style
+// sections with context, rationale, and consequence fields.
+// Conventions become simple list items.
+//
+// # Section-Aware Insertion
+//
+// The insert subpackage places formatted entries into the
+// correct position within existing context files. Tasks
+// land before the first pending item or after an explicit
+// section header. Decisions and learnings insert in
+// reverse-chronological order before existing entries.
+// Conventions append at the end.
+//
+// # Section Normalization
+//
+// The normalize subpackage ensures user-provided section
+// names carry the correct Markdown heading prefix before
+// insertion.
+//
+// # Example Text
+//
+// The example subpackage loads type-specific usage examples
+// from embedded YAML assets for display in cobra help text.
+//
+// # Data Flow
+//
+// The cmd/ layer calls extract.Content to obtain text, then
+// entry predicates to choose a formatter from the format
+// package, and finally insert.AppendEntry to merge the
+// result into the target file. The write/ layer persists
+// the bytes to disk.
 package core
diff --git a/internal/cli/add/core/entry/doc.go b/internal/cli/add/core/entry/doc.go
index 08048f8fb..2f6ef2f4e 100644
--- a/internal/cli/add/core/entry/doc.go
+++ b/internal/cli/add/core/entry/doc.go
@@ -4,9 +4,40 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package entry provides type predicates for context file entries.
+// Package entry provides type predicates and complexity
+// detection for context file entries used by the add
+// command.
 //
-// [FileTypeIsTask], [FileTypeIsDecision], and [FileTypeIsLearning]
-// check whether a file type string matches the corresponding entry
-// kind. Used by the add command to apply type-specific formatting.
+// # Type Predicates
+//
+// Three predicate functions classify a user-supplied file
+// type string into its canonical entry kind:
+//
+//   - [FileTypeIsTask] returns true when the input
+//     resolves to a task entry (e.g. "task", "tasks").
+//   - [FileTypeIsDecision] returns true for decision
+//     entries (e.g. "decision", "decisions").
+//   - [FileTypeIsLearning] returns true for learning
+//     entries (e.g. "learning", "learnings").
+//
+// Each function delegates to config/entry.FromUserInput
+// for alias resolution, so callers never deal with raw
+// string matching.
+//
+// # Spec Nudge Detection
+//
+// [NeedsSpec] inspects task content to decide whether the
+// add command should suggest creating a feature spec. It
+// fires when the text exceeds the length threshold from
+// .ctxrc (rc.SpecNudgeMinLen) or contains any of the
+// design-signal words configured via rc.SpecSignalWords.
+// The check is case-insensitive.
+//
+// # Data Flow
+//
+// The cmd/ layer passes the user-provided type string to
+// these predicates to select the correct formatter in the
+// format subpackage and the correct insertion strategy in
+// the insert subpackage. NeedsSpec is called after content
+// extraction to optionally emit a nudge message.
 package entry
diff --git a/internal/cli/add/core/example/doc.go b/internal/cli/add/core/example/doc.go
index 2bb7b67ee..44de731ae 100644
--- a/internal/cli/add/core/example/doc.go
+++ b/internal/cli/add/core/example/doc.go
@@ -4,10 +4,26 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package example provides type-specific usage examples for the
-// add command help text.
+// Package example provides type-specific usage examples
+// for the add command help text.
 //
-// [ForType] returns a formatted example string for the given entry
-// type (decision, learning, task, convention), displayed in the
-// cobra command's Example field.
+// # ForType
+//
+// [ForType] returns a formatted example string for a given
+// entry type such as "decision", "task", "learning", or
+// "convention". The examples are loaded from the embedded
+// commands.yaml asset via the desc package.
+//
+// The lookup key is formed by prefixing the entry type with
+// the add-command example key prefix defined in
+// config/embed/cmd. When the type is unrecognized or the
+// key is missing, ForType falls back to a generic default
+// example keyed by the default suffix.
+//
+// # Usage
+//
+// The cmd/ layer calls ForType during cobra command setup
+// to populate the Example field of each add subcommand.
+// This keeps example text centralized in YAML rather than
+// scattered across Go source files.
 package example
diff --git a/internal/cli/add/core/extract/doc.go b/internal/cli/add/core/extract/doc.go
index e83073a4a..dd0906c94 100644
--- a/internal/cli/add/core/extract/doc.go
+++ b/internal/cli/add/core/extract/doc.go
@@ -4,10 +4,39 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package extract reads entry content from command arguments or
-// from a file specified by --from-file.
+// Package extract reads entry content from the available
+// input sources for the add command.
 //
-// [Content] joins positional arguments into a single string, or
-// reads the file path from flags. Returns an error if both sources
-// are empty or if the file cannot be read.
+// # Content Resolution
+//
+// [Content] resolves the entry body by checking three
+// sources in strict priority order:
+//
+//  1. File flag: when AddConfig.FromFile is set, the
+//     file is read via io.SafeReadUserFile and its
+//     trimmed contents are returned.
+//  2. Positional arguments: when args has more than
+//     one element, args[1:] are joined with a space
+//     separator.
+//  3. Piped stdin: when stdin is not a terminal
+//     (character device), all lines are read with a
+//     bufio.Scanner and joined with newlines.
+//
+// If none of the sources produce content, Content returns
+// an errAdd.NoContent error so the cmd/ layer can display
+// a usage hint.
+//
+// # Error Handling
+//
+// File read failures surface as errFs.FileRead errors
+// that include the original path. Stdin scanner errors
+// surface as errFs.StdinRead. Both wrap the underlying
+// OS error for inspection.
+//
+// # Data Flow
+//
+// The cmd/ layer calls Content early in the add pipeline.
+// The returned string is then passed to a formatter in
+// the format subpackage and finally to insert.AppendEntry
+// for placement into the target context file.
 package extract
diff --git a/internal/cli/add/core/format/doc.go b/internal/cli/add/core/format/doc.go
index 28ddb4120..e452ba5cd 100644
--- a/internal/cli/add/core/format/doc.go
+++ b/internal/cli/add/core/format/doc.go
@@ -4,11 +4,42 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package format renders context entries into Markdown with
-// structured sections.
+// Package format renders context entries into their
+// Markdown representation for the add command.
 //
-// Each entry type has its own formatter: [Task] adds priority
-// labels, [Decision] adds Context/Rationale/Consequence sections,
-// [Learning] adds Context/Lesson/Application sections, and
-// [Convention] wraps content with a timestamp header.
+// # Entry Formatters
+//
+// Each entry type has a dedicated formatter that returns
+// a ready-to-insert Markdown string:
+//
+//   - [Task] produces a checkbox item with provenance
+//     tags. The output includes optional priority, the
+//     truncated session ID, git branch, commit hash, and
+//     a compact timestamp. Tags use the template strings
+//     from the tpl package.
+//   - [Decision] produces a structured ADR section with
+//     a timestamped heading, status marker, and three
+//     subsections: Context, Rationale, and Consequence.
+//   - [Learning] produces a structured section with a
+//     timestamped heading and three subsections: Context,
+//     Lesson, and Application.
+//   - [Convention] produces a simple Markdown list item
+//     prefixed with "- ".
+//
+// # Provenance Helpers
+//
+// Two unexported helpers support the Task formatter:
+//
+//   - truncateSessionID shortens a UUID to ShortIDLen
+//     characters, defaulting to "unknown" when empty.
+//   - defaultProvenance returns a value or "unknown"
+//     when empty, used for branch and commit fields.
+//
+// # Data Flow
+//
+// The cmd/ layer selects a formatter based on the entry
+// type predicates in the entry subpackage, passes the
+// user-supplied content and metadata, and hands the
+// resulting string to insert.AppendEntry for placement
+// in the target context file.
 package format
diff --git a/internal/cli/add/core/insert/doc.go b/internal/cli/add/core/insert/doc.go
index 00ff022ab..5f8b709dc 100644
--- a/internal/cli/add/core/insert/doc.go
+++ b/internal/cli/add/core/insert/doc.go
@@ -1,15 +1,46 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package insert handles section-aware insertion of entries into
-// context files.
-//
-// [AppendEntry] is the main entry point — it reads the target file,
-// finds the correct insertion point, and writes the updated content.
-// [AfterHeader] inserts below a specific heading, [Task] handles
-// task-specific logic (phase sections), and [AppendAtEnd] adds to
-// the file bottom as a fallback.
+// Package insert handles **section-aware insertion** of
+// new entries into context files, picking the right
+// location inside the target file (under the matching
+// Phase header, after the latest entry of the same type,
+// or at the file bottom as a fallback) instead of just
+// appending blindly.
+//
+// This package is the engine that lets `ctx add` know
+// where to put things. Without it every add would land at
+// the bottom and tasks would lose their phase grouping.
+//
+// # Public Surface
+//
+//   - **[AppendEntry](file, entry, opts)**: top-level
+//     entry point. Reads `file`, decides where to
+//     insert based on `opts.Type` and `opts.Phase`,
+//     writes the result back.
+//   - **[AfterHeader](lines, header, content)**:
+//     pure helper: insert `content` immediately after
+//     `header` (or at the end of `header`'s
+//     section, depending on the rule). Returns the
+//     new line slice.
+//   - **[Task](lines, entry, phase)**: task-specific
+//     placement: finds the right Phase header (per
+//     CONSTITUTION, tasks must stay in their Phase
+//     forever) and inserts under it.
+//   - **[AppendAtEnd](lines, content)**: fallback
+//     when no smarter location can be inferred.
+//
+// # Constitutional Honors
+//
+// The TASKS.md rule "tasks stay in their Phase
+// section permanently" is enforced here by
+// [Task]: a new task always gets the explicit Phase
+// header it was added under, never floats free.
+//
+// # Concurrency
+//
+// Filesystem-bound. Sequential within a single call.
 package insert
diff --git a/internal/cli/add/core/normalize/doc.go b/internal/cli/add/core/normalize/doc.go
index 061d6e8ee..c8f867d46 100644
--- a/internal/cli/add/core/normalize/doc.go
+++ b/internal/cli/add/core/normalize/doc.go
@@ -4,10 +4,37 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package normalize resolves and canonicalizes section names for
-// the add command.
+// Package normalize resolves and canonicalizes section
+// names for the add command.
 //
-// [TargetSection] maps user-provided section names (including
-// common aliases and abbreviations) to canonical section headers
-// used in TASKS.md and other context files.
+// # TargetSection
+//
+// [TargetSection] ensures a user-provided section name
+// carries the correct Markdown heading prefix. If the
+// input does not already start with "## ", the function
+// prepends it. The heading prefix is read from the
+// token.HeadingLevelThreeStart constant.
+//
+// Callers must not pass an empty string. The empty case
+// is handled by insert.Task before this function is
+// reached, so TargetSection always receives a non-empty
+// section name.
+//
+// # Usage Example
+//
+// A user runs:
+//
+//	ctx add task "fix tests" --section "Phase 1"
+//
+// The insert subpackage calls TargetSection("Phase 1")
+// which returns "## Phase 1". The insert logic then
+// searches for that heading in TASKS.md and places the
+// new task below it.
+//
+// # Data Flow
+//
+// The insert subpackage is the sole caller. It invokes
+// TargetSection inside TaskAfterSection before scanning
+// the file content for the heading. This keeps heading
+// format knowledge centralized in one function.
 package normalize
diff --git a/internal/cli/add/doc.go b/internal/cli/add/doc.go
index 7c80f31b7..f6d900308 100644
--- a/internal/cli/add/doc.go
+++ b/internal/cli/add/doc.go
@@ -4,24 +4,35 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package add provides the "ctx add" command for appending entries to context
-// files.
+// Package add provides the "ctx add" command for appending
+// entries to context files.
 //
-// It supports adding decisions, tasks, learnings, and conventions to their
-// respective files in the .context/ directory. Content can be provided via
-// command argument, --file flag, or stdin pipe.
+// The add command is the primary write interface for
+// populating .context/ files. It accepts content via
+// positional argument, --file flag, or stdin pipe and
+// routes entries to the appropriate file based on the
+// entry type argument.
 //
-// Supported entry types (defined in [config.FileType]):
-//   - decision/decisions: Appends to DECISIONS.md
-//   - task/tasks: Inserts into TASKS.md before first unchecked task,
-//     or under a named section when --section is provided
-//   - learning/learnings: Appends to LEARNINGS.md
-//   - convention/conventions: Appends to CONVENTIONS.md
+// # Supported Entry Types
 //
-// Example usage:
+// Entry types map to [config.FileType] values:
 //
-//	ctx add decision "Use PostgreSQL for primary database"
-//	ctx add task "Implement auth" --priority high --section "Phase 1"
+//   - decision / decisions: appends to DECISIONS.md
+//   - task / tasks: inserts into TASKS.md before the
+//     first unchecked item, or under a named section
+//     when --section is provided
+//   - learning / learnings: appends to LEARNINGS.md
+//   - convention / conventions: appends to CONVENTIONS.md
+//
+// # Example Usage
+//
+//	ctx add decision "Use PostgreSQL for primary DB"
+//	ctx add task "Implement auth" --section "Phase 1"
 //	ctx add learning --file notes.md
 //	echo "Use camelCase" | ctx add convention
+//
+// # Subpackages
+//
+//   - cmd/root: cobra command definition and flag binding
+//   - core: file-type routing and content insertion logic
 package add
diff --git a/internal/cli/agent/agent_test.go b/internal/cli/agent/agent_test.go
index 485334f66..812256893 100644
--- a/internal/cli/agent/agent_test.go
+++ b/internal/cli/agent/agent_test.go
@@ -11,6 +11,7 @@ import (
 	"testing"
 
 	"github.com/ActiveMemory/ctx/internal/cli/initialize"
+	"github.com/ActiveMemory/ctx/internal/testutil/testctx"
 )
 
 // TestAgentCommand tests the agent command.
@@ -27,6 +28,8 @@ func TestAgentCommand(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 
+	testctx.Declare(t, tmpDir)
+
 	// First init
 	initCmd := initialize.Cmd()
 	initCmd.SetArgs([]string{})
@@ -57,6 +60,8 @@ func TestAgentJSONOutput(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 
+	testctx.Declare(t, tmpDir)
+
 	// First init
 	initCmd := initialize.Cmd()
 	initCmd.SetArgs([]string{})
diff --git a/internal/cli/agent/cmd/root/cmd.go b/internal/cli/agent/cmd/root/cmd.go
index 432df3258..44099c2c5 100644
--- a/internal/cli/agent/cmd/root/cmd.go
+++ b/internal/cli/agent/cmd/root/cmd.go
@@ -56,6 +56,11 @@ func Cmd() *cobra.Command {
 		Long:    long,
 		Example: desc.Example(cmd.DescKeyAgent),
 		RunE: func(cmd *cobra.Command, args []string) error {
+			ctxDir, ctxErr := rc.RequireContextDir()
+			if ctxErr != nil {
+				cmd.SilenceUsage = true
+				return ctxErr
+			}
 			if !cmd.Flags().Changed(cFlag.Budget) {
 				budget = rc.TokenBudget()
 			}
@@ -73,10 +78,15 @@ func Cmd() *cobra.Command {
 				skillBody = sk
 			}
 
-			// Tier 8: Load ctx Hub entries.
+			// Tier 8: Load ctx Hub entries using the already-resolved
+			// ctxDir from the top-level RequireContextDir gate.
 			var sharedBodies []string
 			if includeShare {
-				sharedBodies = coreHub.LoadBodies()
+				var hubErr error
+				sharedBodies, hubErr = coreHub.LoadBodies(ctxDir)
+				if hubErr != nil {
+					return hubErr
+				}
 			}
 
 			return Run(
diff --git a/internal/cli/agent/cmd/root/doc.go b/internal/cli/agent/cmd/root/doc.go
index 2363bd5c1..1a1f723e6 100644
--- a/internal/cli/agent/cmd/root/doc.go
+++ b/internal/cli/agent/cmd/root/doc.go
@@ -1,13 +1,45 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package root implements the ctx agent command for generating
-// AI-ready context packets.
+// Package root implements **`ctx agent`**, the command
+// that produces an AI-ready, token-budgeted context
+// packet for injection into the next prompt.
 //
-// [Cmd] builds the cobra.Command with --budget, --format, and
-// --json flags. [Run] loads context, assembles a budget-aware
-// packet via core/budget, and renders it as Markdown or JSON.
+// `ctx agent` is the most-called user-facing command in
+// production: tool integrations (Claude Code's
+// `PreToolUse` hook, Copilot CLI's session-start hook,
+// the Cursor MCP server) all invoke it on every prompt
+// to assemble what the AI sees.
+//
+// # Public Surface
+//
+//   - **[Cmd]**: cobra command with `--budget N`
+//     (default 8000), `--format markdown|json`,
+//     `--include-hub`, and the `--prompt <text>`
+//     companion that lets the budget allocator score
+//     for relevance against the user's actual prompt.
+//   - **[Run]**: loads context via
+//     [internal/context/load], optionally folds in
+//     hub entries (`--include-hub`), assembles the
+//     packet via [internal/cli/agent/core/budget],
+//     scores entries via
+//     [internal/cli/agent/core/score], and renders
+//     to stdout.
+//
+// # Performance
+//
+// The whole call typically completes in 50-150 ms on
+// a project with hundreds of entries. The cost is
+// dominated by file IO (the per-file token estimator
+// is fast), which is why
+// [internal/context/load] reads the smallest set of
+// files needed and the budget allocator stops as
+// soon as the budget is exhausted.
+//
+// # Concurrency
+//
+// Single-process, sequential.
 package root
diff --git a/internal/cli/agent/cmd/root/run.go b/internal/cli/agent/cmd/root/run.go
index a55796c77..ba952818b 100644
--- a/internal/cli/agent/cmd/root/run.go
+++ b/internal/cli/agent/cmd/root/run.go
@@ -50,7 +50,11 @@ func Run(
 	skillBody string,
 	hubBodies []string,
 ) error {
-	if coreCooldown.Active(session, cooldown) {
+	active, cooldownErr := coreCooldown.Active(session, cooldown)
+	if cooldownErr != nil {
+		return cooldownErr
+	}
+	if active {
 		return nil
 	}
 
@@ -76,10 +80,14 @@ func Run(
 			hubBodies,
 		)
 	}
-
-	if outputErr == nil {
-		coreCooldown.TouchTombstone(session)
+	if outputErr != nil {
+		return outputErr
 	}
 
-	return outputErr
+	// Output succeeded: persist the tombstone so subsequent
+	// invocations inside the cooldown window stay silent. A
+	// failure here (disk full, permission denied) is a rare
+	// edge case we surface rather than swallow: without the
+	// marker the next run will not suppress.
+	return coreCooldown.TouchTombstone(session)
 }
diff --git a/internal/cli/agent/core/budget/doc.go b/internal/cli/agent/core/budget/doc.go
index 1b5cfc462..1f9c5c15f 100644
--- a/internal/cli/agent/core/budget/doc.go
+++ b/internal/cli/agent/core/budget/doc.go
@@ -1,16 +1,76 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package budget implements the token-budgeted context assembly
-// algorithm for the agent command.
-//
-// [AssemblePacket] allocates tokens across seven tiers (constitution,
-// tasks, conventions, decisions, learnings, steering, skill). [Split] divides
-// remaining budget between two scored sections. [FillSection]
-// applies two-tier degradation: full entries then title-only
-// summaries. [FitItems] and [EstimateSliceTokens] handle
-// per-item token accounting.
+// Package budget implements the **token-budgeted context
+// assembly algorithm** behind `ctx agent`. Given a token budget
+// (`--budget N`, default 8000) and a loaded [entity.Context],
+// it produces an AI-ready packet that maximizes information
+// density without exceeding the budget. This is the single most
+// performance-sensitive operation in ctx because it runs at
+// the head of every prompt in tool integrations that use the
+// hook+MCP pipeline.
+//
+// # The Seven-Tier Allocation
+//
+// [AssemblePacket] walks the seven content tiers in priority
+// order, each with its own share of the budget:
+//
+//  1. **CONSTITUTION**: always full; never truncated.
+//  2. **TASKS**: current and pending work.
+//  3. **CONVENTIONS**: coding patterns the AI must follow.
+//  4. **DECISIONS**: index table, then full entries as
+//     budget permits.
+//  5. **LEARNINGS**: same shape as decisions.
+//  6. **STEERING**: matched files for this prompt.
+//  7. **SKILL**: bundled instructions if a skill matched.
+//
+// Lower tiers see whatever budget the higher tiers leave
+// behind. The constitution invariant ("context loading is the
+// first step of every session") translates into "the
+// constitution is always in the packet, no exceptions".
+//
+// # Two-Tier Degradation
+//
+// [FillSection] handles the per-section degradation: when full
+// entries do not fit, it **falls back to title-only summaries**
+// (the index-table form) so the AI still sees that an entry
+// exists and can request it by ID. The degradation point is
+// chosen to maximize the count of entries the AI sees,
+// trading depth for breadth.
+//
+// # Splitting Between Two Sections
+//
+// [Split] divides the remaining budget between two scored
+// sections (typically DECISIONS vs LEARNINGS) using a
+// score-weighted ratio: a section with twice the relevance
+// score gets twice the budget share. Score comes from
+// [internal/cli/agent/core/score]; budget enforces.
+//
+// # Token Accounting
+//
+// [EstimateSliceTokens] is the rough-but-stable estimator
+// used throughout: ~4 chars per token for English Markdown,
+// with adjustments for code-fence-heavy content. It is not
+// the exact count the model will see but is consistent
+// enough to keep the assembled packet under budget.
+// [FitItems] is the greedy item picker: takes the highest-
+// scored items first, stops when the next one would push
+// the running total over budget.
+//
+// # Render Path
+//
+// [render.go] formats the assembled tiers into the final
+// markdown packet with section headers, separators, and
+// the read-order preamble the AI uses to navigate the
+// content. [out.go] writes the packet to stdout (or to the
+// MCP response, depending on caller).
+//
+// # Concurrency
+//
+// All functions are pure data transformations over the
+// loaded context. Concurrent callers never race; the
+// algorithm holds no module-level state.
 package budget
diff --git a/internal/cli/agent/core/cooldown/cooldown.go b/internal/cli/agent/core/cooldown/cooldown.go
index 3de0d182e..b10ee3592 100644
--- a/internal/cli/agent/core/cooldown/cooldown.go
+++ b/internal/cli/agent/core/cooldown/cooldown.go
@@ -7,6 +7,7 @@
 package cooldown
 
 import (
+	"errors"
 	"os"
 	"path/filepath"
 	"time"
@@ -14,9 +15,7 @@ import (
 	"github.com/ActiveMemory/ctx/internal/config/agent"
 	"github.com/ActiveMemory/ctx/internal/config/dir"
 	"github.com/ActiveMemory/ctx/internal/config/fs"
-	"github.com/ActiveMemory/ctx/internal/config/warn"
 	ctxIo "github.com/ActiveMemory/ctx/internal/io"
-	ctxLog "github.com/ActiveMemory/ctx/internal/log/warn"
 	"github.com/ActiveMemory/ctx/internal/rc"
 )
 
@@ -28,16 +27,33 @@ import (
 //   - cooldown: duration to suppress repeated output
 //
 // Returns:
-//   - bool: true if tombstone exists and is within the cooldown window
-func Active(session string, cooldown time.Duration) bool {
+//   - bool: true when the tombstone exists and is within the cooldown
+//     window. Always false when cooldown is disabled for this call
+//     (empty session or non-positive cooldown) or when no tombstone
+//     has ever been written.
+//   - error: [os.ErrNotExist] is treated as a legitimate "not active"
+//     exit condition and NOT returned. Any other failure (context
+//     directory undeclared, permission denied, I/O failure) is
+//     surfaced so callers do not silently treat it as "not active"
+//     and emit output they meant to suppress.
+func Active(session string, cooldown time.Duration) (bool, error) {
 	if session == "" || cooldown <= 0 {
-		return false
+		return false, nil
 	}
-	info, err := os.Stat(TombstonePath(session))
-	if err != nil {
-		return false
+	path, pathErr := TombstonePath(session)
+	if pathErr != nil {
+		return false, pathErr
+	}
+	info, statErr := os.Stat(path)
+	if statErr != nil {
+		if errors.Is(statErr, os.ErrNotExist) {
+			// No prior emission; legitimately not active.
+			return false, nil
+		}
+		// Permission denied, I/O failure, etc.: surface.
+		return false, statErr
 	}
-	return time.Since(info.ModTime()) < cooldown
+	return time.Since(info.ModTime()) < cooldown, nil
 }
 
 // TouchTombstone creates or updates the tombstone file for the given
@@ -45,14 +61,22 @@ func Active(session string, cooldown time.Duration) bool {
 //
 // Parameters:
 //   - session: session identifier (typically the caller's PID)
-func TouchTombstone(session string) {
+//
+// Returns:
+//   - error: nil on an empty session (no-op). Non-nil when the
+//     tombstone path cannot be resolved or the file cannot be
+//     written. Callers decide whether a persistence failure
+//     warrants aborting the command; this helper no longer
+//     logs and swallows on its own.
+func TouchTombstone(session string) error {
 	if session == "" {
-		return
+		return nil
 	}
-	p := TombstonePath(session)
-	if writeErr := ctxIo.SafeWriteFile(p, nil, fs.PermSecret); writeErr != nil {
-		ctxLog.Warn(warn.Write, p, writeErr)
+	p, pathErr := TombstonePath(session)
+	if pathErr != nil {
+		return pathErr
 	}
+	return ctxIo.SafeWriteFile(p, nil, fs.PermSecret)
 }
 
 // TombstonePath returns the filesystem path for a session's tombstone.
@@ -61,14 +85,22 @@ func TouchTombstone(session string) {
 //   - session: session identifier
 //
 // Returns:
-//   - string: absolute path in the system temp directory
-func TombstonePath(session string) string {
-	stateDir := filepath.Join(rc.ContextDir(), dir.State)
-	mkdirErr := ctxIo.SafeMkdirAll(
+//   - string: absolute path under the context state directory.
+//   - error: non-nil when the context directory is not declared or
+//     the state directory cannot be created. Previously this helper
+//     logged the mkdir error and returned the path anyway, guaranteeing
+//     a second failure on the subsequent write; propagating keeps the
+//     first failure authoritative.
+func TombstonePath(session string) (string, error) {
+	ctxDir, err := rc.ContextDir()
+	if err != nil {
+		return "", err
+	}
+	stateDir := filepath.Join(ctxDir, dir.State)
+	if mkdirErr := ctxIo.SafeMkdirAll(
 		stateDir, fs.PermRestrictedDir,
-	)
-	if mkdirErr != nil {
-		ctxLog.Warn(warn.Mkdir, stateDir, mkdirErr)
+	); mkdirErr != nil {
+		return "", mkdirErr
 	}
-	return filepath.Join(stateDir, agent.TombstonePrefix+session)
+	return filepath.Join(stateDir, agent.TombstonePrefix+session), nil
 }
diff --git a/internal/cli/agent/core/cooldown/doc.go b/internal/cli/agent/core/cooldown/doc.go
index 80f727f35..3bbcb928d 100644
--- a/internal/cli/agent/core/cooldown/doc.go
+++ b/internal/cli/agent/core/cooldown/doc.go
@@ -4,11 +4,45 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package cooldown prevents redundant context loads within a
-// single session.
+// Package cooldown prevents redundant context emissions
+// within a single agent session.
 //
-// [Active] checks whether a cooldown tombstone exists and is
-// recent enough. [TouchTombstone] creates or refreshes the
-// tombstone file. [TombstonePath] returns the state file path
-// for a given session ID.
+// When the agent command outputs a context packet, it
+// writes a tombstone file to mark the emission time. On
+// subsequent invocations within the same session, the
+// cooldown check skips output if the tombstone is still
+// fresh, avoiding noise in short-lived tool loops.
+//
+// # Active
+//
+// [Active] reports whether the cooldown tombstone for a
+// given session exists and was modified within the given
+// duration. It returns false when the session is empty,
+// the cooldown is non-positive, or the tombstone is
+// missing or stale; other failures surface as errors.
+//
+// # TouchTombstone
+//
+// [TouchTombstone] creates or updates the tombstone file
+// for a session. It writes an empty file with restricted
+// permissions (fs.PermSecret) to the state directory
+// under .context/state/. Path-resolution and write
+// failures are returned to the caller, which decides
+// whether cooldown bookkeeping should abort the command.
+//
+// # TombstonePath
+//
+// [TombstonePath] returns the absolute filesystem path
+// for a session's tombstone. The file is placed in the
+// .context/state/ directory with a prefix defined by
+// agent.TombstonePrefix. The state directory is created
+// on demand; creation failures are returned as errors.
+//
+// # Data Flow
+//
+// The agent command's Run function checks Active before
+// assembling a context packet. If Active returns true,
+// Run exits early. Otherwise it builds the packet,
+// emits it, and calls TouchTombstone to start the
+// cooldown window.
 package cooldown
diff --git a/internal/cli/agent/core/doc.go b/internal/cli/agent/core/doc.go
index 99c4db4cd..f6e3d27bf 100644
--- a/internal/cli/agent/core/doc.go
+++ b/internal/cli/agent/core/doc.go
@@ -1,12 +1,66 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package core provides helper functions for the agent command.
+// Package core contains business logic for the agent
+// command, which assembles an AI-optimized context packet
+// from project files.
 //
-// This package contains budget allocation, scoring, cooldown management,
-// content extraction, and output rendering logic used by the agent
-// command's Run function.
+// This package is an umbrella that coordinates seven
+// subpackages, each handling one aspect of the context
+// assembly pipeline:
+//
+// # Budget Allocation (budget/)
+//
+// The budget subpackage parses the --budget flag, splits
+// the token budget across context sections using priority
+// weights, and renders the final packet with truncation
+// when content exceeds the allocation.
+//
+// # Cooldown Management (cooldown/)
+//
+// The cooldown subpackage prevents redundant emissions in
+// rapid tool loops by maintaining per-session tombstone
+// files with a configurable time-to-live.
+//
+// # Content Extraction (extract/)
+//
+// The extract subpackage pulls structured items from
+// context files: bullet items, checkbox tasks (checked
+// and unchecked), constitution rules, and active tasks
+// from TASKS.md.
+//
+// # Hub Content (hub/)
+//
+// The hub subpackage loads shared knowledge from the
+// .context/hub/ directory, where files received from a
+// ctx Hub instance are stored.
+//
+// # File Ordering (sort/)
+//
+// The sort subpackage determines the read order for
+// context files based on the priority sequence defined
+// in config, filtering out empty files.
+//
+// # Steering and Skills (steering/)
+//
+// The steering subpackage loads steering files filtered
+// by the current tool and resolves named skills from
+// the .context/skills/ directory.
+//
+// # Score (score/)
+//
+// The score subpackage evaluates context completeness by
+// checking which context files exist and are populated,
+// producing a numeric health score.
+//
+// # Data Flow
+//
+// The cmd/ layer calls into these subpackages to build
+// the context packet: sort determines file order, extract
+// pulls items, budget allocates space, steering adds
+// instructions, and the result is rendered to stdout.
+// Cooldown gates the entire pipeline.
 package core
diff --git a/internal/cli/agent/core/extract/doc.go b/internal/cli/agent/core/extract/doc.go
index a61e2fb4a..e9dfe234f 100644
--- a/internal/cli/agent/core/extract/doc.go
+++ b/internal/cli/agent/core/extract/doc.go
@@ -4,11 +4,47 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package extract pulls structured items from context file content.
+// Package extract pulls structured items from context
+// file content for inclusion in agent context packets.
 //
-// [BulletItems] extracts markdown list items up to a limit.
-// [CheckboxItems] extracts task checkboxes. [UncheckedTasks]
-// returns only pending tasks. [ActiveTasks] combines unchecked
-// tasks from the loaded context. [ConstitutionRules] extracts
-// inviolable rules from CONSTITUTION.md.
+// # Bullet Extraction
+//
+// [BulletItems] parses Markdown content and returns up
+// to a caller-specified limit of bullet list items. It
+// strips the "- " prefix, skips empty items and lines
+// that start with "#" (headers). The regex pattern comes
+// from config/regex.BulletItem.
+//
+// # Checkbox Extraction
+//
+// [CheckboxItems] extracts text from both checked and
+// unchecked Markdown checkbox items ("- [x]" and
+// "- [ ]"). It delegates to config/regex.Task for
+// pattern matching and task.Content for field extraction.
+//
+// # Unchecked Task Filtering
+//
+// [UncheckedTasks] returns only pending tasks (those
+// matching "- [ ]") with the checkbox prefix preserved
+// for display. It uses regex.TaskMultiline to handle
+// multi-line task bodies and task.Pending to filter.
+//
+// # Context-Aware Helpers
+//
+// Two convenience functions operate on a loaded Context:
+//
+//   - [ActiveTasks] extracts unchecked tasks from the
+//     TASKS.md file in the context.
+//   - [ConstitutionRules] extracts checkbox items from
+//     CONSTITUTION.md for inclusion as inviolable rules.
+//
+// Both return nil when the target file is absent.
+//
+// # Data Flow
+//
+// The budget subpackage calls these functions to populate
+// individual sections of the context packet. BulletItems
+// feeds into decisions, learnings, and conventions.
+// ActiveTasks feeds into the tasks section. Constitution
+// rules are emitted first as hard constraints.
 package extract
diff --git a/internal/cli/agent/core/hub/doc.go b/internal/cli/agent/core/hub/doc.go
index 524814a44..f3e71e63f 100644
--- a/internal/cli/agent/core/hub/doc.go
+++ b/internal/cli/agent/core/hub/doc.go
@@ -1,13 +1,36 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package hub loads ctx Hub entries from
+// Package hub loads shared knowledge files from
 // .context/hub/ for inclusion in agent context packets.
 //
-// Key exports: [LoadBodies].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # LoadBodies
+//
+// [LoadBodies] reads all Markdown files from the hub
+// directory and returns their contents as a string
+// slice. It silently skips directories, non-Markdown
+// files, empty files, and files that fail to read.
+//
+// The hub directory is resolved as a subdirectory of
+// the caller-supplied context directory using the
+// path defined in cfgHub.DirHub. Any directory read
+// failure, including a missing hub directory, is
+// returned as an error so the caller can surface it.
+//
+// # File Reading
+//
+// Each file is read through io.SafeReadUserFile, which
+// enforces size limits and symlink safety. Only files
+// with the .md extension (file.ExtMarkdown) are
+// considered.
+//
+// # Data Flow
+//
+// The budget subpackage calls LoadBodies during context
+// assembly to append shared knowledge sections to the
+// agent packet. Hub content is additive and does not
+// displace project-local context files.
 package hub
diff --git a/internal/cli/agent/core/hub/load.go b/internal/cli/agent/core/hub/load.go
index a0ae7be48..b2c85ae0b 100644
--- a/internal/cli/agent/core/hub/load.go
+++ b/internal/cli/agent/core/hub/load.go
@@ -14,22 +14,37 @@ import (
 	"github.com/ActiveMemory/ctx/internal/config/file"
 	cfgHub "github.com/ActiveMemory/ctx/internal/config/hub"
 	"github.com/ActiveMemory/ctx/internal/io"
-	"github.com/ActiveMemory/ctx/internal/rc"
 )
 
 // LoadBodies reads all markdown files from .context/hub/
 // and returns their contents as strings.
 //
-// Returns nil if the shared directory does not exist or is
-// empty (shared knowledge is opt-in).
+// ctxDir is supplied by the caller so this function does not
+// re-resolve it; the caller decides whether "no context dir" is
+// benign and handles it before invoking us.
+//
+// Any directory read failure (including a missing hub directory)
+// is propagated so the caller can surface it. [LoadBodies] is only
+// invoked when the user explicitly requested shared content (e.g.
+// `ctx agent --include-share`); telling them "everything is fine,
+// here's an empty list" when the hub directory does not exist hides
+// a real setup gap.
+//
+// Per-file read failures inside an existing hub directory are still
+// tolerated silently. One unreadable sibling should not blank the
+// rest.
+//
+// Parameters:
+//   - ctxDir: absolute path to the context directory
 //
 // Returns:
 //   - []string: file contents, one per shared file
-func LoadBodies() []string {
-	dir := filepath.Join(rc.ContextDir(), cfgHub.DirHub)
+//   - error: non-nil on any directory read failure
+func LoadBodies(ctxDir string) ([]string, error) {
+	dir := filepath.Join(ctxDir, cfgHub.DirHub)
 	entries, readErr := os.ReadDir(dir)
 	if readErr != nil {
-		return nil
+		return nil, readErr
 	}
 
 	var bodies []string
@@ -48,5 +63,5 @@ func LoadBodies() []string {
 		}
 		bodies = append(bodies, string(data))
 	}
-	return bodies
+	return bodies, nil
 }
diff --git a/internal/cli/agent/core/score/doc.go b/internal/cli/agent/core/score/doc.go
index d5ad96275..f01b8d977 100644
--- a/internal/cli/agent/core/score/doc.go
+++ b/internal/cli/agent/core/score/doc.go
@@ -1,15 +1,56 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package score computes relevance scores for context entries to
-// prioritize budget allocation.
-//
-// [Recency] scores by age (7d=1.0, 30d=0.7, 90d=0.4, older=0.2).
-// [Relevance] scores by keyword overlap with active tasks (0.0-1.0).
-// [Score] combines both into a 0.0-2.0 range. [All] scores a
-// batch of entries. [ExtractTaskKeywords] builds the keyword set
-// from active task text.
+// Package score computes **per-entry relevance scores** so
+// the budgeted context-assembly algorithm in
+// [internal/cli/agent/core/budget] can decide which
+// decisions, learnings, and conventions to inject when there
+// is not enough budget to inject all of them.
+//
+// The score is a deliberately simple two-component number:
+// **recency** plus **relevance to current work**. Either
+// component alone produces a poor ranking; together they
+// approximate "what would a helpful colleague pull off the
+// shelf?".
+//
+// # The Two Components
+//
+//   - **[Recency](entry)**: bucketed by age;
+//
+//     ≤  7 days   → 1.0
+//     ≤ 30 days   → 0.7
+//     ≤ 90 days   → 0.4
+//     older       → 0.2
+//
+//     Buckets (rather than a continuous decay) keep the
+//     ordering stable across small input shifts and make
+//     the scoring trivially debuggable.
+//
+//   - **[Relevance](entry, taskKeywords)**: fraction of
+//     the entry's salient tokens that overlap with
+//     [ExtractTaskKeywords](activeTasks). Range 0.0-1.0.
+//     Stop words come from the embedded list in
+//     [internal/assets/read/lookup.StopWords].
+//
+// [Score](entry, taskKeywords) sums the two for a 0.0-2.0
+// composite. [All](entries, taskKeywords) is the bulk
+// scorer that returns parallel slices for the budget
+// allocator.
+//
+// # Why Bucketed Recency
+//
+// A continuous exponential decay would be technically
+// purer but produces "score jitter": entries reorder
+// minute-to-minute as their ages cross the decimal
+// boundary. Bucketed recency means an entry's relative
+// rank only changes when it crosses a real threshold
+// (week, month, quarter), which is the cadence at which
+// users actually expect their context to age.
+//
+// # Concurrency
+//
+// All functions are pure. Concurrent callers never race.
 package score
diff --git a/internal/cli/agent/core/sort/doc.go b/internal/cli/agent/core/sort/doc.go
index 07942b0e2..9d2cc1021 100644
--- a/internal/cli/agent/core/sort/doc.go
+++ b/internal/cli/agent/core/sort/doc.go
@@ -4,11 +4,35 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package sort provides the file read-order for context assembly.
+// Package sort provides the file read-order for context
+// assembly in the agent command.
 //
-// [ReadOrder] returns context file names in priority order as
-// defined by config.FileReadOrder, filtered to files that exist
-// in the loaded context. Constitution rules come first, then
-// tasks, conventions, architecture, decisions, learnings,
+// # ReadOrder
+//
+// [ReadOrder] returns context file paths in the priority
+// sequence defined by cfgCtx.ReadOrder. The order places
+// high-priority files first: constitution, then tasks,
+// conventions, architecture, decisions, learnings,
 // glossary, and playbook.
+//
+// The function iterates cfgCtx.ReadOrder, looks up each
+// file name in the loaded Context, and includes it only
+// when the file exists and is non-empty. Paths are
+// returned as full paths by joining ctx.Dir with the
+// file name.
+//
+// # Filtering
+//
+// Empty files are excluded so the agent packet does not
+// waste budget on placeholder files. The emptiness check
+// uses the IsEmpty field on entity.ContextFile, which is
+// set during context loading.
+//
+// # Data Flow
+//
+// The budget subpackage calls ReadOrder to determine
+// which files to process and in what sequence. The
+// returned paths feed into the section assembly loop
+// where each file's content is extracted, scored, and
+// truncated to fit the token budget.
 package sort
diff --git a/internal/cli/agent/core/steering/doc.go b/internal/cli/agent/core/steering/doc.go
index 313ef5fca..fc8738eb4 100644
--- a/internal/cli/agent/core/steering/doc.go
+++ b/internal/cli/agent/core/steering/doc.go
@@ -1,12 +1,42 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package steering loads steering and skill content
-// for inclusion in the agent context packet.
+// Package steering loads steering and skill content for
+// inclusion in the agent context packet.
 //
-// It filters steering files by tool and inclusion mode,
-// and resolves named skills from the skills directory.
+// # LoadBodies
+//
+// [LoadBodies] reads all steering files from the
+// steering directory (rc.SteeringDir), filters them by
+// the current tool (rc.Tool), and returns the body
+// content of each matching file as a string slice.
+//
+// Steering files are YAML-frontmattered Markdown files
+// that contain tool-specific instructions. The filtering
+// uses steering.Filter with the current tool identifier,
+// so only files that apply to the active AI tool are
+// included in the context packet. When the steering
+// directory is missing or contains no applicable files,
+// LoadBodies returns nil.
+//
+// # LoadSkill
+//
+// [LoadSkill] loads a named skill from the
+// .context/skills/ directory and returns its body
+// content. Skills are standalone instruction files that
+// can be referenced by name in agent prompts.
+//
+// When the skill is not found, LoadSkill returns an
+// errSkill.NotFound error. Other read failures return
+// errSkill.LoadQuoted with the underlying cause.
+//
+// # Data Flow
+//
+// The budget subpackage calls LoadBodies during context
+// assembly to append steering instructions after the
+// core context files. LoadSkill is called when a
+// specific skill is requested via the --skill flag.
 package steering
diff --git a/internal/cli/agent/core/steering/steering.go b/internal/cli/agent/core/steering/steering.go
index 71b83907b..869398a76 100644
--- a/internal/cli/agent/core/steering/steering.go
+++ b/internal/cli/agent/core/steering/steering.go
@@ -56,9 +56,11 @@ func LoadBodies() []string {
 //   - string: Body content of the loaded skill
 //   - error: Non-nil if the skill is missing or unreadable
 func LoadSkill(name string) (string, error) {
-	skillsDir := filepath.Join(
-		rc.ContextDir(), dir.Skills,
-	)
+	ctxDir, ctxErr := rc.ContextDir()
+	if ctxErr != nil {
+		return "", ctxErr
+	}
+	skillsDir := filepath.Join(ctxDir, dir.Skills)
 
 	sk, loadErr := skill.Load(skillsDir, name)
 	if loadErr != nil {
diff --git a/internal/cli/agent/doc.go b/internal/cli/agent/doc.go
index 245361fde..3961ee736 100644
--- a/internal/cli/agent/doc.go
+++ b/internal/cli/agent/doc.go
@@ -74,4 +74,9 @@
 // applies a len/4 heuristic. This deliberately overestimates, which
 // is correct for budgeting: it is better to include slightly less
 // than to overflow the context window.
+//
+// # Subpackages
+//
+//	cmd/root: cobra command definition and flag binding
+//	core: budget assembly, scoring, and formatting logic
 package agent
diff --git a/internal/cli/backup/cmd.go b/internal/cli/backup/cmd.go
deleted file mode 100644
index 5d21bf4e1..000000000
--- a/internal/cli/backup/cmd.go
+++ /dev/null
@@ -1,44 +0,0 @@
-//   /    ctx:                         https://ctx.ist
-// ,'`./    do you remember?
-// `.,'\
-//   \    Copyright 2026-present Context contributors.
-//                 SPDX-License-Identifier: Apache-2.0
-
-package backup
-
-import (
-	"github.com/spf13/cobra"
-
-	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
-	"github.com/ActiveMemory/ctx/internal/config/archive"
-	"github.com/ActiveMemory/ctx/internal/config/embed/cmd"
-	"github.com/ActiveMemory/ctx/internal/config/embed/flag"
-	cFlag "github.com/ActiveMemory/ctx/internal/config/flag"
-)
-
-// Cmd returns the "ctx backup" top-level command.
-//
-// Returns:
-//   - *cobra.Command: Configured backup command
-func Cmd() *cobra.Command {
-	short, long := desc.Command(cmd.DescKeyBackup)
-
-	c := &cobra.Command{
-		Use:     cmd.UseBackup,
-		Short:   short,
-		Long:    long,
-		Example: desc.Example(cmd.DescKeyBackup),
-		RunE: func(cmd *cobra.Command, _ []string) error {
-			return Run(cmd)
-		},
-	}
-
-	c.Flags().String(cFlag.Scope, archive.BackupScopeAll,
-		desc.Flag(flag.DescKeyBackupScope),
-	)
-	c.Flags().Bool(cFlag.JSON, false,
-		desc.Flag(flag.DescKeyBackupJson),
-	)
-
-	return c
-}
diff --git a/internal/cli/backup/doc.go b/internal/cli/backup/doc.go
deleted file mode 100644
index f310039ec..000000000
--- a/internal/cli/backup/doc.go
+++ /dev/null
@@ -1,14 +0,0 @@
-//   /    ctx:                         https://ctx.ist
-// ,'`./    do you remember?
-// `.,'\
-//   \    Copyright 2026-present Context contributors.
-//                 SPDX-License-Identifier: Apache-2.0
-
-// Package backup implements the ctx backup top-level command.
-//
-// Creates timestamped tar.gz archives of project context and/or
-// global Claude Code data. Optionally copies archives to an SMB
-// share configured via CTX_BACKUP_SMB_URL.
-//
-// Key exports: [Cmd], [Run].
-package backup
diff --git a/internal/cli/backup/run.go b/internal/cli/backup/run.go
deleted file mode 100644
index f2093ee0f..000000000
--- a/internal/cli/backup/run.go
+++ /dev/null
@@ -1,99 +0,0 @@
-//   /    ctx:                         https://ctx.ist
-// ,'`./    do you remember?
-// `.,'\
-//   \    Copyright 2026-present Context contributors.
-//                 SPDX-License-Identifier: Apache-2.0
-
-package backup
-
-import (
-	"encoding/json"
-	"os"
-	"time"
-
-	"github.com/spf13/cobra"
-
-	coreArchive "github.com/ActiveMemory/ctx/internal/cli/system/core/archive"
-	"github.com/ActiveMemory/ctx/internal/config/archive"
-	"github.com/ActiveMemory/ctx/internal/config/env"
-	cFlag "github.com/ActiveMemory/ctx/internal/config/flag"
-	"github.com/ActiveMemory/ctx/internal/config/token"
-	"github.com/ActiveMemory/ctx/internal/entity"
-	errBackup "github.com/ActiveMemory/ctx/internal/err/backup"
-	errInit "github.com/ActiveMemory/ctx/internal/err/initialize"
-	"github.com/ActiveMemory/ctx/internal/write/backup"
-)
-
-// Run executes the backup command logic.
-//
-// Creates timestamped tar.gz archives of project context and/or global
-// Claude Code data. Optionally copies archives to an SMB share.
-//
-// Parameters:
-//   - cmd: Cobra command for output and flag access
-//
-// Returns:
-//   - error: Non-nil on invalid scope, home directory lookup failure,
-//     SMB parse error, or archive creation failure
-func Run(cmd *cobra.Command) error {
-	scope, _ := cmd.Flags().GetString(cFlag.Scope)
-	jsonOut, _ := cmd.Flags().GetBool(cFlag.JSON)
-
-	switch scope {
-	case archive.BackupScopeProject,
-		archive.BackupScopeGlobal,
-		archive.BackupScopeAll:
-	default:
-		return errBackup.InvalidScope(scope)
-	}
-
-	home, homeErr := os.UserHomeDir()
-	if homeErr != nil {
-		return errInit.HomeDir(homeErr)
-	}
-
-	smbURL := os.Getenv(env.BackupSMBURL)
-	smbSubdir := os.Getenv(env.BackupSMBSubdir)
-	var smb *coreArchive.SMBConfig
-	if smbURL != "" {
-		var smbErr error
-		smb, smbErr = coreArchive.ParseSMBConfig(smbURL, smbSubdir)
-		if smbErr != nil {
-			return errBackup.SMBConfig(smbErr)
-		}
-	}
-
-	timestamp := time.Now().Format(archive.BackupTimestampFormat)
-	var results []entity.BackupResult
-
-	if scope == archive.BackupScopeProject || scope == archive.BackupScopeAll {
-		result, projErr := coreArchive.BackupProject(
-			cmd.ErrOrStderr(), home, timestamp, smb,
-		)
-		if projErr != nil {
-			return errBackup.Project(projErr)
-		}
-		results = append(results, result)
-	}
-
-	if scope == archive.BackupScopeGlobal || scope == archive.BackupScopeAll {
-		result, globalErr := coreArchive.BackupGlobal(
-			cmd.ErrOrStderr(), home, timestamp, smb,
-		)
-		if globalErr != nil {
-			return errBackup.Global(globalErr)
-		}
-		results = append(results, result)
-	}
-
-	if jsonOut {
-		enc := json.NewEncoder(cmd.OutOrStdout())
-		enc.SetIndent("", token.Indent2)
-		return enc.Encode(results)
-	}
-
-	for _, r := range results {
-		backup.ResultLine(cmd, r.Scope, r.Archive, r.Size, r.SMBDest)
-	}
-	return nil
-}
diff --git a/internal/cli/change/cmd/root/doc.go b/internal/cli/change/cmd/root/doc.go
index 88d5d464a..ec72a12da 100644
--- a/internal/cli/change/cmd/root/doc.go
+++ b/internal/cli/change/cmd/root/doc.go
@@ -4,10 +4,38 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package root implements the ctx change command for detecting
-// context and code changes since a reference time.
+// Package root implements the "ctx change" command for
+// detecting context and code changes since a reference
+// time.
 //
-// [Cmd] builds the cobra.Command with --since flag. [Run] resolves
-// the reference time (from flag, markers, or event log), scans for
-// context file changes and git history, and renders a summary.
+// # What It Does
+//
+// The command shows what changed in the project since
+// a given point in time. It scans both context files
+// (.context/ directory) and git history, then renders
+// a unified summary to stdout.
+//
+// # Flags
+//
+//   - --since: Reference time as a duration (e.g.
+//     "24h", "7d") or a date string (e.g.
+//     "2026-03-01"). When omitted the command falls
+//     back to session markers or the event log to
+//     find the most recent reference point.
+//
+// # Output
+//
+// A human-readable list grouped by change source.
+// Context-file changes show which files were modified
+// and how many entries were added. Code changes show
+// a git-log summary with commit counts and affected
+// files.
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command and binds the
+// --since flag. [Run] resolves the reference time
+// via [detect.ReferenceTime], scans for context and
+// code changes via [scan], and renders through
+// [render.List].
 package root
diff --git a/internal/cli/change/cmd/root/run.go b/internal/cli/change/cmd/root/run.go
index 123824a7f..de484c2b9 100644
--- a/internal/cli/change/cmd/root/run.go
+++ b/internal/cli/change/cmd/root/run.go
@@ -13,6 +13,7 @@ import (
 	"github.com/ActiveMemory/ctx/internal/cli/change/core/render"
 	"github.com/ActiveMemory/ctx/internal/cli/change/core/scan"
 	errInit "github.com/ActiveMemory/ctx/internal/err/initialize"
+	"github.com/ActiveMemory/ctx/internal/rc"
 	writeChange "github.com/ActiveMemory/ctx/internal/write/change"
 )
 
@@ -29,6 +30,10 @@ import (
 // Returns:
 //   - error: Non-nil if reference time detection fails
 func Run(cmd *cobra.Command, since string) error {
+	if _, ctxErr := rc.RequireContextDir(); ctxErr != nil {
+		cmd.SilenceUsage = true
+		return ctxErr
+	}
 	refTime, refLabel, err := detect.ReferenceTime(since)
 	if err != nil {
 		return errInit.DetectReferenceTime(err)
diff --git a/internal/cli/change/core/cmd_test.go b/internal/cli/change/core/cmd_test.go
index b8fbb2b43..09693bdb7 100644
--- a/internal/cli/change/core/cmd_test.go
+++ b/internal/cli/change/core/cmd_test.go
@@ -212,7 +212,10 @@ func TestDetectReferenceTime_Fallback(t *testing.T) {
 }
 
 func TestDetectReferenceTime_FromMarkers(t *testing.T) {
-	tmp := t.TempDir()
+	tmp := filepath.Join(t.TempDir(), ".context")
+	if mkErr := os.MkdirAll(tmp, 0o700); mkErr != nil {
+		t.Fatalf("mkdir: %v", mkErr)
+	}
 	t.Setenv("CTX_DIR", tmp)
 	rc.Reset()
 
@@ -260,7 +263,10 @@ func TestDetectReferenceTime_FromMarkers(t *testing.T) {
 }
 
 func TestFindContextChanges(t *testing.T) {
-	tmp := t.TempDir()
+	tmp := filepath.Join(t.TempDir(), ".context")
+	if mkErr := os.MkdirAll(tmp, 0o700); mkErr != nil {
+		t.Fatalf("mkdir: %v", mkErr)
+	}
 	t.Setenv("CTX_DIR", tmp)
 	rc.Reset()
 
@@ -300,7 +306,10 @@ func TestFindContextChanges(t *testing.T) {
 }
 
 func TestFindContextChanges_EmptyDir(t *testing.T) {
-	tmp := t.TempDir()
+	tmp := filepath.Join(t.TempDir(), ".context")
+	if mkErr := os.MkdirAll(tmp, 0o700); mkErr != nil {
+		t.Fatalf("mkdir: %v", mkErr)
+	}
 	t.Setenv("CTX_DIR", tmp)
 	rc.Reset()
 
diff --git a/internal/cli/change/core/detect/detect.go b/internal/cli/change/core/detect/detect.go
index c900bc171..91e67d71e 100644
--- a/internal/cli/change/core/detect/detect.go
+++ b/internal/cli/change/core/detect/detect.go
@@ -22,16 +22,25 @@ import (
 )
 
 // FromMarkers finds the second most recent ctx-loaded-* marker file.
-// The most recent is the current session's marker.
+// The most recent is the current session's marker, so the reference
+// point for change detection is the one before it.
 //
 // Returns:
-//   - time.Time: Marker file modification time
-//   - bool: True if a valid marker was found
-func FromMarkers() (time.Time, bool) {
-	stateDir := filepath.Join(rc.ContextDir(), dir.State)
+//   - time.Time: Marker file modification time on success.
+//   - error: [errCtx.ErrDirNotDeclared] when no context dir is
+//     declared; the underlying error from [os.ReadDir] when the state
+//     directory cannot be read; [os.ErrNotExist] when fewer than two
+//     marker files exist (no previous session to compare against).
+//     Callers treat any non-nil error as "try the next source".
+func FromMarkers() (time.Time, error) {
+	ctxDir, err := rc.ContextDir()
+	if err != nil {
+		return time.Time{}, err
+	}
+	stateDir := filepath.Join(ctxDir, dir.State)
 	entries, readDirErr := os.ReadDir(stateDir)
 	if readDirErr != nil {
-		return time.Time{}, false
+		return time.Time{}, readDirErr
 	}
 
 	type markerInfo struct {
@@ -51,7 +60,8 @@ func FromMarkers() (time.Time, bool) {
 	}
 
 	if len(markers) < 2 {
-		return time.Time{}, false
+		// No previous-session marker on disk yet.
+		return time.Time{}, os.ErrNotExist
 	}
 
 	// Sort by modtime descending.
@@ -60,20 +70,28 @@ func FromMarkers() (time.Time, bool) {
 	})
 
 	// Second most recent = previous session.
-	return markers[1].modTime, true
+	return markers[1].modTime, nil
 }
 
 // FromEvents scans events.jsonl in reverse for the last
 // context-load-gate event.
 //
 // Returns:
-//   - time.Time: Event timestamp
-//   - bool: True if a valid event was found
-func FromEvents() (time.Time, bool) {
-	eventsPath := filepath.Join(rc.ContextDir(), dir.State, event.FileLog)
+//   - time.Time: Event timestamp on success.
+//   - error: [errCtx.ErrDirNotDeclared] when no context dir is
+//     declared; the underlying error from the event log reader when
+//     the file cannot be read; [os.ErrNotExist] when no matching
+//     load-gate event is present or its timestamp cannot be parsed.
+//     Callers treat any non-nil error as "try the next source".
+func FromEvents() (time.Time, error) {
+	ctxDir, err := rc.ContextDir()
+	if err != nil {
+		return time.Time{}, err
+	}
+	eventsPath := filepath.Join(ctxDir, dir.State, event.FileLog)
 	data, readErr := io.SafeReadUserFile(eventsPath)
 	if readErr != nil {
-		return time.Time{}, false
+		return time.Time{}, readErr
 	}
 
 	lines := strings.Split(strings.TrimSpace(string(data)), token.NewlineLF)
@@ -84,9 +102,10 @@ func FromEvents() (time.Time, bool) {
 			continue
 		}
 		if t, ok := ExtractTimestamp(line); ok {
-			return t, true
+			return t, nil
 		}
 	}
 
-	return time.Time{}, false
+	// No matching load-gate event in the log.
+	return time.Time{}, os.ErrNotExist
 }
diff --git a/internal/cli/change/core/detect/doc.go b/internal/cli/change/core/detect/doc.go
index 62303cd45..02f84bace 100644
--- a/internal/cli/change/core/detect/doc.go
+++ b/internal/cli/change/core/detect/doc.go
@@ -1,13 +1,52 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package detect resolves reference timestamps for change detection.
+// Package detect resolves the **reference timestamp**
+// `ctx change` uses to compute "what changed since". The
+// CLI offers three signals; this package picks the best
+// one and returns it.
 //
-// [FromMarkers] reads the session marker file for the last known
-// timestamp. [FromEvents] reads the event log. [ReferenceTime]
-// combines both with the --since flag to pick the best reference.
-// [ParseSinceFlag] parses user-provided duration or date strings.
+// `ctx change` answers "what moved since I was last in
+// this project?": context file edits, code commits,
+// directories touched. Picking the right "since when"
+// is the package's only job.
+//
+// # The Three Signals
+//
+// In priority order:
+//
+//  1. **Explicit `--since`**: `ctx change --since
+//     2026-04-12` or `--since 3d`. Parsed by
+//     [ParseSinceFlag] into a time.Time.
+//  2. **Session markers**: `[FromMarkers]` scans the
+//     `ctx-loaded-*` marker files in `state/` and
+//     returns the previous session's marker time. The
+//     most useful "since" for "since I was last here".
+//  3. **Event log**: `[FromEvents]` falls back to
+//     the last context-load-gate event timestamp
+//     recorded in `events.jsonl`.
+//
+// [ReferenceTime] composes the three: returns the
+// `--since` value when set; otherwise tries markers
+// first, then the event log; falls back to a default
+// window ("30 days ago") when neither source works.
+//
+// # Flag Parsing
+//
+// [ParseSinceFlag] accepts:
+//
+//   - **Date**: `2026-04-12` (parsed midnight
+//     UTC).
+//   - **Duration**: `3d`, `12h`, `2w`, `1m` (rich
+//     duration syntax beyond Go's stdlib).
+//   - **`yesterday`**, **`today`**: relative
+//     keywords.
+//
+// # Concurrency
+//
+// Filesystem-bound and stateless. Concurrent
+// callers never race.
 package detect
diff --git a/internal/cli/change/core/detect/parse.go b/internal/cli/change/core/detect/parse.go
index d7ba4c484..bd4e9a503 100644
--- a/internal/cli/change/core/detect/parse.go
+++ b/internal/cli/change/core/detect/parse.go
@@ -40,12 +40,12 @@ func ReferenceTime(since string) (time.Time, string, error) {
 	}
 
 	// Try marker files.
-	if t, ok := FromMarkers(); ok {
+	if t, markersErr := FromMarkers(); markersErr == nil {
 		return t, format.DurationAgo(time.Since(t)), nil
 	}
 
 	// Try events.jsonl.
-	if t, ok := FromEvents(); ok {
+	if t, eventsErr := FromEvents(); eventsErr == nil {
 		return t, format.DurationAgo(time.Since(t)), nil
 	}
 
diff --git a/internal/cli/change/core/doc.go b/internal/cli/change/core/doc.go
index 79cc141fa..3e9518998 100644
--- a/internal/cli/change/core/doc.go
+++ b/internal/cli/change/core/doc.go
@@ -4,10 +4,52 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package core contains shared helpers for the change command:
-// reference time detection, filesystem scanning, code summarization,
-// and output rendering.
+// Package core contains business logic for the change
+// command, which reports what changed in the project
+// since a reference point in time.
 //
-// Subpackages: detect (reference time), render (output formatting),
-// scan (filesystem and git queries).
+// This package is an umbrella that coordinates three
+// subpackages, each handling one stage of the change
+// detection pipeline:
+//
+// # Reference Time Detection (detect/)
+//
+// The detect subpackage resolves the reference time
+// from which changes are measured. It parses human
+// timestamps, duration strings, and session identifiers
+// to produce a concrete time.Time.
+//
+// # Filesystem and Git Scanning (scan/)
+//
+// The scan subpackage queries two data sources:
+//
+//   - Context file changes: it reads .context/ and
+//     returns Markdown files modified after the
+//     reference time, sorted by modification time
+//     descending.
+//   - Code changes: it runs git log to summarize
+//     commit count, latest message, affected
+//     directories, and contributing authors since the
+//     reference time.
+//
+// # Output Rendering (render/)
+//
+// The render subpackage formats scan results for two
+// audiences:
+//
+//   - [render.List] produces a full Markdown report for
+//     terminal display with headings, file lists, and
+//     code summaries.
+//   - [render.ChangesForHook] produces a compact
+//     single-line summary for hook relay injection
+//     into AI tool prompts.
+//
+// # Data Flow
+//
+// The cmd/ layer resolves a reference time via detect,
+// calls scan.FindContextChanges and
+// scan.SummarizeCodeChanges, then passes the results
+// to render.List or render.ChangesForHook depending on
+// the output mode. The write/ layer handles final
+// output to the cobra command.
 package core
diff --git a/internal/cli/change/core/render/doc.go b/internal/cli/change/core/render/doc.go
index 56783df9d..9a26b81f7 100644
--- a/internal/cli/change/core/render/doc.go
+++ b/internal/cli/change/core/render/doc.go
@@ -4,9 +4,44 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package render formats change detection results for terminal output.
+// Package render formats change detection results for
+// terminal output and hook injection.
 //
-// [List] renders context file changes and code summaries as a
-// human-readable list. [ChangesForHook] renders the same data in
-// a compact format suitable for hook relay output.
+// # List
+//
+// [List] renders the full CLI output for ctx change.
+// It builds a Markdown document with three sections:
+//
+//   - A reference point label showing the time anchor.
+//   - A context changes section listing each modified
+//     .context/ file with its modification timestamp.
+//   - A code changes section showing commit count,
+//     latest commit message, affected directories, and
+//     contributing authors.
+//
+// When no changes are found in either category, List
+// emits a "no changes" message instead.
+//
+// # ChangesForHook
+//
+// [ChangesForHook] renders the same data in a compact
+// single-line format suitable for hook relay injection.
+// It concatenates context file names and commit counts
+// into a brief summary prefixed with a standard label.
+// Returns an empty string when there are no changes,
+// allowing the hook layer to skip injection entirely.
+//
+// # Commit Count Formatting
+//
+// The unexported commitCount helper formats an integer
+// commit count with correct singular/plural text using
+// localized templates from the embedded assets. It
+// returns "1 commit" or "N commits" as appropriate.
+//
+// # Data Flow
+//
+// The cmd/ layer calls List for terminal output or
+// ChangesForHook for hook relay mode. Both functions
+// receive the reference label, context changes, and
+// code summary produced by the scan subpackage.
 package render
diff --git a/internal/cli/change/core/scan/doc.go b/internal/cli/change/core/scan/doc.go
index fde9dff57..d2511eb5f 100644
--- a/internal/cli/change/core/scan/doc.go
+++ b/internal/cli/change/core/scan/doc.go
@@ -4,11 +4,51 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package scan queries the filesystem and git history for changes
-// since a reference time.
+// Package scan queries the filesystem and git history
+// for changes since a reference time.
 //
-// [FindContextChanges] returns .context/ files modified after the
-// reference time. [SummarizeCodeChanges] extracts commit counts,
-// latest message, affected directories, and authors from git log.
-// [UniqueTopDirs] deduplicates directory paths from git output.
+// # FindContextChanges
+//
+// [FindContextChanges] reads the .context/ directory
+// and returns Markdown files whose modification time is
+// after the given reference time. Results are sorted by
+// modification time descending (most recent first).
+// Directories and non-Markdown files are skipped.
+//
+// # SummarizeCodeChanges
+//
+// [SummarizeCodeChanges] produces a CodeSummary by
+// running git log commands. It collects four pieces of
+// information:
+//
+//   - Commit count since the reference time.
+//   - Latest commit message (first line of oneline).
+//   - Unique top-level directories touched by commits.
+//   - Unique author names from the commit history.
+//
+// All git failures produce an empty summary rather than
+// an error, so the change command works gracefully in
+// non-git directories.
+//
+// # Helper Functions
+//
+// [GitLogSince] wraps execGit.LogSince with a --since
+// filter derived from the reference time. The time is
+// formatted as RFC 3339 internally so no caller input
+// reaches exec.Command, satisfying gosec G204.
+//
+// [UniqueTopDirs] extracts unique top-level directory
+// names from newline-separated file paths. It splits
+// each path at the first "/" and deduplicates.
+//
+// [UniqueLines] returns sorted unique non-empty lines
+// from newline-separated output, used for deduplicating
+// author names.
+//
+// # Data Flow
+//
+// The cmd/ layer calls FindContextChanges and
+// SummarizeCodeChanges with the resolved reference
+// time, then passes both results to the render
+// subpackage for formatting.
 package scan
diff --git a/internal/cli/change/core/scan/scan.go b/internal/cli/change/core/scan/scan.go
index 5b9217fad..4402475ab 100644
--- a/internal/cli/change/core/scan/scan.go
+++ b/internal/cli/change/core/scan/scan.go
@@ -29,7 +29,10 @@ import (
 //   - []entity.ContextChange: Modified files sorted by modtime descending
 //   - error: Non-nil if the context directory cannot be read
 func FindContextChanges(refTime time.Time) ([]entity.ContextChange, error) {
-	dir := rc.ContextDir()
+	dir, ctxErr := rc.ContextDir()
+	if ctxErr != nil {
+		return nil, ctxErr
+	}
 	entries, readDirErr := os.ReadDir(dir)
 	if readDirErr != nil {
 		return nil, readDirErr
diff --git a/internal/cli/change/doc.go b/internal/cli/change/doc.go
index eed98f6c0..085ff1b6b 100644
--- a/internal/cli/change/doc.go
+++ b/internal/cli/change/doc.go
@@ -4,10 +4,26 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package change implements the ctx change command, which detects
-// context and code changes since the last session or a specified
-// time.
+// Package change implements the ctx change command, which
+// detects context and code changes since the last session
+// or a specified time.
 //
-// It registers the root subcommand and delegates to core/ for
-// detection, scanning, and rendering.
+// The change command scans .context/ files and the git
+// working tree for modifications, additions, and deletions
+// that occurred after a reference timestamp. This helps
+// AI agents and users understand what shifted between
+// sessions without manually diffing files.
+//
+// # Detection Strategy
+//
+// The core/ subpackage walks context files and compares
+// modification times against a cutoff. Git-tracked source
+// files are checked via git diff. Results are grouped by
+// change type (context vs code) and rendered as a
+// human-readable summary or JSON.
+//
+// # Subpackages
+//
+//   - cmd/root: cobra command definition and flag binding
+//   - core: change detection, scanning, and rendering
 package change
diff --git a/internal/cli/cli_test.go b/internal/cli/cli_test.go
index 429461b31..f3206c686 100644
--- a/internal/cli/cli_test.go
+++ b/internal/cli/cli_test.go
@@ -65,6 +65,13 @@ func TestBinaryIntegration(t *testing.T) {
 		t.Fatalf("failed to create test dir: %v", err)
 	}
 
+	// Under the explicit-context-dir model each subprocess invocation
+	// must declare CTX_DIR. t.Setenv mutates the current process env
+	// and exec.Cmd with cmd.Env == nil inherits that env, so a single
+	// Setenv here propagates to every child below, and is unset
+	// automatically at test end.
+	t.Setenv("CTX_DIR", filepath.Join(testDir, ".context"))
+
 	// Subtest: ctx init creates expected files
 	t.Run("init creates expected files", func(t *testing.T) {
 		initCmd := exec.Command(binaryPath, "init") //nolint:gosec // test binary
diff --git a/internal/cli/compact/cmd/root/doc.go b/internal/cli/compact/cmd/root/doc.go
index c7b46352e..cdea11ab3 100644
--- a/internal/cli/compact/cmd/root/doc.go
+++ b/internal/cli/compact/cmd/root/doc.go
@@ -4,11 +4,42 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package root implements the ctx compact command for archiving
-// completed tasks and cleaning up context files.
+// Package root implements the "ctx compact" command for
+// cleaning up context files by archiving completed tasks
+// and removing empty sections.
 //
-// [Cmd] builds the cobra.Command with --archive flag. [Run] calls
-// core.CompactTasks to move completed tasks to the Completed
-// section, removes empty sections, and optionally archives to
-// .context/archive/.
+// # What It Does
+//
+// The command performs two housekeeping passes over
+// the .context/ directory:
+//
+//  1. Task compaction: moves completed tasks in
+//     TASKS.md to a "Completed (Recent)" section.
+//     When --archive or .ctxrc auto_archive is
+//     enabled, older completed tasks are moved to
+//     .context/archive/ files.
+//
+//  2. Section cleanup: removes empty markdown
+//     sections from all context files (TASKS.md,
+//     DECISIONS.md, LEARNINGS.md, CONVENTIONS.md).
+//
+// # Flags
+//
+//   - --archive: Create .context/archive/ for old
+//     completed tasks. Also enabled automatically
+//     when the auto_archive option is set in .ctxrc.
+//
+// # Output
+//
+// Prints a heading, per-file change counts (tasks
+// moved, sections removed), and a final summary
+// line. When nothing changed it prints "all clean".
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command with the --archive
+// flag. [Run] loads context via [context/load],
+// delegates task compaction to [core/task], reloads
+// context, then runs [tidy.CompactContext] for
+// section cleanup.
 package root
diff --git a/internal/cli/compact/cmd/root/run.go b/internal/cli/compact/cmd/root/run.go
index 0dc19218a..5d50ca340 100644
--- a/internal/cli/compact/cmd/root/run.go
+++ b/internal/cli/compact/cmd/root/run.go
@@ -34,6 +34,10 @@ import (
 // Returns:
 //   - error: Non-nil if context loading fails or .context/ is not found
 func Run(cmd *cobra.Command, archive bool) error {
+	if _, ctxErr := rc.RequireContextDir(); ctxErr != nil {
+		cmd.SilenceUsage = true
+		return ctxErr
+	}
 	ctx, err := load.Do("")
 	if err != nil {
 		if _, ok := errors.AsType[*errCtx.NotFoundError](err); ok {
diff --git a/internal/cli/compact/compact_test.go b/internal/cli/compact/compact_test.go
index c4a2df472..8a1285b8e 100644
--- a/internal/cli/compact/compact_test.go
+++ b/internal/cli/compact/compact_test.go
@@ -13,6 +13,7 @@ import (
 	"github.com/ActiveMemory/ctx/internal/cli/add"
 	"github.com/ActiveMemory/ctx/internal/cli/initialize"
 	taskComplete "github.com/ActiveMemory/ctx/internal/cli/task/cmd/complete"
+	"github.com/ActiveMemory/ctx/internal/testutil/testctx"
 )
 
 // TestCompactCommand tests the compact command.
@@ -29,6 +30,8 @@ func TestCompactCommand(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 
+	testctx.Declare(t, tmpDir)
+
 	// First init
 	initCmd := initialize.Cmd()
 	initCmd.SetArgs([]string{})
@@ -58,6 +61,8 @@ func TestCompactWithTasks(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 
+	testctx.Declare(t, tmpDir)
+
 	// First init
 	initCmd := initialize.Cmd()
 	initCmd.SetArgs([]string{})
diff --git a/internal/cli/compact/core/doc.go b/internal/cli/compact/core/doc.go
index 65f91c889..5eda61a58 100644
--- a/internal/cli/compact/core/doc.go
+++ b/internal/cli/compact/core/doc.go
@@ -4,11 +4,39 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package core contains the pure-logic compaction algorithm for
-// the compact command.
+// Package core contains business logic for the compact
+// command, which moves completed tasks from TASKS.md
+// into a Completed section and optionally archives them.
 //
-// [CompactTasks] takes the current TASKS.md content, identifies
-// completed top-level tasks (with all children complete), moves
-// them to the Completed section, and returns a [CompactResult]
-// with no I/O side effects — callers own file writes.
+// This package delegates its work to the task
+// subpackage. It does not export functions directly.
+//
+// # Task Compaction (task/)
+//
+// The task subpackage provides [task.CompactTasks],
+// which orchestrates the full compaction pipeline:
+//
+//  1. Calls tidy.CompactContext to scan TASKS.md for
+//     checked items ("- [x]") outside the Completed
+//     section. Only top-level tasks where all nested
+//     subtasks are also complete are considered.
+//  2. Reports each moved and skipped task via the
+//     write/compact output helpers, truncating long
+//     descriptions for display.
+//  3. Writes the updated TASKS.md content to disk.
+//  4. When the --archive flag is set, writes completed
+//     task blocks to a dated file in .context/archive/
+//     using tidy.WriteArchive.
+//
+// CompactTasks returns the number of tasks moved and
+// an error if the file write fails. A zero count with
+// nil error means no completed tasks were found.
+//
+// # Data Flow
+//
+// The cmd/ layer loads the context, calls
+// task.CompactTasks with the cobra command, loaded
+// context, and archive flag. The write/ layer handles
+// user-facing output messages. The tidy package owns
+// the pure-logic compaction algorithm.
 package core
diff --git a/internal/cli/compact/core/task/doc.go b/internal/cli/compact/core/task/doc.go
index 5008de213..a6d538cc5 100644
--- a/internal/cli/compact/core/task/doc.go
+++ b/internal/cli/compact/core/task/doc.go
@@ -4,8 +4,29 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package task moves completed tasks to the Completed section
-// and optionally archives them. Tasks with incomplete subtasks
-// are skipped. Archive output goes to .context/archive/ when
-// enabled via flags or .ctxrc configuration.
+// Package task moves completed tasks from their original
+// phase sections in TASKS.md into the Completed section at
+// the bottom of the file.
+//
+// The compaction algorithm scans TASKS.md for checked items
+// ("- [x]") that appear outside the Completed section. Each
+// match includes its nested content: indented lines below
+// the task line are treated as subtasks or details and move
+// together with their parent. Tasks with at least one
+// unchecked subtask are skipped to avoid orphaning
+// in-progress work.
+//
+// # Archival
+//
+// When the archive flag is set, compacted tasks are also
+// written to a dated file in .context/archive/ with a
+// standardized heading. This preserves traceability for
+// completed work without cluttering the active task list.
+//
+// # Write Safety
+//
+// File writes use [internal/io.SafeWriteFile] which writes
+// atomically (temp + rename) to avoid partial writes on
+// crash. The function returns the count of tasks moved so
+// callers can report progress.
 package task
diff --git a/internal/cli/compact/doc.go b/internal/cli/compact/doc.go
index 5087bf98b..cc0a3266b 100644
--- a/internal/cli/compact/doc.go
+++ b/internal/cli/compact/doc.go
@@ -4,10 +4,27 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package compact implements the "ctx compact" command for cleaning up
-// and consolidating context files.
+// Package compact implements the "ctx compact" command for
+// cleaning up and consolidating context files.
 //
-// The compact command performs maintenance on .context/ files, including
-// moving completed tasks to a dedicated section, optionally archiving
-// old content, and removing empty sections.
+// The compact command performs maintenance on .context/
+// files by moving completed tasks to a dedicated archive
+// section, removing empty sections, and optionally
+// archiving old content that exceeds configured retention
+// thresholds. This keeps the active context lean so that
+// AI agents can consume it within token budgets.
+//
+// # What Compact Does
+//
+// When invoked, compact walks TASKS.md and relocates
+// checked-off items to a "Completed" section at the
+// bottom of the file. Empty headings left behind are
+// pruned. Other context files (DECISIONS.md, LEARNINGS.md)
+// can be compacted to remove superseded entries.
+//
+// # Subpackages
+//
+//   - cmd/root: cobra command definition and flag binding
+//   - core: task relocation, section pruning, and
+//     archival logic
 package compact
diff --git a/internal/cli/config/cmd/schema/doc.go b/internal/cli/config/cmd/schema/doc.go
index 8c6189523..3fc99e304 100644
--- a/internal/cli/config/cmd/schema/doc.go
+++ b/internal/cli/config/cmd/schema/doc.go
@@ -4,9 +4,37 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package schema implements the ctx config schema subcommand.
+// Package schema implements the "ctx config schema"
+// subcommand that prints the embedded JSON Schema for
+// the .ctxrc configuration file.
 //
-// [Cmd] builds the cobra.Command that outputs the embedded JSON
-// Schema for .ctxrc to stdout, enabling editor integration and
-// validation tooling.
+// # What It Does
+//
+// Reads the JSON Schema from the embedded asset
+// bundle and writes it to stdout. The schema
+// defines all valid .ctxrc fields, their types,
+// defaults, and constraints.
+//
+// # Flags
+//
+// None. The command accepts no arguments.
+//
+// # Output
+//
+// Raw JSON written to stdout. The output can be
+// piped to a file or used for editor integration:
+//
+//	ctx config schema > ctxrc-schema.json
+//
+// Editors that support JSON Schema (VS Code,
+// JetBrains, Neovim with LSP) can use this file
+// to provide autocompletion and validation when
+// editing .ctxrc.
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command. The RunE handler
+// reads the schema via [schema.Schema] from the
+// embedded assets and writes it through
+// [writeConfig.Schema].
 package schema
diff --git a/internal/cli/config/cmd/status/doc.go b/internal/cli/config/cmd/status/doc.go
index 33f8439c1..3e1cb352e 100644
--- a/internal/cli/config/cmd/status/doc.go
+++ b/internal/cli/config/cmd/status/doc.go
@@ -4,10 +4,34 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package status implements the ctx config status subcommand.
+// Package status implements the "ctx config status"
+// subcommand that displays the active .ctxrc profile.
 //
-// [Cmd] builds the cobra.Command. [Run] reads the active .ctxrc
-// file, detects the current profile, and displays resolved
-// configuration values including context directory, token budget,
-// and notification settings.
+// # What It Does
+//
+// Detects which configuration profile is currently
+// active (base or dev) and prints it to stdout. The
+// profile name is read from the profile: field of
+// the parsed .ctxrc via [profile.Detect].
+//
+// # Flags
+//
+// None. The command accepts no arguments.
+//
+// # Output
+//
+// A single line identifying the active profile name
+// (e.g. "base" or "dev"). This is useful for scripts
+// and shell prompts that need to know which config
+// is active.
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command with the
+// AnnotationSkipInit annotation so it works before
+// full context initialization. [Run] calls
+// [profile.Detect] to identify the active profile
+// and writes the result through
+// [writeConfig.ProfileStatus].
 package status
diff --git a/internal/cli/config/cmd/status/run_test.go b/internal/cli/config/cmd/status/run_test.go
index 1efe6b7eb..cfd7c3b03 100644
--- a/internal/cli/config/cmd/status/run_test.go
+++ b/internal/cli/config/cmd/status/run_test.go
@@ -17,11 +17,12 @@ import (
 
 	"github.com/ActiveMemory/ctx/internal/config/file"
 	"github.com/ActiveMemory/ctx/internal/rc"
+	"github.com/ActiveMemory/ctx/internal/testutil/testctx"
 )
 
 const (
 	devContent  = "profile: dev\nnotify:\n  events:\n    - loop\n"
-	baseContent = "profile: base\n# context_dir: .context\n"
+	baseContent = "profile: base\n"
 )
 
 func newTestCmd() *cobra.Command {
@@ -39,7 +40,11 @@ func chdirWithCleanup(t *testing.T, dir string) {
 	t.Helper()
 	origDir, _ := os.Getwd()
 	_ = os.Chdir(dir)
-	rc.Reset()
+	// Under the explicit-context-dir model, .ctxrc is read from
+	// `filepath.Dir(CTX_DIR)/.ctxrc`. Declaring CTX_DIR at
+	// `<dir>/.context` keeps this test's root-adjacent .ctxrc
+	// visible to the loader.
+	testctx.Declare(t, dir)
 	t.Cleanup(func() {
 		_ = os.Chdir(origDir)
 		rc.Reset()
diff --git a/internal/cli/config/cmd/switchcmd/doc.go b/internal/cli/config/cmd/switchcmd/doc.go
index 58b8f8718..343b3c192 100644
--- a/internal/cli/config/cmd/switchcmd/doc.go
+++ b/internal/cli/config/cmd/switchcmd/doc.go
@@ -4,9 +4,41 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package switchcmd implements the ctx config switch subcommand.
+// Package switchcmd implements the "ctx config switch"
+// subcommand for switching between .ctxrc profiles.
 //
-// [Cmd] builds the cobra.Command. [Run] switches the active
-// .ctxrc profile by copying the named profile file over .ctxrc.
-// Profiles are stored as .ctxrc. files in the project root.
+// # What It Does
+//
+// Switches the active configuration by copying a
+// named profile file over .ctxrc. Profiles are stored
+// as .ctxrc.<name> files in the project root (e.g.
+// .ctxrc.dev, .ctxrc.base).
+//
+// # Arguments
+//
+// An optional positional argument specifying the
+// target profile:
+//
+//   - dev: switch to the development profile
+//   - base: switch to the base (production) profile
+//   - prod: alias for base
+//   - (none): toggle between dev and base
+//
+// # Flags
+//
+// None.
+//
+// # Output
+//
+// A confirmation line showing which profile is now
+// active, e.g. "Switched to dev profile."
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command with the
+// AnnotationSkipInit annotation so it works before
+// full context initialization. [Run] normalizes the
+// profile name (e.g. "prod" -> "base"), calls
+// [profile.SwitchTo] to copy the profile file, and
+// writes confirmation via [writeConfig.SwitchConfirm].
 package switchcmd
diff --git a/internal/cli/config/cmd/switchcmd/run_test.go b/internal/cli/config/cmd/switchcmd/run_test.go
index 8de63cbd2..c6b71b7d3 100644
--- a/internal/cli/config/cmd/switchcmd/run_test.go
+++ b/internal/cli/config/cmd/switchcmd/run_test.go
@@ -18,11 +18,12 @@ import (
 	"github.com/ActiveMemory/ctx/internal/cli/config/core/profile"
 	"github.com/ActiveMemory/ctx/internal/config/file"
 	"github.com/ActiveMemory/ctx/internal/rc"
+	"github.com/ActiveMemory/ctx/internal/testutil/testctx"
 )
 
 const (
 	devContent  = "profile: dev\nnotify:\n  events:\n    - loop\n"
-	baseContent = "profile: base\n# context_dir: .context\n"
+	baseContent = "profile: base\n"
 )
 
 func setupProfiles(t *testing.T) string {
@@ -42,7 +43,7 @@ func setupProfiles(t *testing.T) string {
 
 	origDir, _ := os.Getwd()
 	_ = os.Chdir(root)
-	rc.Reset()
+	testctx.Declare(t, root)
 	t.Cleanup(func() {
 		_ = os.Chdir(origDir)
 		rc.Reset()
diff --git a/internal/cli/config/core/doc.go b/internal/cli/config/core/doc.go
index dbf8843b9..db06f3ae0 100644
--- a/internal/cli/config/core/doc.go
+++ b/internal/cli/config/core/doc.go
@@ -4,11 +4,43 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package core contains configuration management helpers for the
-// config command.
+// Package core contains business logic for the config
+// command, which manages runtime configuration profiles
+// (.ctxrc files).
 //
-// [DetectProfile] reads the active profile name from .ctxrc.
-// [SwitchTo] copies a named profile over .ctxrc. [CopyProfile]
-// performs the file copy. [GitRoot] resolves the repository root
-// for locating project-level config files.
+// This package delegates its work to the profile
+// subpackage. It does not export functions directly.
+//
+// # Profile Management (profile/)
+//
+// The profile subpackage provides three exported
+// functions:
+//
+//   - [profile.Detect] reads the active profile name
+//     from the parsed .ctxrc via rc.RC(). Returns an
+//     empty string when no profile is set.
+//   - [profile.SwitchTo] copies the requested profile
+//     file over .ctxrc. It handles three cases: the
+//     profile is already active (no-op message), .ctxrc
+//     did not exist (created message), or a switch
+//     occurred (switched message). The source file is
+//     .ctxrc.base for the base profile and .ctxrc.dev
+//     for the dev profile.
+//   - [profile.Copy] performs the low-level file copy
+//     from a source profile file to .ctxrc using safe
+//     I/O helpers.
+//
+// [profile.GitRoot] resolves the git repository root
+// directory, used by the cmd/ layer to locate project-
+// level config files. It returns an error when git is
+// unavailable or the working directory is outside a
+// repository.
+//
+// # Data Flow
+//
+// The cmd/ layer calls profile.GitRoot to find the
+// project root, then profile.Detect to check the
+// current profile, and profile.SwitchTo to apply the
+// requested change. Status messages are returned as
+// strings for the write/ layer to display.
 package core
diff --git a/internal/cli/config/core/profile/doc.go b/internal/cli/config/core/profile/doc.go
index 3e0628881..bb7810d06 100644
--- a/internal/cli/config/core/profile/doc.go
+++ b/internal/cli/config/core/profile/doc.go
@@ -4,8 +4,48 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package profile manages .ctxrc profile detection, copying,
-// and switching. Supports toggling between dev and base
-// profiles, with prod as an alias for base. Also provides
-// git root detection for profile file operations.
+// Package profile manages **`.ctxrc` profile detection,
+// copying, and switching**, the engine behind
+// `ctx config switch` that lets a user toggle between
+// (typically) `dev` and `base` configurations without
+// hand-editing `.ctxrc`.
+//
+// The package is the *mutator*; the read-side cache
+// lives in [internal/rc].
+//
+// # The Profile Convention
+//
+// Profiles are stored as **per-profile files** in the
+// project root:
+//
+//   - `.ctxrc`: the active configuration.
+//   - `.ctxrc.dev`: the dev profile (verbose
+//     logs, webhook events, ...).
+//   - `.ctxrc.base`: the base / production
+//     profile (clean defaults).
+//
+// `prod` is recognized as an alias for `base`. New
+// profiles plug in as `.ctxrc.<name>`.
+//
+// # Public Surface
+//
+//   - **[Detect]**: returns the name of the
+//     currently-active profile (read from `.ctxrc`'s
+//     `profile:` field).
+//   - **[SwitchTo]**: copies `.ctxrc.<name>` over
+//     `.ctxrc`. Atomic via the standard
+//     write-temp-rename pattern.
+//   - **[Copy]**: performs the low-level file copy
+//     from a source profile file to `.ctxrc`.
+//   - **[GitRoot]**: resolves the project's git
+//     root for path operations (the profile files
+//     live there, not in the current working
+//     subdirectory).
+//
+// # Concurrency
+//
+// Filesystem-bound and stateless. `ctx` is
+// single-process; concurrent switches are not a
+// design concern.
 package profile
diff --git a/internal/cli/config/core/profile/profile_test.go b/internal/cli/config/core/profile/profile_test.go
index 3c59512fd..927b58cfb 100644
--- a/internal/cli/config/core/profile/profile_test.go
+++ b/internal/cli/config/core/profile/profile_test.go
@@ -13,18 +13,19 @@ import (
 
 	"github.com/ActiveMemory/ctx/internal/config/file"
 	"github.com/ActiveMemory/ctx/internal/rc"
+	"github.com/ActiveMemory/ctx/internal/testutil/testctx"
 )
 
 const (
 	devContent  = "profile: dev\nnotify:\n  events:\n    - loop\n"
-	baseContent = "profile: base\n# context_dir: .context\n"
+	baseContent = "profile: base\n"
 )
 
 func chdirWithCleanup(t *testing.T, dir string) {
 	t.Helper()
 	origDir, _ := os.Getwd()
 	_ = os.Chdir(dir)
-	rc.Reset()
+	testctx.Declare(t, dir)
 	t.Cleanup(func() {
 		_ = os.Chdir(origDir)
 		rc.Reset()
diff --git a/internal/cli/config/doc.go b/internal/cli/config/doc.go
index cda8f755d..6bdb7595e 100644
--- a/internal/cli/config/doc.go
+++ b/internal/cli/config/doc.go
@@ -4,10 +4,29 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package config implements the ctx config command group for
-// managing runtime configuration profiles.
+// Package config implements the ctx config command group
+// for managing runtime configuration profiles.
 //
-// Subcommands: schema (output JSON Schema), status (show active
-// config), switch (change profile). Profiles are .ctxrc.
-// files that can be swapped via ctx config switch.
+// Runtime configuration lives in .ctxrc files at the
+// project root. Multiple profiles can coexist as
+// .ctxrc. files and be swapped with ctx config
+// switch. The active profile controls tool selection,
+// hook behavior, notification routing, and other
+// runtime knobs without modifying code or context files.
+//
+// # Subcommands
+//
+//   - schema: outputs the JSON Schema for .ctxrc,
+//     useful for editor validation and documentation
+//   - status: prints the active profile name and key
+//     configuration values
+//   - switch: activates a named profile by copying
+//     .ctxrc.<name> to .ctxrc
+//
+// # Subpackages
+//
+//   - cmd/schema: JSON Schema output implementation
+//   - cmd/status: active config display
+//   - cmd/switchcmd: profile switching logic
+//   - core: shared config helpers
 package config
diff --git a/internal/cli/connection/cmd/listen/doc.go b/internal/cli/connection/cmd/listen/doc.go
index 7abff5624..8e6a5a104 100644
--- a/internal/cli/connection/cmd/listen/doc.go
+++ b/internal/cli/connection/cmd/listen/doc.go
@@ -4,10 +4,36 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package listen provides the cobra command for
-// ctx connection listen.
+// Package listen implements the "ctx connection listen"
+// subcommand that streams context entries from a
+// connected ctx Hub in real time.
 //
-// Key exports: [Cmd].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # What It Does
+//
+// Opens a persistent gRPC stream to the hub using
+// the Listen RPC. As new entries arrive they are
+// written to .context/hub/ as markdown files and a
+// receipt line is printed to stdout. The stream
+// runs until the user presses Ctrl-C.
+//
+// # Flags
+//
+// None. The command accepts no arguments. Connection
+// settings (hub address, token, subscribed types)
+// are read from the encrypted config file at
+// .context/.connect.enc.
+//
+// # Output
+//
+// Prints "Listening..." on startup, then one line
+// per received entry showing the entry type. Entries
+// are filtered by the types configured via
+// "ctx connection subscribe".
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command and delegates
+// directly to [coreListen.Run] which handles config
+// loading, gRPC client setup, signal handling, and
+// the streaming receive loop.
 package listen
diff --git a/internal/cli/connection/cmd/publish/doc.go b/internal/cli/connection/cmd/publish/doc.go
index d55e6fa37..4d31646c5 100644
--- a/internal/cli/connection/cmd/publish/doc.go
+++ b/internal/cli/connection/cmd/publish/doc.go
@@ -4,10 +4,41 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package publish provides the cobra command for
-// ctx connection publish.
+// Package publish implements the "ctx connection publish"
+// subcommand that sends context entries to a connected
+// ctx Hub.
 //
-// Key exports: [Cmd].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # What It Does
+//
+// Takes a type and content as positional arguments,
+// wraps them into a timestamped publish entry, and
+// sends them to the hub via the Publish RPC. This
+// is the manual publish path; the --share flag on
+// "ctx add" uses the same core logic automatically.
+//
+// # Arguments
+//
+// Requires exactly two positional arguments:
+//
+//   - args[0]: entry type (e.g. "decision",
+//     "learning")
+//   - args[1]: entry content text
+//
+// # Flags
+//
+// None. Connection settings are read from the
+// encrypted config at .context/.connect.enc.
+//
+// # Output
+//
+// Prints a confirmation line showing how many
+// entries were published (always 1 for this
+// command).
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command, constructs a
+// [hub.PublishEntry] with the current timestamp,
+// and delegates to [corePub.Run] for config loading,
+// gRPC client setup, and the publish call.
 package publish
diff --git a/internal/cli/connection/cmd/register/doc.go b/internal/cli/connection/cmd/register/doc.go
index 850c482db..50ee97db9 100644
--- a/internal/cli/connection/cmd/register/doc.go
+++ b/internal/cli/connection/cmd/register/doc.go
@@ -4,10 +4,40 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package register provides the cobra command for
-// ctx connection register.
+// Package register implements the "ctx connection register"
+// subcommand that registers this project with a ctx Hub
+// instance.
 //
-// Key exports: [Cmd].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # What It Does
+//
+// Connects to the hub at the given address, sends the
+// admin token and project name, receives a client
+// token in return, and stores the encrypted connection
+// config in .context/.connect.enc. After registration
+// the project can publish, sync, and listen for
+// entries.
+//
+// # Arguments
+//
+// Requires exactly one positional argument:
+//
+//   - args[0]: hub gRPC address (host:port)
+//
+// # Flags
+//
+//   - --token (required): the admin token printed
+//     by "ctx hub start" at server startup. Used to
+//     authenticate the registration request.
+//
+// # Output
+//
+// Prints a confirmation line showing the assigned
+// client ID from the hub.
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command, binds the --token
+// flag, marks it required, and delegates to
+// [coreReg.Run] which handles the gRPC handshake
+// and config persistence.
 package register
diff --git a/internal/cli/connection/cmd/status/doc.go b/internal/cli/connection/cmd/status/doc.go
index 2a01ff8e3..3565c8346 100644
--- a/internal/cli/connection/cmd/status/doc.go
+++ b/internal/cli/connection/cmd/status/doc.go
@@ -4,10 +4,34 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package status provides the cobra command for
-// ctx connection status.
+// Package status implements the "ctx connection status"
+// subcommand that displays the current hub connection
+// state and entry statistics.
 //
-// Key exports: [Cmd].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # What It Does
+//
+// Loads the encrypted connection config, connects to
+// the hub, calls the Status RPC, and prints a summary
+// showing the hub address, total stored entries, and
+// number of connected clients.
+//
+// # Flags
+//
+// None. The command accepts no arguments. Connection
+// settings are read from .context/.connect.enc.
+//
+// # Output
+//
+// A human-readable status block including:
+//
+//   - Hub address (host:port)
+//   - Total entries stored in the hub
+//   - Number of currently connected clients
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command and delegates
+// directly to [coreStatus.Run] which handles config
+// loading, gRPC client setup, the status call, and
+// output formatting.
 package status
diff --git a/internal/cli/connection/cmd/subscribe/doc.go b/internal/cli/connection/cmd/subscribe/doc.go
index 7e02cf305..39a650d7c 100644
--- a/internal/cli/connection/cmd/subscribe/doc.go
+++ b/internal/cli/connection/cmd/subscribe/doc.go
@@ -4,10 +4,36 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package subscribe provides the cobra command for
-// ctx connection subscribe.
+// Package subscribe implements the "ctx connection subscribe"
+// subcommand that configures which entry types this
+// project receives from the hub.
 //
-// Key exports: [Cmd].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # What It Does
+//
+// Updates the subscription type list in the encrypted
+// connection config (.context/.connect.enc). Subsequent
+// listen and sync operations will only receive entries
+// matching the subscribed types.
+//
+// # Arguments
+//
+// One or more positional arguments specifying entry
+// types to subscribe to (e.g. "decision", "learning",
+// "task", "convention").
+//
+// # Flags
+//
+// None.
+//
+// # Output
+//
+// Prints a confirmation line listing the subscribed
+// types.
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command and delegates
+// directly to [coreSub.Run] which loads the existing
+// config, updates the types list, saves the config
+// back, and prints confirmation.
 package subscribe
diff --git a/internal/cli/connection/cmd/sync/doc.go b/internal/cli/connection/cmd/sync/doc.go
index 2d569e762..d6ed1b455 100644
--- a/internal/cli/connection/cmd/sync/doc.go
+++ b/internal/cli/connection/cmd/sync/doc.go
@@ -4,10 +4,34 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package sync provides the cobra command for
-// ctx connection sync.
+// Package sync implements the "ctx connection sync"
+// subcommand that pulls new entries from a connected
+// ctx Hub into the local .context/hub/ directory.
 //
-// Key exports: [Cmd].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # What It Does
+//
+// Loads the connection config and sync state, connects
+// to the hub, pulls entries added since the last sync
+// sequence number, renders them as markdown files in
+// .context/hub/, and updates the sync state so the
+// next run only fetches new entries.
+//
+// # Flags
+//
+// None. The command accepts no arguments. Connection
+// settings are read from .context/.connect.enc and
+// sync state is tracked in a lock-guarded state file.
+//
+// # Output
+//
+// Prints a summary line showing how many entries were
+// synced (e.g. "Synced 3 entries."). When there are
+// no new entries it prints "Synced 0 entries."
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command and delegates to
+// [coreSync.Run] which handles config loading, state
+// locking, gRPC client setup, entry pulling, markdown
+// rendering, and state persistence.
 package sync
diff --git a/internal/cli/connection/core/config/config.go b/internal/cli/connection/core/config/config.go
index f626c2327..e17f1e7ca 100644
--- a/internal/cli/connection/core/config/config.go
+++ b/internal/cli/connection/core/config/config.go
@@ -37,8 +37,12 @@ func Save(cfg Config) error {
 		return encErr
 	}
 
+	path, pathErr := filePath()
+	if pathErr != nil {
+		return pathErr
+	}
 	return io.SafeWriteFile(
-		filePath(), encrypted, fs.PermSecret,
+		path, encrypted, fs.PermSecret,
 	)
 }
 
@@ -50,9 +54,11 @@ func Save(cfg Config) error {
 func Load() (Config, error) {
 	var cfg Config
 
-	encrypted, readErr := io.SafeReadUserFile(
-		filePath(),
-	)
+	path, pathErr := filePath()
+	if pathErr != nil {
+		return cfg, pathErr
+	}
+	encrypted, readErr := io.SafeReadUserFile(path)
 	if readErr != nil {
 		return cfg, readErr
 	}
diff --git a/internal/cli/connection/core/config/doc.go b/internal/cli/connection/core/config/doc.go
index e7f6e4a60..13c00e750 100644
--- a/internal/cli/connection/core/config/doc.go
+++ b/internal/cli/connection/core/config/doc.go
@@ -1,13 +1,54 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
 // `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package config manages the encrypted connection config
-// for ctx Hub client operations.
+// Package config manages the encrypted connection
+// configuration for ctx Hub client operations.
 //
-// Key exports: [Load], [Save], [Config].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # Config Type
+//
+// [Config] is the persisted hub connection state. It
+// holds three fields:
+//
+//   - HubAddr: the gRPC address (host:port) of the hub.
+//   - Token: the client bearer token received during
+//     registration.
+//   - Types: an optional list of subscribed entry types
+//     for filtered listening.
+//
+// Config is serialized as JSON and encrypted at rest.
+//
+// # Save
+//
+// [Save] persists a Config to disk. It marshals the
+// struct to JSON, encrypts the bytes using AES-GCM via
+// crypto.Encrypt, and writes the ciphertext to
+// .context/.connect.enc with restricted permissions
+// (fs.PermSecret). The encryption key is loaded from
+// the global key path via crypto.GlobalKeyPath.
+//
+// # Load
+//
+// [Load] reads and decrypts the stored configuration.
+// It reads the ciphertext from .connect.enc, loads the
+// encryption key, decrypts via crypto.Decrypt, and
+// unmarshals the JSON into a Config struct. Returns an
+// error when the file is missing, the key is
+// unreadable, or decryption fails.
+//
+// # Key Management
+//
+// The unexported loadKey helper reads the encryption
+// key from crypto.GlobalKeyPath(). The unexported
+// filePath helper resolves the absolute path to
+// .connect.enc within the context directory.
+//
+// # Data Flow
+//
+// The register subpackage calls Save after receiving a
+// client token from the hub. The listen, publish, and
+// status subpackages call Load to retrieve credentials
+// before making gRPC calls.
 package config
diff --git a/internal/cli/connection/core/config/path.go b/internal/cli/connection/core/config/path.go
index e27bf5cee..c331e36de 100644
--- a/internal/cli/connection/core/config/path.go
+++ b/internal/cli/connection/core/config/path.go
@@ -18,8 +18,13 @@ import (
 //
 // Returns:
 //   - string: Absolute path to the encrypted connect file
-func filePath() string {
-	return filepath.Join(rc.ContextDir(), cfgHub.FileConnect)
+//   - error: non-nil when the context directory is not declared
+func filePath() (string, error) {
+	ctxDir, err := rc.ContextDir()
+	if err != nil {
+		return "", err
+	}
+	return filepath.Join(ctxDir, cfgHub.FileConnect), nil
 }
 
 // loadKey reads the encryption key from the global key
diff --git a/internal/cli/connection/core/listen/doc.go b/internal/cli/connection/core/listen/doc.go
index d8cf6a68e..94295b488 100644
--- a/internal/cli/connection/core/listen/doc.go
+++ b/internal/cli/connection/core/listen/doc.go
@@ -1,13 +1,42 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
 // `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
 // Package listen implements real-time hub entry streaming
-// for ctx connection listen.
+// for the ctx connection listen command.
 //
-// Key exports: [Run].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # Run
+//
+// [Run] opens a persistent gRPC stream to the ctx Hub
+// and writes each received entry to .context/hub/ as it
+// arrives. The function blocks until the user presses
+// Ctrl-C or an unrecoverable error occurs.
+//
+// The execution flow is:
+//
+//  1. Load the encrypted connection config via
+//     connectCfg.Load to obtain the hub address and
+//     bearer token.
+//  2. Dial the hub with hub.NewClient, establishing a
+//     gRPC connection.
+//  3. Set up a signal handler for os.Interrupt using
+//     signal.NotifyContext so Ctrl-C cancels the
+//     stream context.
+//  4. Print a "listening" status message via
+//     writeConnect.Listening.
+//  5. Call client.Listen with the configured entry type
+//     filters and a callback that writes each received
+//     EntryMsg to disk via render.WriteEntries, then
+//     prints a confirmation via writeConnect.EntryReceived.
+//  6. On context cancellation (Ctrl-C), return nil.
+//     On any other error, propagate it to the cmd/ layer.
+//
+// # Data Flow
+//
+// The cmd/ layer calls Run as the cobra RunE function.
+// Run loads config, dials the hub, and streams entries.
+// The write/ layer handles all user-facing output. The
+// render subpackage persists entries to .context/hub/.
 package listen
diff --git a/internal/cli/connection/core/publish/doc.go b/internal/cli/connection/core/publish/doc.go
index 63633e25e..b8eb482af 100644
--- a/internal/cli/connection/core/publish/doc.go
+++ b/internal/cli/connection/core/publish/doc.go
@@ -1,13 +1,39 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
 // `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
 // Package publish implements entry publishing to the hub
-// for ctx connection publish.
+// for the ctx connection publish command.
 //
-// Key exports: [Run].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # Run
+//
+// [Run] sends local entries to the ctx Hub via the
+// Publish RPC. It accepts a slice of hub.PublishEntry
+// values prepared by the cmd/ layer.
+//
+// The execution flow is:
+//
+//  1. Load the encrypted connection config via
+//     connectCfg.Load to obtain the hub address and
+//     bearer token.
+//  2. Dial the hub with hub.NewClient, establishing a
+//     gRPC connection.
+//  3. Call client.Publish with the entries, sending
+//     them in a single batch RPC.
+//  4. Print a confirmation showing the number of
+//     published entries via writeConnect.Published.
+//
+// The function returns an error if config loading,
+// connection setup, or the publish RPC fails. The gRPC
+// connection is closed via a deferred Close call.
+//
+// # Data Flow
+//
+// The cmd/ layer prepares PublishEntry values from
+// command arguments and passes them to Run. Run handles
+// config, networking, and output. Future versions may
+// support reading entries from local context files with
+// a --new flag.
 package publish
diff --git a/internal/cli/connection/core/register/doc.go b/internal/cli/connection/core/register/doc.go
index 82b3159a8..494083f7f 100644
--- a/internal/cli/connection/core/register/doc.go
+++ b/internal/cli/connection/core/register/doc.go
@@ -1,13 +1,45 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
 // `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
 // Package register implements hub registration logic for
-// ctx connection register.
+// the ctx connection register command.
 //
-// Key exports: [Run].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # Run
+//
+// [Run] registers the current project with a ctx Hub
+// instance. It exchanges an admin token for a client
+// token and persists the encrypted connection config.
+//
+// The execution flow is:
+//
+//  1. Dial the hub at the provided gRPC address using
+//     hub.NewClient with an empty bearer token (the
+//     admin token is sent as a registration parameter,
+//     not as a connection credential).
+//  2. Derive the project name from the context
+//     directory path using filepath.Base.
+//  3. Call client.Register with the admin token and
+//     project name. The hub returns a client ID and a
+//     client bearer token for future RPCs.
+//  4. Build a connectCfg.Config with the hub address
+//     and client token, then persist it via
+//     connectCfg.Save. The config is encrypted at rest
+//     in .context/.connect.enc.
+//  5. Print a confirmation with the assigned client ID
+//     via writeConnect.Registered.
+//
+// The function returns an error if dialing, registration,
+// or config persistence fails. The gRPC connection is
+// closed via a deferred Close call.
+//
+// # Data Flow
+//
+// The cmd/ layer extracts the hub address and admin
+// token from flags or arguments and passes them to Run.
+// After registration, the listen, publish, and status
+// subpackages use the stored config for authenticated
+// hub communication.
 package register
diff --git a/internal/cli/connection/core/register/register.go b/internal/cli/connection/core/register/register.go
index 2931daec5..13878602d 100644
--- a/internal/cli/connection/core/register/register.go
+++ b/internal/cli/connection/core/register/register.go
@@ -42,7 +42,12 @@ func Run(
 	}
 	defer func() { _ = client.Close() }()
 
-	projectName := filepath.Base(rc.ContextDir())
+	ctxDir, ctxErr := rc.RequireContextDir()
+	if ctxErr != nil {
+		cmd.SilenceUsage = true
+		return ctxErr
+	}
+	projectName := filepath.Base(ctxDir)
 
 	resp, regErr := client.Register(
 		context.Background(),
diff --git a/internal/cli/connection/core/render/doc.go b/internal/cli/connection/core/render/doc.go
index 3f4301a11..0fd4b863a 100644
--- a/internal/cli/connection/core/render/doc.go
+++ b/internal/cli/connection/core/render/doc.go
@@ -4,10 +4,43 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package render converts hub entries to markdown files
-// in .context/hub/ with origin tags and date headers.
+// Package render is the client-side renderer that
+// turns hub entries received from ctx connection sync
+// and ctx connection listen into markdown files under
+// .context/hub/ so the local agent can read them.
 //
-// Key exports: [WriteEntries].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// Each entry becomes a markdown block with a date
+// header, an origin tag (which project published it),
+// and the entry body, all separated by horizontal
+// rules. The format is the same one defined by the
+// tpl.HubEntryMarkdown template, so what ships
+// through the gRPC pipe is what lands on disk.
+//
+// # Public Surface
+//
+//   - [WriteEntries](entries): resolves the hub
+//     directory (.context/hub/) from the declared
+//     context dir and appends each entry to the
+//     matching per-type file (decisions.md,
+//     learnings.md, conventions.md, tasks.md),
+//     formatting via the tpl.HubEntryMarkdown
+//     template. WriteEntries does not deduplicate;
+//     the sync caller avoids re-fetching by
+//     tracking the last-seen sequence number.
+//
+// # File Layout
+//
+//   - .context/hub/decisions.md
+//   - .context/hub/learnings.md
+//   - .context/hub/conventions.md
+//   - .context/hub/tasks.md
+//   - .context/hub/.sync_state.json: last-seen
+//     sync sequence number so resume is exact.
+//
+// # Concurrency
+//
+// Filesystem-bound. Concurrent renderers against
+// the same hub directory would race; the
+// ctx connection listen daemon is single-instance
+// per project by convention.
 package render
diff --git a/internal/cli/connection/core/render/format.go b/internal/cli/connection/core/render/format.go
index 39c9a0bd9..710b0c359 100644
--- a/internal/cli/connection/core/render/format.go
+++ b/internal/cli/connection/core/render/format.go
@@ -75,7 +75,7 @@ func writeEntry(b *strings.Builder, e *hub.EntryMsg) {
 	ts := time.Unix(e.Timestamp, 0).UTC()
 	date := ts.Format(cfgTime.DateFormat)
 	if _, err := fmt.Fprintf(b,
-		tpl.TplEntryMarkdown,
+		tpl.HubEntryMarkdown,
 		date, firstLine(e.Content),
 		e.Origin, e.Content,
 	); err != nil {
diff --git a/internal/cli/connection/core/render/render.go b/internal/cli/connection/core/render/render.go
index 61f9ecefd..b3d84e4cd 100644
--- a/internal/cli/connection/core/render/render.go
+++ b/internal/cli/connection/core/render/render.go
@@ -25,7 +25,11 @@ import (
 // Returns:
 //   - error: non-nil if directory creation or write fails
 func WriteEntries(entries []hub.EntryMsg) error {
-	dir := filepath.Join(rc.ContextDir(), cfgHub.DirHub)
+	ctxDir, ctxErr := rc.ContextDir()
+	if ctxErr != nil {
+		return ctxErr
+	}
+	dir := filepath.Join(ctxDir, cfgHub.DirHub)
 	if mkErr := io.SafeMkdirAll(
 		dir, fs.PermKeyDir,
 	); mkErr != nil {
diff --git a/internal/cli/connection/core/render/render_test.go b/internal/cli/connection/core/render/render_test.go
index 6917a47ee..fc5aa3617 100644
--- a/internal/cli/connection/core/render/render_test.go
+++ b/internal/cli/connection/core/render/render_test.go
@@ -13,7 +13,7 @@ import (
 	"testing"
 
 	"github.com/ActiveMemory/ctx/internal/hub"
-	"github.com/ActiveMemory/ctx/internal/rc"
+	"github.com/ActiveMemory/ctx/internal/testutil/testctx"
 )
 
 func TestWriteEntries_CreatesFiles(t *testing.T) {
@@ -28,7 +28,7 @@ func TestWriteEntries_CreatesFiles(t *testing.T) {
 		t.Fatal(chErr)
 	}
 	defer func() { _ = os.Chdir(origDir) }()
-	rc.Reset()
+	testctx.Declare(t, tmpDir)
 
 	entries := []hub.EntryMsg{
 		{
@@ -95,7 +95,7 @@ func TestWriteEntries_AppendsToExisting(t *testing.T) {
 		t.Fatal(chErr)
 	}
 	defer func() { _ = os.Chdir(origDir) }()
-	rc.Reset()
+	testctx.Declare(t, tmpDir)
 
 	// Pre-populate a file.
 	existing := "## Existing content\n\n"
diff --git a/internal/cli/connection/core/status/doc.go b/internal/cli/connection/core/status/doc.go
index 640ebf1f3..eac026842 100644
--- a/internal/cli/connection/core/status/doc.go
+++ b/internal/cli/connection/core/status/doc.go
@@ -1,13 +1,39 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
 // `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package status implements hub status display for
-// ctx connection status.
+// Package status implements hub status display for the
+// ctx connection status command.
 //
-// Key exports: [Run].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # Run
+//
+// [Run] queries the ctx Hub for connection health and
+// entry statistics, then prints a summary to the
+// terminal.
+//
+// The execution flow is:
+//
+//  1. Load the encrypted connection config via
+//     connectCfg.Load to obtain the hub address and
+//     bearer token.
+//  2. Dial the hub with hub.NewClient, establishing a
+//     gRPC connection.
+//  3. Call client.Status to retrieve server-side
+//     statistics including total entry count and the
+//     number of connected clients.
+//  4. Print the hub address, total entries, and
+//     connected client count via writeConnect.Status.
+//
+// The function returns an error if config loading,
+// connection setup, or the status RPC fails. The gRPC
+// connection is closed via a deferred Close call.
+//
+// # Data Flow
+//
+// The cmd/ layer calls Run as the cobra RunE function.
+// Run handles config loading, networking, and output
+// delegation. The write/connect package formats the
+// final user-facing output.
 package status
diff --git a/internal/cli/connection/core/subscribe/doc.go b/internal/cli/connection/core/subscribe/doc.go
index efa1c4d59..458571094 100644
--- a/internal/cli/connection/core/subscribe/doc.go
+++ b/internal/cli/connection/core/subscribe/doc.go
@@ -4,10 +4,35 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package subscribe implements subscription type updates
-// for ctx connection subscribe.
+// Package subscribe implements subscription type management
+// for the ctx connection subscribe command.
 //
-// Key exports: [Run].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # Overview
+//
+// This package provides the business logic for updating
+// which entry types a connection subscribes to. When a
+// user runs ctx connection subscribe, the command layer
+// delegates to [Run], which persists the new subscription
+// list to the connection configuration.
+//
+// # Behavior
+//
+// [Run] replaces the subscribed entry types in the
+// connection config file and persists the change to disk.
+//
+// # Data Flow
+//
+// The subscribe pipeline works as follows:
+//
+//  1. The cmd layer invokes [Run] with cobra args
+//     containing the desired entry types.
+//  2. [Run] loads the current connection config via
+//     the config sub-package.
+//  3. The Types field is replaced with the new list.
+//  4. The updated config is saved back to disk.
+//  5. A confirmation message is printed via the
+//     write/connect layer.
+//
+// If the config file cannot be loaded or saved, the
+// error propagates back to the cmd layer for display.
 package subscribe
diff --git a/internal/cli/connection/core/sync/doc.go b/internal/cli/connection/core/sync/doc.go
index 2bf244cb4..51523c397 100644
--- a/internal/cli/connection/core/sync/doc.go
+++ b/internal/cli/connection/core/sync/doc.go
@@ -4,10 +4,50 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package sync implements hub entry sync logic for
-// ctx connection sync.
+// Package sync implements hub-to-local entry
+// synchronisation for the ctx connection sync command.
 //
-// Key exports: [Run].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # Overview
+//
+// This package pulls new journal entries from a remote
+// hub and writes them as markdown files into the local
+// .context/hub/ directory. It tracks progress with a
+// sequence-based sync state so only new entries are
+// fetched on each invocation.
+//
+// # Behavior
+//
+// [Run] acquires a file lock, dials the hub over gRPC,
+// fetches entries newer than the last-seen sequence, and
+// writes them as markdown into .context/hub/.
+//
+// # Data Flow
+//
+// When [Run] is called it performs these steps:
+//
+//  1. Loads connection config (hub address, token,
+//     subscribed types) via the config sub-package.
+//  2. Acquires a file-based lock to prevent concurrent
+//     syncs from colliding.
+//  3. Reads the persisted sync state to obtain the
+//     last-seen sequence number.
+//  4. Dials the hub via gRPC, requesting all entries
+//     after the last sequence for the subscribed types.
+//  5. Renders received entries as markdown through the
+//     render sub-package.
+//  6. Updates the sync state with the highest sequence
+//     number from the batch.
+//  7. Reports the count of synced entries to the user.
+//
+// # Lock File
+//
+// A lock file at .context/hub/.sync.lock prevents two
+// sync processes from running at the same time. The
+// lock is released via a deferred cleanup function
+// returned by loadState.
+//
+// # Internal Types
+//
+//   - state: tracks the LastSequence field, persisted
+//     as JSON in .context/hub/.sync_state.json.
 package sync
diff --git a/internal/cli/connection/core/sync/state.go b/internal/cli/connection/core/sync/state.go
index a08787ba6..5763f386a 100644
--- a/internal/cli/connection/core/sync/state.go
+++ b/internal/cli/connection/core/sync/state.go
@@ -26,7 +26,11 @@ import (
 //   - error: Non-nil on I/O or lock-contention failure
 func loadState() (state, func(), error) {
 	var s state
-	dir := filepath.Join(rc.ContextDir(), cfgHub.DirHub)
+	ctxDir, ctxErr := rc.ContextDir()
+	if ctxErr != nil {
+		return s, nil, ctxErr
+	}
+	dir := filepath.Join(ctxDir, cfgHub.DirHub)
 	lockPath := filepath.Join(dir, cfgHub.FileSyncLock)
 
 	if mkErr := io.SafeMkdirAll(
@@ -76,7 +80,11 @@ func loadState() (state, func(), error) {
 // Returns:
 //   - error: Non-nil on marshal or I/O failure
 func saveState(s state) error {
-	dir := filepath.Join(rc.ContextDir(), cfgHub.DirHub)
+	ctxDir, ctxErr := rc.ContextDir()
+	if ctxErr != nil {
+		return ctxErr
+	}
+	dir := filepath.Join(ctxDir, cfgHub.DirHub)
 	data, marshalErr := json.MarshalIndent(
 		s, "", cfgHub.JSONIndent,
 	)
diff --git a/internal/cli/connection/doc.go b/internal/cli/connection/doc.go
index 786c2efb1..23ee53c8c 100644
--- a/internal/cli/connection/doc.go
+++ b/internal/cli/connection/doc.go
@@ -4,10 +4,30 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package connect provides the ctx connection command group
-// for ctx Hub client operations.
+// Package connection provides the ctx connection command
+// group for ctx Hub client operations.
 //
-// Key exports: [Cmd].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// The connection command manages the relationship between
+// a local project and a remote ctx Hub instance. It
+// handles device registration, topic subscription,
+// context publishing, and real-time event listening.
+//
+// # Subcommands
+//
+//   - register: register this device with a Hub instance
+//   - subscribe: subscribe to context topics on the Hub
+//   - sync: pull latest context from subscribed topics
+//   - publish: push local context entries to the Hub
+//   - listen: stream real-time events from the Hub
+//   - status: show connection state and subscription info
+//
+// # Subpackages
+//
+//	cmd/register: device registration flow
+//	cmd/subscribe: topic subscription management
+//	cmd/sync: context pull from Hub
+//	cmd/publish: context push to Hub
+//	cmd/listen: real-time event streaming
+//	cmd/status: connection status display
+//	core: shared Hub client helpers
 package connection
diff --git a/internal/cli/deactivate/cmd/root/cmd.go b/internal/cli/deactivate/cmd/root/cmd.go
new file mode 100644
index 000000000..c2e7bf6d8
--- /dev/null
+++ b/internal/cli/deactivate/cmd/root/cmd.go
@@ -0,0 +1,58 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package root
+
+import (
+	"github.com/spf13/cobra"
+
+	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
+	"github.com/ActiveMemory/ctx/internal/config/cli"
+	"github.com/ActiveMemory/ctx/internal/config/embed/cmd"
+	embedFlag "github.com/ActiveMemory/ctx/internal/config/embed/flag"
+	cFlag "github.com/ActiveMemory/ctx/internal/config/flag"
+)
+
+// Cmd returns the `ctx deactivate` cobra command.
+//
+// Accepts one flag:
+//
+//	--shell    override auto-detection (defaults to $SHELL).
+//
+// # Stdout discipline (critical)
+//
+// Same eval-recursion hazard as `ctx activate`: stdout is consumed
+// by `eval "$(ctx deactivate)"`, so cobra must never print Usage /
+// Flags / Examples on stdout (the Examples block contains the eval
+// invocation literally). [SilenceUsage] is set unconditionally
+// below; errors keep going to stderr via the root [SilenceErrors]
+// settings.
+//
+// Returns:
+//   - *cobra.Command: configured deactivate command.
+func Cmd() *cobra.Command {
+	short, long := desc.Command(cmd.DescKeyDeactivate)
+	c := &cobra.Command{
+		Use:     cmd.UseDeactivate,
+		Short:   short,
+		Long:    long,
+		Example: desc.Example(cmd.DescKeyDeactivate),
+		Args:    cobra.NoArgs,
+		// Exempt from the global init / require-context-dir checks:
+		// `unset CTX_DIR` must work regardless of current state.
+		Annotations: map[string]string{cli.AnnotationSkipInit: cli.AnnotationTrue},
+		// See the Stdout discipline note above.
+		SilenceUsage: true,
+		RunE: func(cmd *cobra.Command, _ []string) error {
+			shell, _ := cmd.Flags().GetString(cFlag.Shell)
+			return Run(cmd, shell)
+		},
+	}
+	c.Flags().String(cFlag.Shell, "",
+		desc.Flag(embedFlag.DescKeyActivateShell),
+	)
+	return c
+}
diff --git a/internal/cli/deactivate/cmd/root/doc.go b/internal/cli/deactivate/cmd/root/doc.go
new file mode 100644
index 000000000..43b96fc90
--- /dev/null
+++ b/internal/cli/deactivate/cmd/root/doc.go
@@ -0,0 +1,19 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+// Package root implements the `ctx deactivate` cobra command.
+//
+// The command emits a shell-specific `unset CTX_DIR` statement to
+// stdout, paired with `ctx activate` for symmetric shell integration.
+// Like activate, deactivate is in the exempt allowlist: it does not
+// require a declared context directory to run (clearing CTX_DIR when
+// it is already unset is a harmless no-op).
+//
+// Usage:
+//
+//	eval "$(ctx deactivate)"
+//	ctx deactivate --shell zsh
+package root
diff --git a/internal/cli/deactivate/cmd/root/run.go b/internal/cli/deactivate/cmd/root/run.go
new file mode 100644
index 000000000..0db2507db
--- /dev/null
+++ b/internal/cli/deactivate/cmd/root/run.go
@@ -0,0 +1,34 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package root
+
+import (
+	"github.com/spf13/cobra"
+
+	"github.com/ActiveMemory/ctx/internal/cli/activate/core/emit"
+	writeActivate "github.com/ActiveMemory/ctx/internal/write/activate"
+)
+
+// Run executes the `ctx deactivate` command: emit a shell-specific
+// `unset CTX_DIR` statement to stdout so the caller can clear the
+// binding via `eval "$(ctx deactivate)"`.
+//
+// The command never errors under normal operation; unsetting an
+// already-unset variable is a no-op across supported shells.
+//
+// Parameters:
+//   - cmd: cobra command providing stdout.
+//   - shell: value of the --shell flag; empty auto-detects from
+//     $SHELL via emit.DetectShell.
+//
+// Returns:
+//   - error: always nil; kept in the signature for Cobra RunE
+//     compatibility.
+func Run(cmd *cobra.Command, shell string) error {
+	writeActivate.Emit(cmd, emit.Unset(emit.DetectShell(shell)))
+	return nil
+}
diff --git a/internal/cli/deactivate/deactivate.go b/internal/cli/deactivate/deactivate.go
new file mode 100644
index 000000000..e602527ea
--- /dev/null
+++ b/internal/cli/deactivate/deactivate.go
@@ -0,0 +1,22 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package deactivate
+
+import (
+	"github.com/spf13/cobra"
+
+	deactivateRoot "github.com/ActiveMemory/ctx/internal/cli/deactivate/cmd/root"
+)
+
+// Cmd returns the `ctx deactivate` command for registration on the
+// root ctx command. See cmd/root for the full command definition.
+//
+// Returns:
+//   - *cobra.Command: the deactivate command.
+func Cmd() *cobra.Command {
+	return deactivateRoot.Cmd()
+}
diff --git a/internal/cli/deactivate/deactivate_test.go b/internal/cli/deactivate/deactivate_test.go
new file mode 100644
index 000000000..a1a2427a8
--- /dev/null
+++ b/internal/cli/deactivate/deactivate_test.go
@@ -0,0 +1,73 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package deactivate_test
+
+import (
+	"bytes"
+	"strings"
+	"testing"
+
+	"github.com/ActiveMemory/ctx/internal/cli/deactivate"
+)
+
+// runDeactivate invokes `ctx deactivate` with the given args and
+// returns (stdout, error).
+func runDeactivate(t *testing.T, args []string) (string, error) {
+	t.Helper()
+	c := deactivate.Cmd()
+	c.SetArgs(args)
+	var out bytes.Buffer
+	c.SetOut(&out)
+	c.SetErr(&out)
+	err := c.Execute()
+	return out.String(), err
+}
+
+// TestDeactivate_DefaultShell: no --shell flag → autodetect from
+// $SHELL → bash emitter → `unset CTX_DIR`.
+func TestDeactivate_DefaultShell(t *testing.T) {
+	t.Setenv("SHELL", "/bin/bash")
+
+	stdout, err := runDeactivate(t, nil)
+	if err != nil {
+		t.Fatalf("unexpected err: %v", err)
+	}
+	if strings.TrimSpace(stdout) != "unset CTX_DIR" {
+		t.Errorf("stdout = %q, want 'unset CTX_DIR\\n'", stdout)
+	}
+}
+
+// TestDeactivate_ExplicitZsh: --shell zsh → same POSIX unset
+// statement (v1 bash/zsh/sh share syntax).
+func TestDeactivate_ExplicitZsh(t *testing.T) {
+	stdout, err := runDeactivate(t, []string{"--shell", "zsh"})
+	if err != nil {
+		t.Fatalf("unexpected err: %v", err)
+	}
+	if !strings.Contains(stdout, "unset CTX_DIR") {
+		t.Errorf("stdout missing unset: %q", stdout)
+	}
+}
+
+// TestDeactivate_UnknownShell: unknown shell → POSIX unset fallback.
+func TestDeactivate_UnknownShell(t *testing.T) {
+	stdout, err := runDeactivate(t, []string{"--shell", "rc"})
+	if err != nil {
+		t.Fatalf("unexpected err: %v", err)
+	}
+	if !strings.Contains(stdout, "unset CTX_DIR") {
+		t.Errorf("stdout missing unset fallback: %q", stdout)
+	}
+}
+
+// TestDeactivate_RejectsPositionalArgs: deactivate takes no args.
+func TestDeactivate_RejectsPositionalArgs(t *testing.T) {
+	_, err := runDeactivate(t, []string{"unexpected-arg"})
+	if err == nil {
+		t.Fatalf("expected error for positional arg, got nil")
+	}
+}
diff --git a/internal/cli/deactivate/doc.go b/internal/cli/deactivate/doc.go
new file mode 100644
index 000000000..a8792267d
--- /dev/null
+++ b/internal/cli/deactivate/doc.go
@@ -0,0 +1,27 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+// Package deactivate implements the `ctx deactivate` command.
+//
+// Deactivate is the counterpart to `ctx activate` under the
+// explicit-context-dir resolution model. It emits a shell-specific
+// `unset CTX_DIR` statement to stdout, intended for consumption via
+// `eval "$(ctx deactivate)"`.
+//
+// The command does not touch the filesystem and does not scan for
+// candidates. CTX_DIR can always be cleared safely regardless of
+// which (if any) `.context/` directories are visible.
+//
+// # Subpackages
+//
+//	cmd/root : cobra command definition and run logic.
+//
+// # Shell Support
+//
+// Deactivate shares the emit package with activate
+// (internal/cli/activate/core/emit) so both commands stay in
+// lockstep on supported shells. v1: bash, zsh, POSIX sh.
+package deactivate
diff --git a/internal/cli/deactivate/testmain_test.go b/internal/cli/deactivate/testmain_test.go
new file mode 100644
index 000000000..e924bf4a3
--- /dev/null
+++ b/internal/cli/deactivate/testmain_test.go
@@ -0,0 +1,21 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package deactivate_test
+
+import (
+	"os"
+	"testing"
+
+	"github.com/ActiveMemory/ctx/internal/assets/read/lookup"
+)
+
+// TestMain initializes the embedded text-asset lookup so deactivate's
+// command metadata (Use/Short/Long from cmd/root) resolves correctly.
+func TestMain(m *testing.M) {
+	lookup.Init()
+	os.Exit(m.Run())
+}
diff --git a/internal/cli/decision/cmd/reindex/cmd.go b/internal/cli/decision/cmd/reindex/cmd.go
index e8a47eb33..4ac00b675 100644
--- a/internal/cli/decision/cmd/reindex/cmd.go
+++ b/internal/cli/decision/cmd/reindex/cmd.go
@@ -4,7 +4,6 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package reindex provides the "ctx decisions reindex" subcommand.
 package reindex
 
 import (
diff --git a/internal/cli/decision/cmd/reindex/doc.go b/internal/cli/decision/cmd/reindex/doc.go
index ff4335dce..0829d9d44 100644
--- a/internal/cli/decision/cmd/reindex/doc.go
+++ b/internal/cli/decision/cmd/reindex/doc.go
@@ -4,9 +4,37 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package reindex implements the ctx decision reindex subcommand.
+// Package reindex implements the "ctx decision reindex"
+// subcommand that regenerates the index table at the
+// top of DECISIONS.md.
 //
-// [Cmd] builds the cobra.Command. [Run] regenerates the index
-// table at the top of DECISIONS.md by parsing all entry headers
-// and rebuilding the sorted table.
+// # What It Does
+//
+// Parses all decision entry headers in DECISIONS.md,
+// sorts them, and rebuilds the quick-reference index
+// table that appears at the top of the file. This is
+// useful after manual edits that leave the index out
+// of sync with the entries below.
+//
+// # Arguments
+//
+// None required. The command operates on the
+// DECISIONS.md file in the active context directory.
+//
+// # Flags
+//
+// None.
+//
+// # Output
+//
+// Prints a confirmation message indicating how many
+// entries were indexed. The file is updated in place.
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command and delegates
+// directly to [Run]. Run resolves the file path via
+// [rc.ContextDir], then calls [index.Reindex] with
+// the decision-specific update function to parse
+// headers and regenerate the table.
 package reindex
diff --git a/internal/cli/decision/cmd/reindex/run.go b/internal/cli/decision/cmd/reindex/run.go
index 873fe768d..a01ac5ee8 100644
--- a/internal/cli/decision/cmd/reindex/run.go
+++ b/internal/cli/decision/cmd/reindex/run.go
@@ -26,7 +26,12 @@ import (
 // Returns:
 //   - error: Non-nil if the file read/write fails
 func Run(cmd *cobra.Command, _ []string) error {
-	filePath := filepath.Join(rc.ContextDir(), ctx.Decision)
+	ctxDir, err := rc.RequireContextDir()
+	if err != nil {
+		cmd.SilenceUsage = true
+		return err
+	}
+	filePath := filepath.Join(ctxDir, ctx.Decision)
 	return index.Reindex(
 		cmd.OutOrStdout(),
 		filePath,
diff --git a/internal/cli/decision/decision.go b/internal/cli/decision/decision.go
index b2afa6079..c220c1ab3 100644
--- a/internal/cli/decision/decision.go
+++ b/internal/cli/decision/decision.go
@@ -4,7 +4,6 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package decision provides commands for managing DECISIONS.md.
 package decision
 
 import (
diff --git a/internal/cli/decision/decision_test.go b/internal/cli/decision/decision_test.go
index e0982bd9a..58227b703 100644
--- a/internal/cli/decision/decision_test.go
+++ b/internal/cli/decision/decision_test.go
@@ -14,6 +14,7 @@ import (
 	"github.com/ActiveMemory/ctx/internal/config/ctx"
 	"github.com/ActiveMemory/ctx/internal/config/dir"
 	"github.com/ActiveMemory/ctx/internal/rc"
+	"github.com/ActiveMemory/ctx/internal/testutil/testctx"
 )
 
 func TestCmd(t *testing.T) {
@@ -64,7 +65,7 @@ func TestRunReindex_NoFile(t *testing.T) {
 	_ = os.Chdir(tempDir)
 	defer func() { _ = os.Chdir(origDir) }()
 
-	rc.Reset()
+	testctx.Declare(t, tempDir)
 	defer rc.Reset()
 
 	cmd := Cmd()
@@ -82,7 +83,7 @@ func TestRunReindex_WithFile(t *testing.T) {
 	_ = os.Chdir(tempDir)
 	defer func() { _ = os.Chdir(origDir) }()
 
-	rc.Reset()
+	testctx.Declare(t, tempDir)
 	defer rc.Reset()
 
 	// Create the context directory and DECISIONS.md file
@@ -124,7 +125,7 @@ func TestRunReindex_EmptyFile(t *testing.T) {
 	_ = os.Chdir(tempDir)
 	defer func() { _ = os.Chdir(origDir) }()
 
-	rc.Reset()
+	testctx.Declare(t, tempDir)
 	defer rc.Reset()
 
 	// Create the context directory and empty DECISIONS.md
diff --git a/internal/cli/decision/doc.go b/internal/cli/decision/doc.go
index 161b2c079..3d048bee2 100644
--- a/internal/cli/decision/doc.go
+++ b/internal/cli/decision/doc.go
@@ -4,9 +4,23 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package decision implements the ctx decision command group
-// for managing DECISIONS.md.
+// Package decision implements the ctx decision command
+// group for managing DECISIONS.md.
 //
-// Currently provides the reindex subcommand for regenerating
-// the decision index table.
+// DECISIONS.md records architectural choices with their
+// rationale, trade-offs, and timestamps. The decision
+// command group provides tooling to maintain this file's
+// quick-reference index table, which maps decision
+// numbers to one-line summaries for fast scanning.
+//
+// # Subcommands
+//
+//   - reindex: scans DECISIONS.md entries and regenerates
+//     the index table at the top of the file, ensuring
+//     numbering and summaries stay consistent with the
+//     full entries below
+//
+// # Subpackages
+//
+//	cmd/reindex: cobra command for index regeneration
 package decision
diff --git a/internal/cli/doc.go b/internal/cli/doc.go
index 8f550f5fa..107711b78 100644
--- a/internal/cli/doc.go
+++ b/internal/cli/doc.go
@@ -4,10 +4,37 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package cli contains the implementation of all ctx subcommands.
+// Package cli contains the implementation of all ctx
+// subcommands.
 //
-// Each command lives in its own package following the taxonomy:
-// parent.go (Cmd wiring), cmd/root/ or cmd// (implementation),
-// core/ (shared helpers). The bootstrap package registers all
-// commands into the root cobra.Command tree.
+// Each command lives in its own subpackage following a
+// consistent taxonomy:
+//
+//   - parent.go: exports a Cmd() function that wires the
+//     cobra command and registers subcommands
+//   - cmd/root/ or cmd/<subcommand>/: cobra command definitions,
+//     flag binding, and RunE entry points
+//   - core/: shared helpers, business logic, and
+//     formatting used by one or more cmd/ packages
+//
+// The [internal/bootstrap] package registers all command
+// packages into the root cobra.Command tree at startup.
+// Commands that are pure namespace groupings (no RunE)
+// use [internal/cli/parent.Cmd] to create the parent
+// with desc-loaded descriptions and subcommand wiring.
+//
+// # Package Categories
+//
+// Context file commands: add, compact, decision, drift,
+// fmt, load, agent, reindex, status, watch.
+//
+// Session lifecycle: pause, resume, event, system.
+//
+// Publishing and export: journal, serve, site, memory.
+//
+// Infrastructure: backup, config, connection, hub, mcp,
+// prune, setup, steering, trigger, usage, sysinfo.
+//
+// Utilities: guide, loop, pad, resolve, skill, trace,
+// why, parent, hook, message, notify.
 package cli
diff --git a/internal/cli/doctor/cmd/root/doc.go b/internal/cli/doctor/cmd/root/doc.go
index 4c1214093..2985af554 100644
--- a/internal/cli/doctor/cmd/root/doc.go
+++ b/internal/cli/doctor/cmd/root/doc.go
@@ -4,11 +4,47 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package root implements the ctx doctor command for context
-// health diagnostics.
+// Package root implements the "ctx doctor" command that
+// runs health diagnostics on the project context setup.
 //
-// [Cmd] builds the cobra.Command with --json flag. [Run] executes
-// all health checks (initialization, required files, ctxrc
-// validation, drift, token budget) and renders the results as
-// a checklist or JSON.
+// # What It Does
+//
+// Executes a suite of health checks and presents the
+// results as a checklist or JSON report. The checks
+// cover the full lifecycle of a ctx installation:
+//
+//   - Context initialization status
+//   - Required files presence (TASKS.md, etc.)
+//   - .ctxrc validation (syntax, schema)
+//   - Context drift detection
+//   - Plugin enablement in Claude settings
+//   - Companion configuration
+//   - Event logging health
+//   - Webhook configuration
+//   - Reminder validity
+//   - Task completion ratios
+//   - Context token size vs budget
+//   - System resource availability
+//   - Recent event activity
+//
+// # Flags
+//
+//   - --json, -j: Output results as machine-readable
+//     JSON instead of a human-readable checklist.
+//
+// # Output
+//
+// In human mode, prints a checklist with pass, warn,
+// and error icons per check, followed by warning and
+// error counts. In JSON mode, outputs a structured
+// report with all check results, statuses, and
+// summary counts.
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command with AnnotationSkipInit
+// so it runs even when context is not initialized.
+// [Run] executes each check function from [core/check],
+// tallies warnings and errors, then delegates to
+// [core/output] for rendering.
 package root
diff --git a/internal/cli/doctor/cmd/root/run.go b/internal/cli/doctor/cmd/root/run.go
index 2f97cadfe..da519b924 100644
--- a/internal/cli/doctor/cmd/root/run.go
+++ b/internal/cli/doctor/cmd/root/run.go
@@ -7,16 +7,30 @@
 package root
 
 import (
+	"errors"
+	"fmt"
+
 	"github.com/spf13/cobra"
 
+	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
 	"github.com/ActiveMemory/ctx/internal/cli/doctor/core/check"
 	"github.com/ActiveMemory/ctx/internal/cli/doctor/core/output"
+	"github.com/ActiveMemory/ctx/internal/config/doctor"
+	"github.com/ActiveMemory/ctx/internal/config/embed/text"
 	"github.com/ActiveMemory/ctx/internal/config/stats"
+	errCtx "github.com/ActiveMemory/ctx/internal/err/context"
 )
 
 // Run executes the doctor command logic, running all health
 // checks and producing either JSON or human-readable output.
 //
+// Context-dependent checks that fail with
+// [errCtx.ErrDirNotDeclared] emit exactly one "did not run
+// (cascade)" line; later dependent checks are silently skipped
+// so the report shows one loud entry instead of N copies of the
+// same message. Non-dependent checks (companion config, plugin,
+// system resources, etc.) continue to run regardless.
+//
 // Parameters:
 //   - cmd: Cobra command for output stream
 //   - jsonOutput: If true, output as JSON
@@ -26,19 +40,111 @@ import (
 func Run(cmd *cobra.Command, jsonOutput bool) error {
 	report := &check.Report{}
 
-	check.ContextInitialized(report)
-	check.RequiredFiles(report)
-	check.CtxrcValidation(report)
-	check.Drift(report)
-	check.PluginEnablement(report)
-	check.CompanionConfig(report)
-	check.EventLogging(report)
-	check.Webhook(report)
-	check.Reminders(report)
-	check.TaskCompletion(report)
-	check.ContextTokenSize(report)
-	check.SystemResources(report)
-	check.RecentEventActivity(report)
+	entries := []check.Entry{
+		{
+			Name:     doctor.CheckContextInit,
+			Category: doctor.CategoryStructure,
+			Fn:       check.ContextInitialized,
+		},
+		{
+			Name:     doctor.CheckRequiredFiles,
+			Category: doctor.CategoryStructure,
+			Fn:       check.RequiredFiles,
+		},
+		{
+			Name:     doctor.CheckCtxrcValidation,
+			Category: doctor.CategoryStructure,
+			Fn:       check.CtxrcValidation,
+		},
+		{
+			Name:     doctor.CheckDrift,
+			Category: doctor.CategoryQuality,
+			Fn:       check.Drift,
+		},
+		{
+			Name:     doctor.CheckPluginInstalled,
+			Category: doctor.CategoryPlugin,
+			Fn:       check.PluginEnablement,
+		},
+		{
+			Name:     doctor.CheckCompanionConfig,
+			Category: doctor.CategoryPlugin,
+			Fn:       check.CompanionConfig,
+		},
+		{
+			Name:     doctor.CheckEventLogging,
+			Category: doctor.CategoryHooks,
+			Fn:       check.EventLogging,
+		},
+		{
+			Name:     doctor.CheckWebhook,
+			Category: doctor.CategoryHooks,
+			Fn:       check.Webhook,
+		},
+		{
+			Name:     doctor.CheckReminders,
+			Category: doctor.CategoryState,
+			Fn:       check.Reminders,
+		},
+		{
+			Name:     doctor.CheckTaskCompletion,
+			Category: doctor.CategoryState,
+			Fn:       check.TaskCompletion,
+		},
+		{
+			Name:     doctor.CheckContextSize,
+			Category: doctor.CategorySize,
+			Fn:       check.ContextTokenSize,
+		},
+		{
+			Name:     doctor.CheckResourceMemory,
+			Category: doctor.CategoryResources,
+			Fn:       check.SystemResources,
+		},
+		{
+			Name:     doctor.CheckRecentEvents,
+			Category: doctor.CategoryEvents,
+			Fn:       check.RecentEventActivity,
+		},
+	}
+
+	// Track whether a context-dependent check has already
+	// failed due to errCtx.ErrDirNotDeclared. Subsequent
+	// dependent failures with the same root cause are folded
+	// into a single diagnostic.
+	ctxCascadeAnnounced := false
+
+	for _, entry := range entries {
+		err := entry.Fn(report)
+		if err == nil {
+			continue
+		}
+		if errors.Is(err, errCtx.ErrDirNotDeclared) {
+			if ctxCascadeAnnounced {
+				// Already reported once; skip silently.
+				continue
+			}
+			ctxCascadeAnnounced = true
+			report.Results = append(report.Results, check.Result{
+				Name:     entry.Name,
+				Category: entry.Category,
+				Status:   stats.StatusError,
+				Message: fmt.Sprintf(desc.Text(
+					text.DescKeyDoctorCheckDidNotRunCascade,
+				), err),
+			})
+			continue
+		}
+		// Non-cascade error: attribute to the specific check.
+		report.Results = append(report.Results, check.Result{
+			Name:     entry.Name,
+			Category: entry.Category,
+			Status:   stats.StatusError,
+			Message: fmt.Sprintf(
+				desc.Text(text.DescKeyDoctorCheckDidNotRun), err,
+			),
+		})
+	}
 
 	// Count warnings and errors.
 	for _, r := range report.Results {
diff --git a/internal/cli/doctor/core/check/check.go b/internal/cli/doctor/core/check/check.go
index 3cf71e5f0..0844d523f 100644
--- a/internal/cli/doctor/core/check/check.go
+++ b/internal/cli/doctor/core/check/check.go
@@ -8,6 +8,7 @@ package check
 
 import (
 	"encoding/json"
+	"errors"
 	"fmt"
 	"os"
 	"path/filepath"
@@ -32,6 +33,7 @@ import (
 	"github.com/ActiveMemory/ctx/internal/context/validate"
 	"github.com/ActiveMemory/ctx/internal/drift"
 	"github.com/ActiveMemory/ctx/internal/entity"
+	errCtx "github.com/ActiveMemory/ctx/internal/err/context"
 	"github.com/ActiveMemory/ctx/internal/io"
 	"github.com/ActiveMemory/ctx/internal/log/event"
 	"github.com/ActiveMemory/ctx/internal/rc"
@@ -39,12 +41,23 @@ import (
 )
 
 // ContextInitialized verifies that a .context/ directory
-// exists.
+// exists. Always emits a Result of its own; a missing directory IS
+// the diagnostic and maps to StatusError. A resolver or stat failure
+// that cannot confirm either way is propagated so the runner shows
+// "did not run" instead of reporting a confident "missing."
 //
 // Parameters:
 //   - report: Report to append the result to
-func ContextInitialized(report *Report) {
-	if validate.Exists("") {
+//
+// Returns:
+//   - error: non-nil when validate.Exists cannot reach a definitive
+//     answer (resolver or stat failure).
+func ContextInitialized(report *Report) error {
+	exists, existsErr := validate.Exists("")
+	if existsErr != nil {
+		return existsErr
+	}
+	if exists {
 		report.Results = append(report.Results, Result{
 			Name:     doctor.CheckContextInit,
 			Category: doctor.CategoryStructure,
@@ -59,6 +72,7 @@ func ContextInitialized(report *Report) {
 			Message:  desc.Text(text.DescKeyDoctorContextInitializedError),
 		})
 	}
+	return nil
 }
 
 // RequiredFiles verifies that all required context files are
@@ -66,8 +80,16 @@ func ContextInitialized(report *Report) {
 //
 // Parameters:
 //   - report: Report to append the result to
-func RequiredFiles(report *Report) {
-	dir := rc.ContextDir()
+//
+// Returns:
+//   - error: [errCtx.ErrDirNotDeclared] when the context directory
+//     cannot be resolved; the runner renders a standard "did not run"
+//     line in that case.
+func RequiredFiles(report *Report) error {
+	dir, err := rc.ContextDir()
+	if err != nil {
+		return err
+	}
 	var missing []string
 	for _, f := range ctx.FilesRequired {
 		path := filepath.Join(dir, f)
@@ -101,6 +123,7 @@ func RequiredFiles(report *Report) {
 			),
 		})
 	}
+	return nil
 }
 
 // CtxrcValidation validates the .ctxrc file for unknown
@@ -108,7 +131,11 @@ func RequiredFiles(report *Report) {
 //
 // Parameters:
 //   - report: Report to append the result to
-func CtxrcValidation(report *Report) {
+//
+// Returns:
+//   - error: always nil; parse problems are reported as
+//     StatusError/StatusWarning entries rather than returned.
+func CtxrcValidation(report *Report) error {
 	data, readErr := io.SafeReadUserFile(file.CtxRC)
 	if readErr != nil {
 		// No .ctxrc is fine - defaults are used.
@@ -118,7 +145,7 @@ func CtxrcValidation(report *Report) {
 			Status:   stats.StatusOK,
 			Message:  desc.Text(text.DescKeyDoctorCtxrcValidationOkNoFile),
 		})
-		return
+		return nil
 	}
 
 	warnings, validateErr := rc.Validate(data)
@@ -132,7 +159,7 @@ func CtxrcValidation(report *Report) {
 				validateErr,
 			),
 		})
-		return
+		return nil
 	}
 
 	if len(warnings) > 0 {
@@ -148,7 +175,7 @@ func CtxrcValidation(report *Report) {
 				),
 			),
 		})
-		return
+		return nil
 	}
 
 	report.Results = append(report.Results, Result{
@@ -157,6 +184,7 @@ func CtxrcValidation(report *Report) {
 		Status:   stats.StatusOK,
 		Message:  desc.Text(text.DescKeyDoctorCtxrcValidationOk),
 	})
+	return nil
 }
 
 // Drift detects stale paths or missing files referenced in
@@ -164,13 +192,18 @@ func CtxrcValidation(report *Report) {
 //
 // Parameters:
 //   - report: Report to append the result to
-func Drift(report *Report) {
-	if !validate.Exists("") {
-		return // skip drift check if not initialized
-	}
-
+//
+// Returns:
+//   - error: [errCtx.ErrDirNotDeclared] when the context directory
+//     cannot be resolved via [load.Do]; the runner renders a standard
+//     "did not run" line in that case. Transient load failures are
+//     reported inline as a StatusWarning and return nil.
+func Drift(report *Report) error {
 	c, loadErr := load.Do("")
 	if loadErr != nil {
+		if errors.Is(loadErr, errCtx.ErrDirNotDeclared) {
+			return loadErr
+		}
 		report.Results = append(report.Results, Result{
 			Name:     doctor.CheckDrift,
 			Category: doctor.CategoryQuality,
@@ -180,7 +213,7 @@ func Drift(report *Report) {
 				loadErr,
 			),
 		})
-		return
+		return nil
 	}
 
 	driftReport := drift.Detect(c)
@@ -194,7 +227,7 @@ func Drift(report *Report) {
 			Status:   stats.StatusOK,
 			Message:  desc.Text(text.DescKeyDoctorDriftOk),
 		})
-		return
+		return nil
 	}
 
 	var parts []string
@@ -231,6 +264,7 @@ func Drift(report *Report) {
 			strings.Join(parts, cfgToken.CommaSpace),
 		),
 	})
+	return nil
 }
 
 // CompanionConfig reports whether companion tool checks
@@ -238,7 +272,10 @@ func Drift(report *Report) {
 //
 // Parameters:
 //   - report: Report to append the result to
-func CompanionConfig(report *Report) {
+//
+// Returns:
+//   - error: always nil.
+func CompanionConfig(report *Report) error {
 	if rc.CompanionCheck() {
 		report.Results = append(report.Results, Result{
 			Name:     doctor.CheckCompanionConfig,
@@ -254,6 +291,7 @@ func CompanionConfig(report *Report) {
 			Message:  desc.Text(text.DescKeyDoctorCompanionConfigInfo),
 		})
 	}
+	return nil
 }
 
 // PluginEnablement checks whether the ctx plugin is
@@ -261,7 +299,10 @@ func CompanionConfig(report *Report) {
 //
 // Parameters:
 //   - report: Report to append the result to
-func PluginEnablement(report *Report) {
+//
+// Returns:
+//   - error: always nil.
+func PluginEnablement(report *Report) error {
 	installed := initCore.Installed()
 	if !installed {
 		report.Results = append(report.Results, Result{
@@ -270,7 +311,7 @@ func PluginEnablement(report *Report) {
 			Status:   stats.StatusInfo,
 			Message:  desc.Text(text.DescKeyDoctorPluginInstalledInfo),
 		})
-		return
+		return nil
 	}
 
 	report.Results = append(report.Results, Result{
@@ -313,13 +354,17 @@ func PluginEnablement(report *Report) {
 			),
 		})
 	}
+	return nil
 }
 
 // EventLogging checks whether event logging is enabled.
 //
 // Parameters:
 //   - report: Report to append the result to
-func EventLogging(report *Report) {
+//
+// Returns:
+//   - error: always nil.
+func EventLogging(report *Report) error {
 	if rc.EventLog() {
 		report.Results = append(report.Results, Result{
 			Name:     doctor.CheckEventLogging,
@@ -335,6 +380,7 @@ func EventLogging(report *Report) {
 			Message:  desc.Text(text.DescKeyDoctorEventLoggingInfo),
 		})
 	}
+	return nil
 }
 
 // Webhook checks whether a webhook notification endpoint
@@ -342,8 +388,16 @@ func EventLogging(report *Report) {
 //
 // Parameters:
 //   - report: Report to append the result to
-func Webhook(report *Report) {
-	dir := rc.ContextDir()
+//
+// Returns:
+//   - error: [errCtx.ErrDirNotDeclared] when the context directory
+//     cannot be resolved; the runner renders a standard "did not run"
+//     line in that case.
+func Webhook(report *Report) error {
+	dir, err := rc.ContextDir()
+	if err != nil {
+		return err
+	}
 	encPath := filepath.Join(dir, crypto.NotifyEnc)
 	if _, statErr := os.Stat(encPath); statErr == nil {
 		report.Results = append(report.Results, Result{
@@ -360,6 +414,7 @@ func Webhook(report *Report) {
 			Message:  desc.Text(text.DescKeyDoctorWebhookInfo),
 		})
 	}
+	return nil
 }
 
 // Reminders checks for pending reminders in the context
@@ -367,18 +422,31 @@ func Webhook(report *Report) {
 //
 // Parameters:
 //   - report: Report to append the result to
-func Reminders(report *Report) {
-	dir := rc.ContextDir()
+//
+// Returns:
+//   - error: [errCtx.ErrDirNotDeclared] when the context directory
+//     cannot be resolved; the runner renders a standard "did not run"
+//     line in that case.
+func Reminders(report *Report) error {
+	dir, err := rc.ContextDir()
+	if err != nil {
+		return err
+	}
 	remindersPath := filepath.Join(dir, reminder.File)
 	data, readErr := io.SafeReadUserFile(remindersPath)
 	if readErr != nil {
-		report.Results = append(report.Results, Result{
-			Name:     doctor.CheckReminders,
-			Category: doctor.CategoryState,
-			Status:   stats.StatusOK,
-			Message:  desc.Text(text.DescKeyDoctorRemindersOk),
-		})
-		return
+		if errors.Is(readErr, os.ErrNotExist) {
+			// Legitimate: no reminders file ⇒ no pending reminders.
+			report.Results = append(report.Results, Result{
+				Name:     doctor.CheckReminders,
+				Category: doctor.CategoryState,
+				Status:   stats.StatusOK,
+				Message:  desc.Text(text.DescKeyDoctorRemindersOk),
+			})
+			return nil
+		}
+		// Permission denied, I/O error, etc.: surface it.
+		return readErr
 	}
 
 	var reminders []any
@@ -391,7 +459,7 @@ func Reminders(report *Report) {
 			Status:   stats.StatusOK,
 			Message:  desc.Text(text.DescKeyDoctorRemindersOk),
 		})
-		return
+		return nil
 	}
 
 	count := len(reminders)
@@ -413,6 +481,7 @@ func Reminders(report *Report) {
 			),
 		})
 	}
+	return nil
 }
 
 // TaskCompletion analyzes the task completion ratio and
@@ -420,12 +489,24 @@ func Reminders(report *Report) {
 //
 // Parameters:
 //   - report: Report to append the result to
-func TaskCompletion(report *Report) {
-	dir := rc.ContextDir()
+//
+// Returns:
+//   - error: [errCtx.ErrDirNotDeclared] when the context directory
+//     cannot be resolved; a missing TASKS.md ([os.ErrNotExist]) is a
+//     legitimate skip and returns nil; any other read failure
+//     (permissions, I/O) is propagated so the runner can report it.
+func TaskCompletion(report *Report) error {
+	dir, err := rc.ContextDir()
+	if err != nil {
+		return err
+	}
 	tasksPath := filepath.Join(dir, ctx.Task)
 	data, readErr := io.SafeReadUserFile(tasksPath)
 	if readErr != nil {
-		return // no tasks file, skip
+		if errors.Is(readErr, os.ErrNotExist) {
+			return nil // legitimate: no TASKS.md yet, nothing to analyze
+		}
+		return readErr
 	}
 
 	matches := regex.TaskMultiline.FindAllStringSubmatch(
@@ -442,7 +523,7 @@ func TaskCompletion(report *Report) {
 	total := completed + pending
 
 	if total == 0 {
-		return // no tasks to report on
+		return nil // no tasks to report on
 	}
 
 	ratio := completed * stats.PercentMultiplier / total
@@ -470,6 +551,7 @@ func TaskCompletion(report *Report) {
 			Message:  msg,
 		})
 	}
+	return nil
 }
 
 // ContextTokenSize estimates context token usage and
@@ -477,7 +559,12 @@ func TaskCompletion(report *Report) {
 //
 // Parameters:
 //   - report: Report to append the result to
-func ContextTokenSize(report *Report) {
+//
+// Returns:
+//   - error: [errCtx.ErrDirNotDeclared] when context load fails
+//     for that reason; the runner renders a standard "did not run"
+//     line. Other load failures return nil without emitting a Result.
+func ContextTokenSize(report *Report) error {
 	indexed := make(
 		map[string]bool, len(ctx.ReadOrder),
 	)
@@ -488,7 +575,10 @@ func ContextTokenSize(report *Report) {
 	var totalTokens int
 	c, loadErr := load.Do("")
 	if loadErr != nil {
-		return
+		if errors.Is(loadErr, errCtx.ErrDirNotDeclared) {
+			return loadErr
+		}
+		return nil
 	}
 
 	type fileTokens struct {
@@ -545,6 +635,7 @@ func ContextTokenSize(report *Report) {
 			),
 		})
 	}
+	return nil
 }
 
 // RecentEventActivity reports the most recent event log
@@ -552,22 +643,41 @@ func ContextTokenSize(report *Report) {
 //
 // Parameters:
 //   - report: Report to append the result to
-func RecentEventActivity(report *Report) {
+//
+// Returns:
+//   - error: [errCtx.ErrDirNotDeclared] when the event log path
+//     cannot be resolved because no context directory is declared;
+//     the runner renders a standard "did not run" line. Transient
+//     read or parse failures return nil and emit a StatusInfo
+//     placeholder.
+func RecentEventActivity(report *Report) error {
 	if !rc.EventLog() {
-		return // skip if logging disabled
+		return nil // skip if logging disabled
 	}
 
 	events, queryErr := event.Query(
 		entity.EventQueryOpts{Last: 1},
 	)
-	if queryErr != nil || len(events) == 0 {
+	if queryErr != nil {
+		if errors.Is(queryErr, errCtx.ErrDirNotDeclared) {
+			return queryErr
+		}
+		report.Results = append(report.Results, Result{
+			Name:     doctor.CheckRecentEvents,
+			Category: doctor.CategoryEvents,
+			Status:   stats.StatusInfo,
+			Message:  desc.Text(text.DescKeyDoctorRecentEventsInfo),
+		})
+		return nil
+	}
+	if len(events) == 0 {
 		report.Results = append(report.Results, Result{
 			Name:     doctor.CheckRecentEvents,
 			Category: doctor.CategoryEvents,
 			Status:   stats.StatusInfo,
 			Message:  desc.Text(text.DescKeyDoctorRecentEventsInfo),
 		})
-		return
+		return nil
 	}
 
 	report.Results = append(report.Results, Result{
@@ -579,6 +689,7 @@ func RecentEventActivity(report *Report) {
 			events[len(events)-1].Timestamp,
 		),
 	})
+	return nil
 }
 
 // SystemResources collects and evaluates system resource
@@ -586,9 +697,13 @@ func RecentEventActivity(report *Report) {
 //
 // Parameters:
 //   - report: Report to append the result to
-func SystemResources(report *Report) {
+//
+// Returns:
+//   - error: always nil.
+func SystemResources(report *Report) error {
 	snap := sysinfo.Collect()
 	AddResourceResults(report, snap)
+	return nil
 }
 
 // AddResourceResults appends per-metric resource results to
diff --git a/internal/cli/doctor/core/check/doc.go b/internal/cli/doctor/core/check/doc.go
index f5785cea0..ffb97d3a9 100644
--- a/internal/cli/doctor/core/check/doc.go
+++ b/internal/cli/doctor/core/check/doc.go
@@ -4,9 +4,62 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package check runs health checks and builds a diagnostic
-// report. Covers context initialization, required files,
-// .ctxrc validation, drift detection, plugin enablement,
-// event logging, reminders, task completion, token budgets,
-// and system resource metrics.
+// Package check is the **brain** of `ctx doctor`: a battery of
+// independent health probes that together produce a single
+// diagnostic report covering everything that can plausibly be
+// wrong with a ctx installation or with the project's
+// `.context/` state.
+//
+// The package is the only thing the doctor CLI calls on the
+// "produce findings" side. The doctor command itself
+// orchestrates output; this package decides what to look at
+// and how to grade it.
+//
+// # The Probe Surface
+//
+// Each probe answers exactly one yes/no question and emits a
+// [Result] with a name, status (Ok / Warning / Error),
+// and a one-line message. The full battery, run by [Run]:
+//
+//   - **Context initialization**: `.context/` exists and
+//     is populated.
+//   - **Required files**: TASKS, DECISIONS, LEARNINGS,
+//     CONVENTIONS, ARCHITECTURE, CONSTITUTION present.
+//   - **`.ctxrc` validation**: file parses, all values
+//     within range.
+//   - **Drift**: wraps [internal/drift.Detect] and
+//     surfaces the report's status.
+//   - **Plugin enablement**: Claude Code plugin
+//     installed AND enabled in `~/.claude/settings.json`.
+//   - **Event logging**: if `event_log: true`, the log
+//     file exists and is writable.
+//   - **Reminders**: pending reminder count and freshness.
+//   - **Task completion**: open task count, oldest open
+//     task age (consolidation nudge threshold).
+//   - **Token budgets**: currently injected size against
+//     the configured `injection_token_warn` and
+//     `context_window`.
+//   - **System resource metrics**: wraps
+//     [internal/sysinfo] to surface load/memory/disk
+//     pressure.
+//
+// New probes plug in by adding one more entry to the
+// dispatch table in check.go and one more name constant to
+// the config/check package (audited to keep CLI output stable).
+//
+// # Severity Roll-Up
+//
+// Each probe produces its own status. The doctor CLI rolls
+// the slice up to a single banner per the same rule the
+// drift package uses: any **Error** beats any **Warning**
+// beats **Ok**. JSON output preserves the per-probe detail
+// for tooling.
+//
+// # Stateless and Concurrency-Safe
+//
+// Probes hold no state and do not coordinate. They could
+// be parallelized; they currently run sequentially because
+// the slowest probe (`sysinfo` shelling out on macOS) is
+// still under 100ms and the simpler ordering keeps output
+// deterministic.
 package check
diff --git a/internal/cli/doctor/core/check/types.go b/internal/cli/doctor/core/check/types.go
index 1549f9a80..9311cf051 100644
--- a/internal/cli/doctor/core/check/types.go
+++ b/internal/cli/doctor/core/check/types.go
@@ -32,3 +32,19 @@ type Report struct {
 	Warnings int      `json:"warnings"`
 	Errors   int      `json:"errors"`
 }
+
+// Entry pairs a check function with the name/category to attribute
+// a failure to. The runner uses an ordered slice of Entry values to
+// produce a uniform "did not run" line when a check returns an
+// error, instead of every check having to emit its own failure
+// Result for the same cause.
+//
+// Fields:
+//   - Name: Machine-readable identifier to attribute failures to
+//   - Category: Grouping label (Structure, Quality, etc.)
+//   - Fn: The check function itself
+type Entry struct {
+	Name     string
+	Category string
+	Fn       func(*Report) error
+}
diff --git a/internal/cli/doctor/core/doc.go b/internal/cli/doctor/core/doc.go
index 1f87a5e0d..c7e609a7b 100644
--- a/internal/cli/doctor/core/doc.go
+++ b/internal/cli/doctor/core/doc.go
@@ -4,12 +4,38 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package core contains the individual health checks for the
-// doctor command.
-//
-// Each Check* function adds findings to a shared Report:
-// [CheckContextInitialized] verifies .context/ exists,
-// [CheckRequiredFiles] verifies mandatory files are present,
-// [CheckCtxrcValidation] validates .ctxrc syntax, and
-// [CheckDrift] runs the drift detector.
+// Package core is the umbrella for the doctor command's
+// health-check logic.
+//
+// # Overview
+//
+// The doctor command runs a series of diagnostic checks
+// against the user's context directory, configuration,
+// plugins, hooks, and system resources. This package
+// groups the check and output sub-packages that contain
+// the actual implementations.
+//
+// # Sub-packages
+//
+//   - check: individual diagnostic functions, each
+//     appending results to a shared [check.Report].
+//   - output: renders the report as human-readable
+//     text or machine-readable JSON.
+//
+// # Check Categories
+//
+// Checks are grouped by category:
+//
+//   - Structure: context init, required files, ctxrc
+//   - Quality: drift detection
+//   - Plugin: companion config, plugin enablement
+//   - Hooks: event logging, webhook
+//   - State: reminders, task completion
+//   - Size: context token budget
+//   - Events: recent event log activity
+//   - Resources: memory, swap, disk, CPU load
+//
+// The cmd layer runs each check function in sequence,
+// tallies warnings and errors, then delegates to
+// output.Human or output.JSON for rendering.
 package core
diff --git a/internal/cli/doctor/core/output/doc.go b/internal/cli/doctor/core/output/doc.go
index d44b38b63..6ee06d349 100644
--- a/internal/cli/doctor/core/output/doc.go
+++ b/internal/cli/doctor/core/output/doc.go
@@ -4,8 +4,26 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package output formats doctor reports as JSON or
-// human-readable text. JSON output uses indented encoding
-// for machine consumption. Human output groups results by
-// category with status indicators.
+// Package output formats doctor reports for display in two
+// modes: JSON and human-readable text.
+//
+// # JSON Mode
+//
+// The [JSON] function serializes a [check.Report] as
+// indented JSON and writes it to the command's output
+// stream. This is intended for machine consumption and
+// piping into other tools. Indentation uses two-space
+// indent for readability.
+//
+// # Human Mode
+//
+// The [Human] function renders the report as a categorized
+// list with status indicators. Each check result is
+// converted to a [writeDoctor.ResultItem] carrying its
+// category, status, and message. Warnings and errors from
+// the report are appended as summary counts at the bottom.
+//
+// Both functions accept a cobra command for output routing
+// and return an error for interface consistency, though
+// Human always returns nil.
 package output
diff --git a/internal/cli/doctor/doc.go b/internal/cli/doctor/doc.go
index d4892f700..75d38f571 100644
--- a/internal/cli/doctor/doc.go
+++ b/internal/cli/doctor/doc.go
@@ -1,13 +1,47 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package doctor implements the ctx doctor command group for
-// troubleshooting context health.
+// Package doctor implements **`ctx doctor`**, the
+// one-stop structural-health command users (and
+// onboarding scripts) run when something feels off:
+// hooks not firing, drift accumulating, plugin not
+// enabled, settings file half-merged, etc.
 //
-// Runs structural health checks, analyzes event log patterns,
-// and presents findings with suggested actions. Supports both
-// human-readable checklist and JSON output formats.
+// The doctor is a *shell*: it asks
+// [internal/cli/doctor/core/check] for the full battery
+// of probes, then renders the results in either
+// human-readable checklist form (default) or structured
+// JSON form (`--json`).
+//
+// # Default Output
+//
+// The checklist groups probes by category (Setup,
+// Context, Plugin, State, Resources) and renders each
+// with a status glyph (`✓`, `⚠`, `✗`) plus a one-line
+// message. The roll-up banner at the end summarizes:
+// "all good", "warnings present", or "violations
+// present", matching the same severity ladder
+// [internal/drift] uses.
+//
+// # JSON Output
+//
+// `ctx doctor --json` emits one record per probe with
+// `name`, `status`, `message`, and any structured
+// detail. Used by CI and by the `_ctx-doctor` skill
+// when the AI is the consumer.
+//
+// # Exit Codes
+//
+//   - **0**: all checks passed.
+//   - **1**: warnings present.
+//   - **3**: violations present (so CI scripts can
+//     gate on `>= 3`).
+//
+// # Sub-Packages
+//
+//   - **[core/check]**: the actual probe battery
+//     (no UI, no CLI parsing).
 package doctor
diff --git a/internal/cli/doctor/doctor.go b/internal/cli/doctor/doctor.go
index 0ef4086df..661d18ad0 100644
--- a/internal/cli/doctor/doctor.go
+++ b/internal/cli/doctor/doctor.go
@@ -4,8 +4,6 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package doctor provides the "ctx doctor" command for structural
-// health checks across context, hooks, and configuration.
 package doctor
 
 import (
diff --git a/internal/cli/doctor/doctor_test.go b/internal/cli/doctor/doctor_test.go
index 931ad3b4a..2790208de 100644
--- a/internal/cli/doctor/doctor_test.go
+++ b/internal/cli/doctor/doctor_test.go
@@ -17,6 +17,7 @@ import (
 	"github.com/ActiveMemory/ctx/internal/cli/doctor/core/check"
 	"github.com/ActiveMemory/ctx/internal/config/claude"
 	"github.com/ActiveMemory/ctx/internal/config/ctx"
+	cfgDir "github.com/ActiveMemory/ctx/internal/config/dir"
 	"github.com/ActiveMemory/ctx/internal/config/doctor"
 	"github.com/ActiveMemory/ctx/internal/config/stats"
 	"github.com/ActiveMemory/ctx/internal/rc"
@@ -25,7 +26,10 @@ import (
 
 func setupContextDir(t *testing.T) string {
 	t.Helper()
-	dir := t.TempDir()
+	dir := filepath.Join(t.TempDir(), cfgDir.Context)
+	if mkErr := os.MkdirAll(dir, 0o700); mkErr != nil {
+		t.Fatal(mkErr)
+	}
 	t.Setenv("CTX_DIR", dir)
 	rc.Reset()
 
@@ -64,7 +68,10 @@ func TestDoctor_Healthy(t *testing.T) {
 }
 
 func TestDoctor_MissingRequiredFiles(t *testing.T) {
-	dir := t.TempDir()
+	dir := filepath.Join(t.TempDir(), cfgDir.Context)
+	if mkErr := os.MkdirAll(dir, 0o700); mkErr != nil {
+		t.Fatal(mkErr)
+	}
 	t.Setenv("CTX_DIR", dir)
 	rc.Reset()
 
@@ -541,7 +548,9 @@ func TestCheckCtxrcValidation_NoFile(t *testing.T) {
 	t.Cleanup(func() { _ = os.Chdir(orig) })
 
 	report := &check.Report{}
-	check.CtxrcValidation(report)
+	if err := check.CtxrcValidation(report); err != nil {
+		t.Fatalf("CtxrcValidation: %v", err)
+	}
 
 	if len(report.Results) != 1 {
 		t.Fatalf("expected 1 result, got %d", len(report.Results))
@@ -574,7 +583,9 @@ func TestCheckCtxrcValidation_ValidFile(t *testing.T) {
 	t.Cleanup(func() { _ = os.Chdir(orig) })
 
 	report := &check.Report{}
-	check.CtxrcValidation(report)
+	if err := check.CtxrcValidation(report); err != nil {
+		t.Fatalf("CtxrcValidation: %v", err)
+	}
 
 	if len(report.Results) != 1 {
 		t.Fatalf("expected 1 result, got %d", len(report.Results))
@@ -607,7 +618,9 @@ func TestCheckCtxrcValidation_Typo(t *testing.T) {
 	t.Cleanup(func() { _ = os.Chdir(orig) })
 
 	report := &check.Report{}
-	check.CtxrcValidation(report)
+	if err := check.CtxrcValidation(report); err != nil {
+		t.Fatalf("CtxrcValidation: %v", err)
+	}
 
 	if len(report.Results) != 1 {
 		t.Fatalf("expected 1 result, got %d", len(report.Results))
diff --git a/internal/cli/drift/cmd/root/doc.go b/internal/cli/drift/cmd/root/doc.go
index 053001a22..155bcdf6b 100644
--- a/internal/cli/drift/cmd/root/doc.go
+++ b/internal/cli/drift/cmd/root/doc.go
@@ -4,9 +4,46 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package root implements the ctx drift command.
+// Package root implements the "ctx drift" command that
+// detects stale or invalid context and optionally
+// auto-fixes supported issues.
 //
-// It detects stale or invalid context by checking for broken path
-// references, staleness indicators, constitution violations, and
-// missing required files.
+// # What It Does
+//
+// Loads the parsed context from .context/, runs the
+// drift detection engine, and reports any issues
+// found. Detected issue categories include:
+//
+//   - Broken path references in context files
+//   - Staleness indicators (old timestamps, etc.)
+//   - Constitution violations
+//   - Missing required files
+//
+// When --fix is set, the command attempts to resolve
+// supported issue types automatically, then re-runs
+// detection to show the updated state.
+//
+// # Flags
+//
+//   - --json: Output results as machine-readable
+//     JSON instead of human-readable text.
+//   - --fix: Attempt to auto-fix supported issues
+//     (staleness, missing_file). Prints a summary
+//     of fixed, skipped, and errored items.
+//
+// # Output
+//
+// In text mode, prints warnings and violations with
+// file paths and descriptions. In JSON mode, outputs
+// a structured report. When --fix is active, prints
+// a fix header, per-item results, counts, and then
+// the re-checked state.
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command with --json and
+// --fix flags. [Run] loads context via
+// [context/load], runs [drift.Detect], optionally
+// applies fixes via [core/fix], and renders output
+// through [core/out].
 package root
diff --git a/internal/cli/drift/cmd/root/run.go b/internal/cli/drift/cmd/root/run.go
index 62d62b91f..7c88c7640 100644
--- a/internal/cli/drift/cmd/root/run.go
+++ b/internal/cli/drift/cmd/root/run.go
@@ -17,6 +17,7 @@ import (
 	"github.com/ActiveMemory/ctx/internal/drift"
 	errCtx "github.com/ActiveMemory/ctx/internal/err/context"
 	errInit "github.com/ActiveMemory/ctx/internal/err/initialize"
+	"github.com/ActiveMemory/ctx/internal/rc"
 	writeDrift "github.com/ActiveMemory/ctx/internal/write/drift"
 )
 
@@ -36,6 +37,10 @@ import (
 func Run(
 	cmd *cobra.Command, jsonOutput, doFix bool,
 ) error {
+	if _, ctxErr := rc.RequireContextDir(); ctxErr != nil {
+		cmd.SilenceUsage = true
+		return ctxErr
+	}
 	ctx, err := load.Do("")
 	if err != nil {
 		if _, ok := errors.AsType[*errCtx.NotFoundError](err); ok {
diff --git a/internal/cli/drift/core/doc.go b/internal/cli/drift/core/doc.go
index 32f7f4c1c..d15be8b3f 100644
--- a/internal/cli/drift/core/doc.go
+++ b/internal/cli/drift/core/doc.go
@@ -4,10 +4,40 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package core contains drift detection output and auto-fix logic.
+// Package core is the umbrella for the drift command's
+// detection output, auto-fix, and sanitization logic.
 //
-// [OutputDriftText] renders a drift report as a human-readable
-// checklist. [ApplyFixes] runs interactive fixes for detected
-// issues. [FixStaleness] touches stale files and [FixMissingFile]
-// creates missing context files from templates.
+// # Overview
+//
+// The drift command detects stale paths, missing files,
+// constitution violations, and other quality issues in
+// the user's .context/ directory. This package groups
+// three sub-packages that handle output formatting,
+// automated fixes, and display sanitization.
+//
+// # Sub-packages
+//
+//   - out: renders drift reports as human-readable
+//     text (with icons and grouping) or as structured
+//     JSON for machine consumption. Exports [out.DriftText]
+//     and [out.DriftJSON].
+//   - fix: applies automated corrections for fixable
+//     drift issues. [fix.Apply] iterates the report,
+//     archiving completed tasks for staleness issues
+//     and creating files for missing-file issues.
+//     Tracks results in [fix.Result].
+//   - sanitize: converts internal check identifiers
+//     to human-readable labels via [sanitize.FormatCheckName].
+//
+// # Data Flow
+//
+// The cmd layer loads context, calls drift.Detect to
+// produce a Report, then delegates to this package:
+//
+//  1. out.DriftText or out.DriftJSON renders the
+//     report for display.
+//  2. If --fix is passed, fix.Apply walks the report
+//     and attempts auto-remediation.
+//  3. sanitize helpers are used by the output layer
+//     to translate check names for display.
 package core
diff --git a/internal/cli/drift/core/fix/doc.go b/internal/cli/drift/core/fix/doc.go
index 16b54a085..b40fcbdf4 100644
--- a/internal/cli/drift/core/fix/doc.go
+++ b/internal/cli/drift/core/fix/doc.go
@@ -4,8 +4,49 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package fix auto-fixes drift issues like staleness and
-// missing required files. Archives completed tasks from the
-// Completed section and creates missing context files from
-// templates. Issues like dead paths and secrets are skipped.
+// Package fix is the **auto-remediation half** of
+// `ctx drift`: given a [drift.Report], it applies the
+// fixes the package knows how to apply safely (archiving
+// completed tasks, creating missing required files from
+// templates) and skips the issues that need human
+// judgment (dead paths, leaked secrets, constitution
+// violations).
+//
+// The package is the conservative side of the drift
+// loop. Anything that could be wrong if applied
+// blindly stays in the report and the user fixes it
+// by hand.
+//
+// # What Gets Auto-Fixed
+//
+//   - **Stale-completed tasks**: tasks marked `[x]`
+//     in the body of TASKS.md (not in a Completed
+//     section) are archived via [internal/tidy].
+//   - **Missing required files**: empty placeholders
+//     for the foundation files (CONSTITUTION,
+//     CONVENTIONS, etc.) are deployed from the
+//     embedded templates.
+//
+// # What Stays Manual
+//
+//   - **Dead path references**: the package cannot
+//     know whether a path is genuinely gone or just
+//     temporarily missing.
+//   - **Leaked secrets**: the user must redact and
+//     rotate; auto-removal could corrupt history.
+//   - **Constitution violations**: the user agreed
+//     to the rule and must un-violate it.
+//   - **File-age warnings**: staleness is
+//     informational, not fixable.
+//
+// # Public Surface
+//
+//   - **[Apply](report, contextDir)**: walks the
+//     report, applies fixable issues, returns a
+//     summary of what was changed and what was
+//     skipped.
+//
+// # Concurrency
+//
+// Filesystem-bound. Single-process, sequential.
 package fix
diff --git a/internal/cli/drift/core/fix/fix.go b/internal/cli/drift/core/fix/fix.go
index efeb03f6f..29f5e040f 100644
--- a/internal/cli/drift/core/fix/fix.go
+++ b/internal/cli/drift/core/fix/fix.go
@@ -201,12 +201,16 @@ func MissingFile(filename string) error {
 		return prompt.NoTemplate(filename, err)
 	}
 
-	targetPath := filepath.Join(rc.ContextDir(), filename)
+	ctxDir, ctxErr := rc.ContextDir()
+	if ctxErr != nil {
+		return ctxErr
+	}
+	targetPath := filepath.Join(ctxDir, filename)
 
 	if mkErr := ctxIo.SafeMkdirAll(
-		rc.ContextDir(), fs.PermExec,
+		ctxDir, fs.PermExec,
 	); mkErr != nil {
-		return errFs.Mkdir(rc.ContextDir(), mkErr)
+		return errFs.Mkdir(ctxDir, mkErr)
 	}
 
 	if writeErr := ctxIo.SafeWriteFile(
diff --git a/internal/cli/drift/core/out/doc.go b/internal/cli/drift/core/out/doc.go
index 666c171d6..881c42804 100644
--- a/internal/cli/drift/core/out/doc.go
+++ b/internal/cli/drift/core/out/doc.go
@@ -4,8 +4,41 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package out formats drift reports as text or JSON. Text
-// output groups violations, warnings by type (path refs,
-// staleness, other), and passed checks with icons. JSON
-// output includes a UTC timestamp and full issue details.
+// Package out is the **rendering half** of `ctx drift`:
+// given a [drift.Report], it formats the report for
+// either humans (terminal text with icons and
+// section grouping) or machines (JSON for tooling and
+// CI pipelines).
+//
+// # Public Surface
+//
+//   - **[Text](report, w)**: writes the
+//     human-readable report to `w`. Groups
+//     violations and warnings by issue type
+//     (path refs, staleness, missing files,
+//     other) so similar issues cluster. Renders
+//     each with status glyphs (`✗`, `⚠`, `✓`)
+//     and a one-line message; passed checks are
+//     listed at the bottom.
+//   - **[JSON](report, w)**: writes a
+//     structured JSON document with a UTC
+//     timestamp, the per-issue detail (file,
+//     line, type, message, path, rule), and the
+//     passed-check list. Stable shape suitable
+//     for `jq` parsing in CI scripts.
+//
+// # Why Two Renderers
+//
+// Humans want skimmable output with visual
+// grouping; CI wants stable JSON with explicit
+// types. Hoisting both into a single output
+// package keeps the formatting choices in one
+// place and the underlying data shape (the
+// [drift.Report]) decoupled from how it's
+// presented.
+//
+// # Concurrency
+//
+// Pure data → io.Writer transformation.
+// Concurrent callers never race.
 package out
diff --git a/internal/cli/drift/core/sanitize/doc.go b/internal/cli/drift/core/sanitize/doc.go
index a057601db..20cd43c61 100644
--- a/internal/cli/drift/core/sanitize/doc.go
+++ b/internal/cli/drift/core/sanitize/doc.go
@@ -4,8 +4,26 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package sanitize formats internal drift check identifiers
-// into human-readable names. Maps machine identifiers like
-// "path_references" and "staleness_check" to descriptive
-// labels loaded from the text asset system.
+// Package sanitize converts internal drift check
+// identifiers into human-readable display names.
+//
+// Drift checks are identified by machine-friendly constants
+// such as "path_references" or "staleness_check" defined in
+// [internal/config/drift]. These identifiers are stable for
+// configuration and code references but are not suitable for
+// end-user output.
+//
+// [FormatCheckName] maps each known [cfgDrift.CheckName] to
+// a descriptive label loaded from the embedded text asset
+// system via [desc.Text]. Unknown identifiers pass through
+// unchanged, so new checks degrade gracefully before their
+// labels are added to the asset YAML.
+//
+// # Design Choice
+//
+// Labels are resolved at call time from the asset cache
+// rather than hardcoded in a switch, so the same text
+// definition is shared between the drift CLI output and
+// any other consumer that needs check names (e.g., doctor
+// checks, hook messages).
 package sanitize
diff --git a/internal/cli/drift/doc.go b/internal/cli/drift/doc.go
index 908cfe5ad..f92eccd43 100644
--- a/internal/cli/drift/doc.go
+++ b/internal/cli/drift/doc.go
@@ -4,10 +4,29 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package drift implements the "ctx drift" command for detecting stale
-// or invalid context.
+// Package drift implements the "ctx drift" command for
+// detecting stale or invalid context.
 //
-// The drift command checks for broken path references, staleness indicators,
-// constitution violations, and missing required files. Results can be
-// output as formatted text or JSON.
+// The drift command performs a suite of health checks
+// against the .context/ directory and reports problems
+// that may cause AI agents to operate on outdated or
+// broken information. Results can be output as formatted
+// text or JSON for scripting.
+//
+// # Checks Performed
+//
+//   - Broken path references: file paths mentioned in
+//     context files that no longer exist on disk
+//   - Staleness indicators: entries whose timestamps
+//     exceed configured age thresholds
+//   - Constitution violations: rules in CONSTITUTION.md
+//     that conflict with other context files
+//   - Missing required files: context files expected by
+//     the project template but absent from .context/
+//
+// # Subpackages
+//
+//   - cmd/root: cobra command definition and flag binding
+//   - core: check implementations, path resolution, and
+//     staleness evaluation
 package drift
diff --git a/internal/cli/drift/drift_test.go b/internal/cli/drift/drift_test.go
index f1b1abc9b..111948bf7 100644
--- a/internal/cli/drift/drift_test.go
+++ b/internal/cli/drift/drift_test.go
@@ -16,8 +16,10 @@ import (
 	"github.com/ActiveMemory/ctx/internal/cli/initialize"
 	"github.com/ActiveMemory/ctx/internal/config/ctx"
 	"github.com/ActiveMemory/ctx/internal/config/dir"
+	"github.com/ActiveMemory/ctx/internal/config/env"
 	"github.com/ActiveMemory/ctx/internal/io"
 	"github.com/ActiveMemory/ctx/internal/rc"
+	"github.com/ActiveMemory/ctx/internal/testutil/testctx"
 )
 
 // TestDriftCommand tests the drift command.
@@ -34,6 +36,8 @@ func TestDriftCommand(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 
+	testctx.Declare(t, tmpDir)
+
 	// First init
 	initCmd := initialize.Cmd()
 	initCmd.SetArgs([]string{})
@@ -64,6 +68,8 @@ func TestDriftJSONOutput(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 
+	testctx.Declare(t, tmpDir)
+
 	// First init
 	initCmd := initialize.Cmd()
 	initCmd.SetArgs([]string{})
@@ -92,6 +98,7 @@ func TestRunDrift_NoContext(t *testing.T) {
 		t.Fatalf("failed to chdir: %v", err)
 	}
 	defer func() { _ = os.Chdir(origDir) }()
+	t.Setenv(env.CtxDir, "")
 
 	rc.Reset()
 	defer rc.Reset()
@@ -105,7 +112,9 @@ func TestRunDrift_NoContext(t *testing.T) {
 	if runErr == nil {
 		t.Fatal("expected error when no .context/ exists")
 	}
-	if !strings.Contains(runErr.Error(), "not initialized") {
+	// Under the explicit-context-dir model, the error is "no context
+	// directory specified" because nothing declared one.
+	if !strings.Contains(runErr.Error(), "context directory") {
 		t.Errorf("unexpected error: %v", runErr)
 	}
 }
@@ -123,7 +132,7 @@ func setupContextDir(t *testing.T) (string, func()) {
 		t.Fatalf("failed to chdir: %v", err)
 	}
 
-	rc.Reset()
+	testctx.Declare(t, tmpDir)
 
 	initCmd := initialize.Cmd()
 	initCmd.SetArgs([]string{})
@@ -251,6 +260,7 @@ func TestRunDrift_GenericError(t *testing.T) {
 		t.Fatalf("failed to chdir: %v", err)
 	}
 	defer func() { _ = os.Chdir(origDir) }()
+	t.Setenv(env.CtxDir, filepath.Join(tmpDir, dir.Context))
 
 	rc.Reset()
 	defer rc.Reset()
diff --git a/internal/cli/event/cmd.go b/internal/cli/event/cmd.go
index f2b19645d..230f98787 100644
--- a/internal/cli/event/cmd.go
+++ b/internal/cli/event/cmd.go
@@ -17,7 +17,7 @@ import (
 	"github.com/ActiveMemory/ctx/internal/flagbind"
 )
 
-// Cmd returns the "ctx event" top-level command.
+// Cmd returns the "ctx hook event" command.
 //
 // Returns:
 //   - *cobra.Command: Configured event command
diff --git a/internal/cli/event/doc.go b/internal/cli/event/doc.go
index 70d6fd892..225f9b709 100644
--- a/internal/cli/event/doc.go
+++ b/internal/cli/event/doc.go
@@ -4,11 +4,28 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package event implements the ctx event top-level command.
+// Package event implements the ctx hook event command.
 //
-// Queries the local hook event log, filtered by hook name,
-// session ID, and event type. Supports JSON output and count
-// limiting.
+// Queries the local hook event log and displays matching
+// entries. Events are recorded by system hooks during AI
+// sessions and capture hook firings, session lifecycle
+// transitions, and notification deliveries.
 //
-// Key exports: [Cmd], [Run].
+// # Filtering
+//
+// Results can be narrowed by hook name (--hook), session
+// ID (--session), and event type (--event). The --last
+// flag limits the number of returned entries (default
+// from [config/event.DefaultLast]). The --all flag
+// includes rotated log files that are normally excluded.
+//
+// # Output Formats
+//
+// Human-readable output shows a table with timestamp,
+// hook, event, and session columns. JSON output (--json)
+// emits an array of event objects for scripting.
+//
+// [Cmd] builds the cobra command with filter and format flags.
+// [Run] queries the event log, applies filters, and renders
+// matching entries as a table or JSON array.
 package event
diff --git a/internal/cli/fmt/cmd/root/doc.go b/internal/cli/fmt/cmd/root/doc.go
index 11ad94ee9..c8882a917 100644
--- a/internal/cli/fmt/cmd/root/doc.go
+++ b/internal/cli/fmt/cmd/root/doc.go
@@ -4,9 +4,38 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package root implements the "ctx fmt" command logic.
+// Package root implements the "ctx fmt" command that
+// formats context files to a consistent line width.
 //
-// [Cmd] registers the command and its flags (--width, --check).
-// [Run] iterates over the four context files, applies wrapping,
-// and writes back only files that changed.
+// # What It Does
+//
+// Iterates over the four context files (TASKS.md,
+// DECISIONS.md, LEARNINGS.md, CONVENTIONS.md),
+// applies word-wrapping to each, and writes back
+// only files that changed. In check mode it reports
+// which files need formatting without modifying them.
+//
+// # Flags
+//
+//   - --width: Target line width in characters.
+//     Defaults to [wrap.DefaultWidth] (72).
+//   - --check: Dry-run mode. Reports files that
+//     would change and exits with code 1 if any
+//     need formatting. Useful in CI pipelines.
+//
+// # Output
+//
+// In normal mode, prints a summary line like
+// "Formatted 2/4 files." In check mode, prints
+// each file that needs formatting and exits with
+// a non-zero status if any were found.
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command with --width and
+// --check flags. [Run] resolves the context
+// directory via [rc.ContextDir], reads each file
+// with [io.SafeReadUserFile], wraps content via
+// [wrap.ContextFile], and writes back with
+// [io.SafeWriteFile].
 package root
diff --git a/internal/cli/fmt/cmd/root/run.go b/internal/cli/fmt/cmd/root/run.go
index 0c1526a1e..debdc4321 100644
--- a/internal/cli/fmt/cmd/root/run.go
+++ b/internal/cli/fmt/cmd/root/run.go
@@ -40,7 +40,11 @@ var contextFiles = []string{
 //   - error: Non-nil if context directory is missing or file
 //     operations fail; exits 1 in check mode if files would change
 func Run(cmd *cobra.Command, width int, check bool) error {
-	ctxDir := rc.ContextDir()
+	ctxDir, ctxErr := rc.RequireContextDir()
+	if ctxErr != nil {
+		cmd.SilenceUsage = true
+		return ctxErr
+	}
 	if _, statErr := os.Stat(ctxDir); os.IsNotExist(statErr) {
 		return errFmt.NoContextDir()
 	}
diff --git a/internal/cli/fmt/doc.go b/internal/cli/fmt/doc.go
index f0fefe9f3..5913f5e4c 100644
--- a/internal/cli/fmt/doc.go
+++ b/internal/cli/fmt/doc.go
@@ -7,7 +7,22 @@
 // Package fmt provides the "ctx fmt" command for formatting
 // context files to a consistent line width.
 //
-// It wraps long lines at word boundaries, using 2-space
-// continuation indent for markdown list items. Headings,
-// tables, frontmatter, and HTML comments are preserved.
+// The fmt command rewraps markdown text in .context/ files
+// so that lines stay within a target width (default 72
+// characters). This produces cleaner diffs and improves
+// readability in terminals and editors.
+//
+// # Wrapping Rules
+//
+//   - Long lines are broken at word boundaries
+//   - Markdown list items use 2-space continuation indent
+//     when wrapped
+//   - Headings are never wrapped
+//   - Tables, YAML frontmatter blocks, and HTML comments
+//     are preserved verbatim
+//   - Code blocks (fenced and indented) are left untouched
+//
+// # Subpackages
+//
+//   - cmd/root: cobra command definition and flag binding
 package fmt
diff --git a/internal/cli/guide/cmd/root/doc.go b/internal/cli/guide/cmd/root/doc.go
index ba3763889..157e96afd 100644
--- a/internal/cli/guide/cmd/root/doc.go
+++ b/internal/cli/guide/cmd/root/doc.go
@@ -4,10 +4,37 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package root implements the ctx guide command that lists
-// available skills and CLI commands.
+// Package root implements the "ctx guide" command that
+// lists available skills and CLI commands as a quick
+// reference.
 //
-// [Cmd] builds the cobra.Command with --skills and --commands
-// flags. [Run] reads skills from the embedded plugin and commands
-// from the bootstrap registry, then renders both lists.
+// # What It Does
+//
+// Displays help information about the ctx ecosystem.
+// Without flags it shows a default overview. With
+// --skills it lists all available slash-command skills
+// from the embedded plugin. With --commands it lists
+// every registered CLI command.
+//
+// # Flags
+//
+//   - --skills: List all available skills with
+//     their trigger descriptions.
+//   - --commands: List all CLI commands with their
+//     short descriptions.
+//
+// # Output
+//
+// Default mode prints a concise getting-started
+// guide. Skills mode prints a table of skill names
+// and descriptions. Commands mode prints a table
+// of command paths and short descriptions.
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command with
+// AnnotationSkipInit so it works before context
+// initialization. [Run] dispatches to
+// [skill.List], [command.List], or
+// [guide.Default] based on flags.
 package root
diff --git a/internal/cli/guide/core/command/doc.go b/internal/cli/guide/core/command/doc.go
index c12c5b064..a270ec1ea 100644
--- a/internal/cli/guide/core/command/doc.go
+++ b/internal/cli/guide/core/command/doc.go
@@ -4,8 +4,25 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package command lists available CLI commands for the guide
-// output. Iterates the root command tree and prints each
-// non-hidden subcommand with its short description for
-// quick reference.
+// Package command lists available CLI commands for the
+// ctx guide output.
+//
+// The [List] function walks the root cobra command tree and
+// prints every non-hidden subcommand with its short
+// description. Hidden commands (system hooks, internal
+// plumbing) are filtered out so the guide shows only
+// user-facing surface area.
+//
+// Output is rendered through [internal/write/guide] which
+// handles the terminal formatting: a header line followed
+// by one indented line per command showing the command name
+// and its one-line description.
+//
+// # Design Choice
+//
+// The guide command serves as an interactive onboarding
+// tool. By listing commands from the live cobra tree rather
+// than a static list, the output stays in sync with the
+// actual binary. Adding or removing a subcommand
+// automatically updates the guide.
 package command
diff --git a/internal/cli/guide/core/doc.go b/internal/cli/guide/core/doc.go
index b6f118560..c5921f92b 100644
--- a/internal/cli/guide/core/doc.go
+++ b/internal/cli/guide/core/doc.go
@@ -4,11 +4,37 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package core provides guide logic: skill listing, command
-// listing, and frontmatter parsing.
+// Package core is the umbrella for the guide command's
+// listing and discovery logic.
 //
-// Key exports: [ListCommands], [ListSkills],
-// [ParseSkillFrontmatter], [TruncateDescription], [SkillMeta].
-// Shared helpers used by sibling cmd/ packages.
-// Used by core cmd/ packages.
+// # Overview
+//
+// The guide command helps users discover available ctx
+// commands and skills. This package groups sub-packages
+// that enumerate CLI commands and parse skill metadata
+// for display.
+//
+// # Sub-packages
+//
+//   - command: lists all non-hidden cobra subcommands
+//     from the root command tree. Exports [command.List].
+//   - skill: lists available SKILL.md files, parses
+//     their YAML frontmatter, and truncates descriptions
+//     for display. Exports [skill.List],
+//     [skill.ParseFrontmatter], [skill.TruncateDescription],
+//     and the [skill.Meta] type.
+//
+// # Data Flow
+//
+// The cmd layer calls into the appropriate sub-package
+// based on the subcommand:
+//
+//  1. ctx guide commands: calls command.List, which
+//     walks the cobra command tree and prints each
+//     non-hidden command with its short description.
+//  2. ctx guide skills: calls skill.List, which
+//     reads SKILL.md files from the claude skill
+//     directory, extracts frontmatter metadata, and
+//     prints each skill name with a truncated
+//     description.
 package core
diff --git a/internal/cli/guide/core/skill/doc.go b/internal/cli/guide/core/skill/doc.go
index cbaa161f5..3f0b119fd 100644
--- a/internal/cli/guide/core/skill/doc.go
+++ b/internal/cli/guide/core/skill/doc.go
@@ -4,8 +4,29 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package skill parses skill frontmatter and lists available
-// skills. Extracts name and description from YAML frontmatter
-// in SKILL.md files and truncates long descriptions for
-// compact display.
+// Package skill discovers and lists available skills for the
+// ctx guide output.
+//
+// Skills are SKILL.md files with YAML frontmatter containing
+// a name and description. The [List] function enumerates all
+// installed skills via [internal/claude.SkillList], parses
+// each file's frontmatter with [ParseFrontmatter], and
+// prints a summary line per skill.
+//
+// # Frontmatter Parsing
+//
+// [ParseFrontmatter] extracts the YAML block delimited by
+// "---" fences at the top of a SKILL.md file. If no valid
+// frontmatter is found (missing fences or malformed YAML),
+// it returns a zero [Meta] without error. Only YAML parse
+// failures produce an error.
+//
+// # Description Truncation
+//
+// [TruncateDescription] shortens long descriptions for
+// compact display. It prefers a natural sentence break
+// (first ". " within the limit) over a hard cut. If the
+// text is shorter than the limit, it passes through
+// unchanged; otherwise it is cut at the limit with an
+// ellipsis appended.
 package skill
diff --git a/internal/cli/guide/doc.go b/internal/cli/guide/doc.go
index 306a5c7a1..d9658e352 100644
--- a/internal/cli/guide/doc.go
+++ b/internal/cli/guide/doc.go
@@ -6,7 +6,26 @@
 
 // Package guide provides the "ctx guide" command.
 //
-// Displays a use-case-oriented cheat sheet covering core CLI commands
-// grouped by workflow, available skills, and common recipes. Default
-// output fits one screen; full listings available via flags.
+// Displays a use-case-oriented cheat sheet covering core
+// CLI commands grouped by workflow, available skills, and
+// common recipes. The default output fits a single
+// terminal screen; full listings are available via flags.
+//
+// The guide is aimed at new users and AI agents that need
+// a quick orientation on what ctx can do. It pulls
+// content from embedded asset files and formats it for
+// terminal display with section headers and indentation.
+//
+// # Output Sections
+//
+//   - Quick start: essential commands for a first session
+//   - Context files: what each .context/ file does
+//   - Workflows: common multi-command sequences
+//   - Skills: available slash-command skills
+//   - Recipes: copy-paste command combinations
+//
+// # Subpackages
+//
+//   - cmd/root: cobra command definition and flag binding
+//   - core: content assembly and terminal formatting
 package guide
diff --git a/internal/cli/hook/doc.go b/internal/cli/hook/doc.go
index 7e80399e0..c338a96c6 100644
--- a/internal/cli/hook/doc.go
+++ b/internal/cli/hook/doc.go
@@ -6,8 +6,27 @@
 
 // Package hook implements the ctx hook parent command.
 //
-// Consolidates hook-related user-facing commands under a single
-// namespace: message, notify, pause, resume, and event.
+// The hook command is a pure namespace that consolidates
+// user-facing hook-related commands under a single CLI
+// group. It has no RunE of its own and delegates all
+// work to its subcommands.
 //
-// Key exports: [Cmd].
+// # Subcommands
+//
+//   - event: query the hook event log for past firings
+//   - message: inject messages into the AI session via
+//     the hook message protocol
+//   - notify: send webhook notifications, set up webhook
+//     URLs, and test connectivity
+//   - pause: suppress all context hooks for the current
+//     session
+//   - resume: re-enable context hooks after a pause
+//
+// Hook plumbing commands (check-*, block-*, heartbeat)
+// live under [internal/cli/system] rather than here,
+// because they are hidden and not intended for direct
+// user invocation.
+//
+// [Cmd] builds the parent cobra command and registers each
+// subcommand (event, message, notify, pause, resume).
 package hook
diff --git a/internal/cli/hub/cmd/peer/cmd.go b/internal/cli/hub/cmd/peer/cmd.go
index 97e22be97..b79386698 100644
--- a/internal/cli/hub/cmd/peer/cmd.go
+++ b/internal/cli/hub/cmd/peer/cmd.go
@@ -11,6 +11,7 @@ import (
 
 	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
 	corePeer "github.com/ActiveMemory/ctx/internal/cli/hub/core/peer"
+	"github.com/ActiveMemory/ctx/internal/config/cli"
 	"github.com/ActiveMemory/ctx/internal/config/embed/cmd"
 )
 
@@ -27,6 +28,9 @@ func Cmd() *cobra.Command {
 		Long:    long,
 		Example: desc.Example(cmd.DescKeyHubPeer),
 		Args:    cobra.ExactArgs(2),
-		RunE:    corePeer.Run,
+		// Hub stores at ~/.ctx/hub-data/, not .context/.
+		// Spec: specs/single-source-context-anchor.md.
+		Annotations: map[string]string{cli.AnnotationSkipInit: cli.AnnotationTrue},
+		RunE:        corePeer.Run,
 	}
 }
diff --git a/internal/cli/hub/cmd/peer/cmd_test.go b/internal/cli/hub/cmd/peer/cmd_test.go
new file mode 100644
index 000000000..3bd4acd53
--- /dev/null
+++ b/internal/cli/hub/cmd/peer/cmd_test.go
@@ -0,0 +1,24 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package peer
+
+import (
+	"testing"
+
+	"github.com/ActiveMemory/ctx/internal/config/cli"
+)
+
+// TestHubPeer_AnnotationSkipInit guards the hub-bypass contract.
+// Spec: specs/single-source-context-anchor.md.
+func TestHubPeer_AnnotationSkipInit(t *testing.T) {
+	c := Cmd()
+	if got, ok := c.Annotations[cli.AnnotationSkipInit]; !ok {
+		t.Errorf("hub peer: missing AnnotationSkipInit annotation")
+	} else if got != cli.AnnotationTrue {
+		t.Errorf("hub peer: AnnotationSkipInit = %q, want %q", got, cli.AnnotationTrue)
+	}
+}
diff --git a/internal/cli/hub/cmd/peer/doc.go b/internal/cli/hub/cmd/peer/doc.go
index 4a46f3cc7..de8b8a6a9 100644
--- a/internal/cli/hub/cmd/peer/doc.go
+++ b/internal/cli/hub/cmd/peer/doc.go
@@ -4,10 +4,37 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package peer provides the cobra command for
-// ctx hub peer.
+// Package peer implements the "ctx hub peer" subcommand
+// for managing peer nodes in a ctx Hub cluster.
 //
-// Key exports: [Cmd].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # What It Does
+//
+// Adds or removes a peer address from the hub's
+// cluster membership. This is used when scaling a
+// hub deployment across multiple nodes for Raft-based
+// leader election and replication.
+//
+// # Arguments
+//
+// Requires exactly two positional arguments:
+//
+//   - args[0]: action: "add" or "remove"
+//   - args[1]: address: peer gRPC address
+//     (host:port)
+//
+// # Flags
+//
+// None.
+//
+// # Output
+//
+// Prints a confirmation line indicating the peer
+// was added or removed, e.g. "Peer added: host:9090"
+// or "Peer removed: host:9090".
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command and delegates
+// directly to [corePeer.Run] which validates the
+// action string and writes the confirmation message.
 package peer
diff --git a/internal/cli/hub/cmd/start/cmd.go b/internal/cli/hub/cmd/start/cmd.go
index 38295d028..7d1ba48e7 100644
--- a/internal/cli/hub/cmd/start/cmd.go
+++ b/internal/cli/hub/cmd/start/cmd.go
@@ -11,6 +11,7 @@ import (
 
 	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
 	"github.com/ActiveMemory/ctx/internal/cli/hub/core/server"
+	"github.com/ActiveMemory/ctx/internal/config/cli"
 	"github.com/ActiveMemory/ctx/internal/config/embed/cmd"
 	"github.com/ActiveMemory/ctx/internal/config/embed/flag"
 	cFlag "github.com/ActiveMemory/ctx/internal/config/flag"
@@ -41,6 +42,11 @@ func Cmd() *cobra.Command {
 		Long:    long,
 		Example: desc.Example(cmd.DescKeyHubStart),
 		Args:    cobra.NoArgs,
+		// Hub stores at ~/.ctx/hub-data/, never reads .context/.
+		// Exempt from the require-context-dir gate so AWS/EKS hub
+		// users hit no-broken-windows on first contact.
+		// Spec: specs/single-source-context-anchor.md.
+		Annotations: map[string]string{cli.AnnotationSkipInit: cli.AnnotationTrue},
 		RunE: func(cobraCmd *cobra.Command, _ []string) error {
 			if isDaemon {
 				return server.RunDaemon(
diff --git a/internal/cli/hub/cmd/start/cmd_test.go b/internal/cli/hub/cmd/start/cmd_test.go
new file mode 100644
index 000000000..fecf6a361
--- /dev/null
+++ b/internal/cli/hub/cmd/start/cmd_test.go
@@ -0,0 +1,29 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package start
+
+import (
+	"testing"
+
+	"github.com/ActiveMemory/ctx/internal/config/cli"
+)
+
+// TestHubStart_AnnotationSkipInit verifies the hub start subcommand
+// carries the AnnotationSkipInit annotation. Hub uses
+// ~/.ctx/hub-data/, never reads .context/, and must bypass the
+// require-context-dir gate so AWS/EKS hub users hit
+// no-broken-windows on first contact.
+//
+// Spec: specs/single-source-context-anchor.md.
+func TestHubStart_AnnotationSkipInit(t *testing.T) {
+	c := Cmd()
+	if got, ok := c.Annotations[cli.AnnotationSkipInit]; !ok {
+		t.Errorf("hub start: missing AnnotationSkipInit annotation")
+	} else if got != cli.AnnotationTrue {
+		t.Errorf("hub start: AnnotationSkipInit = %q, want %q", got, cli.AnnotationTrue)
+	}
+}
diff --git a/internal/cli/hub/cmd/start/doc.go b/internal/cli/hub/cmd/start/doc.go
index dbe609eaf..a090366c9 100644
--- a/internal/cli/hub/cmd/start/doc.go
+++ b/internal/cli/hub/cmd/start/doc.go
@@ -4,10 +4,41 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package start provides the cobra command for
-// ctx hub start.
+// Package start implements the "ctx hub start" subcommand
+// that launches the ctx Hub gRPC server.
 //
-// Key exports: [Cmd].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # What It Does
+//
+// Starts the hub server either in the foreground or
+// as a detached daemon process. The hub provides a
+// gRPC API for publishing, syncing, and streaming
+// context entries across projects. When --peers is
+// set, the server joins a Raft cluster for leader
+// election and entry replication.
+//
+// # Flags
+//
+//   - --port: gRPC listen port. Defaults to the
+//     value from [server.DefaultPort].
+//   - --data-dir: Directory for persistent storage
+//     (entries, Raft state, PID file).
+//   - --daemon: Run as a background daemon. Writes
+//     a PID file for later stop/status commands.
+//   - --peers: Comma-separated list of peer
+//     addresses (host:port) to form a Raft cluster.
+//
+// # Output
+//
+// In foreground mode, prints the admin token and
+// listen address, then blocks until interrupted.
+// In daemon mode, prints the admin token and PID,
+// then detaches.
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command and binds all
+// flags. When --daemon is set it calls
+// [server.RunDaemon]; otherwise it parses the
+// peers string via [server.ParsePeers] and calls
+// [server.Run] for foreground operation.
 package start
diff --git a/internal/cli/hub/cmd/status/cmd.go b/internal/cli/hub/cmd/status/cmd.go
index c0f2405ce..f83fc69c4 100644
--- a/internal/cli/hub/cmd/status/cmd.go
+++ b/internal/cli/hub/cmd/status/cmd.go
@@ -11,6 +11,7 @@ import (
 
 	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
 	coreStatus "github.com/ActiveMemory/ctx/internal/cli/hub/core/status"
+	"github.com/ActiveMemory/ctx/internal/config/cli"
 	"github.com/ActiveMemory/ctx/internal/config/embed/cmd"
 )
 
@@ -27,6 +28,9 @@ func Cmd() *cobra.Command {
 		Long:    long,
 		Example: desc.Example(cmd.DescKeyHubStatus),
 		Args:    cobra.NoArgs,
-		RunE:    coreStatus.Run,
+		// Hub stores at ~/.ctx/hub-data/, not .context/.
+		// Spec: specs/single-source-context-anchor.md.
+		Annotations: map[string]string{cli.AnnotationSkipInit: cli.AnnotationTrue},
+		RunE:        coreStatus.Run,
 	}
 }
diff --git a/internal/cli/hub/cmd/status/cmd_test.go b/internal/cli/hub/cmd/status/cmd_test.go
new file mode 100644
index 000000000..8663e2d7a
--- /dev/null
+++ b/internal/cli/hub/cmd/status/cmd_test.go
@@ -0,0 +1,24 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package status
+
+import (
+	"testing"
+
+	"github.com/ActiveMemory/ctx/internal/config/cli"
+)
+
+// TestHubStatus_AnnotationSkipInit guards the hub-bypass contract.
+// Spec: specs/single-source-context-anchor.md.
+func TestHubStatus_AnnotationSkipInit(t *testing.T) {
+	c := Cmd()
+	if got, ok := c.Annotations[cli.AnnotationSkipInit]; !ok {
+		t.Errorf("hub status: missing AnnotationSkipInit annotation")
+	} else if got != cli.AnnotationTrue {
+		t.Errorf("hub status: AnnotationSkipInit = %q, want %q", got, cli.AnnotationTrue)
+	}
+}
diff --git a/internal/cli/hub/cmd/status/doc.go b/internal/cli/hub/cmd/status/doc.go
index f2c9ed511..fa03dfb45 100644
--- a/internal/cli/hub/cmd/status/doc.go
+++ b/internal/cli/hub/cmd/status/doc.go
@@ -4,10 +4,35 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package status provides the cobra command for
-// ctx hub status.
+// Package status implements the "ctx hub status"
+// subcommand that displays cluster-level information
+// about a running ctx Hub.
 //
-// Key exports: [Cmd].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # What It Does
+//
+// Loads the connection config, connects to the hub,
+// calls the Status RPC, and prints a cluster summary
+// including the node role, hub address, total stored
+// entries, and number of registered projects.
+//
+// # Flags
+//
+// None. The command accepts no arguments. Connection
+// settings are read from .context/.connect.enc.
+//
+// # Output
+//
+// A human-readable status block including:
+//
+//   - Node role (active or follower)
+//   - Hub address (host:port)
+//   - Total entries stored
+//   - Number of registered projects
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command and delegates
+// directly to [coreStatus.Run] which handles config
+// loading, gRPC client setup, the status call, role
+// determination, and output formatting.
 package status
diff --git a/internal/cli/hub/cmd/status/integration_test.go b/internal/cli/hub/cmd/status/integration_test.go
new file mode 100644
index 000000000..2625e7e69
--- /dev/null
+++ b/internal/cli/hub/cmd/status/integration_test.go
@@ -0,0 +1,73 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package status_test
+
+import (
+	"errors"
+	"path/filepath"
+	"testing"
+
+	"github.com/spf13/cobra"
+
+	"github.com/ActiveMemory/ctx/internal/bootstrap"
+	"github.com/ActiveMemory/ctx/internal/cli/hub/cmd/status"
+	"github.com/ActiveMemory/ctx/internal/config/dir"
+	"github.com/ActiveMemory/ctx/internal/config/env"
+	errCtx "github.com/ActiveMemory/ctx/internal/err/context"
+	"github.com/ActiveMemory/ctx/internal/rc"
+)
+
+// discardWriter silences command output in tests.
+type discardWriter struct{}
+
+func (discardWriter) Write(p []byte) (int, error) { return len(p), nil }
+
+// TestHubStatus_BypassesPreRunEGate is the integration-style smoke
+// test required by the spec. Builds a root command tree as
+// production does (via bootstrap.RootCmd), wires this hub status
+// subcommand onto a "hub" parent, and runs with CTX_DIR pointing at
+// a deliberately-non-existent path. The PreRunE gate must NOT
+// short-circuit with ErrDirNotDeclared.
+//
+// Without this guard, a future refactor that breaks PreRunE's
+// annotation handling could leave the annotation in place but
+// regress the actual bypass behavior.
+//
+// Spec: specs/single-source-context-anchor.md.
+//
+// The test lives in package `status_test` to avoid an import cycle
+// (bootstrap → cli/hub → cli/hub/cmd/status). External-test packages
+// are exempt from cycle detection.
+func TestHubStatus_BypassesPreRunEGate(t *testing.T) {
+	// Wire CTX_DIR to a deliberately-non-existent shape-valid path
+	// so RequireContextDir would fail loud if PreRunE actually ran.
+	t.Setenv(env.CtxDir, filepath.Join(t.TempDir(), "absent", dir.Context))
+	rc.Reset()
+	t.Cleanup(rc.Reset)
+
+	root := bootstrap.RootCmd()
+
+	// Build a hub parent (matches the production tree shape).
+	hub := &cobra.Command{
+		Use:   "hub",
+		Short: "ctx Hub",
+	}
+	hub.AddCommand(status.Cmd())
+	root.AddCommand(hub)
+
+	root.SetOut(&discardWriter{})
+	root.SetErr(&discardWriter{})
+	root.SetArgs([]string{"hub", "status"})
+
+	err := root.Execute()
+	// Server is not running so coreStatus.Run will return its own
+	// connect error — that's fine. The contract: the error must
+	// NOT be the gate's "context dir not declared" sentinel.
+	if errors.Is(err, errCtx.ErrDirNotDeclared) {
+		t.Errorf("hub status: PreRunE gate short-circuited with ErrDirNotDeclared (annotation bypass broken)")
+	}
+}
diff --git a/internal/cli/hub/cmd/stepdown/cmd.go b/internal/cli/hub/cmd/stepdown/cmd.go
index f9449d453..b915cf0e7 100644
--- a/internal/cli/hub/cmd/stepdown/cmd.go
+++ b/internal/cli/hub/cmd/stepdown/cmd.go
@@ -11,6 +11,7 @@ import (
 
 	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
 	coreStep "github.com/ActiveMemory/ctx/internal/cli/hub/core/stepdown"
+	"github.com/ActiveMemory/ctx/internal/config/cli"
 	"github.com/ActiveMemory/ctx/internal/config/embed/cmd"
 )
 
@@ -27,6 +28,9 @@ func Cmd() *cobra.Command {
 		Long:    long,
 		Example: desc.Example(cmd.DescKeyHubStepdown),
 		Args:    cobra.NoArgs,
-		RunE:    coreStep.Run,
+		// Hub stores at ~/.ctx/hub-data/, not .context/.
+		// Spec: specs/single-source-context-anchor.md.
+		Annotations: map[string]string{cli.AnnotationSkipInit: cli.AnnotationTrue},
+		RunE:        coreStep.Run,
 	}
 }
diff --git a/internal/cli/hub/cmd/stepdown/cmd_test.go b/internal/cli/hub/cmd/stepdown/cmd_test.go
new file mode 100644
index 000000000..86c4bdfcf
--- /dev/null
+++ b/internal/cli/hub/cmd/stepdown/cmd_test.go
@@ -0,0 +1,24 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package stepdown
+
+import (
+	"testing"
+
+	"github.com/ActiveMemory/ctx/internal/config/cli"
+)
+
+// TestHubStepdown_AnnotationSkipInit guards the hub-bypass contract.
+// Spec: specs/single-source-context-anchor.md.
+func TestHubStepdown_AnnotationSkipInit(t *testing.T) {
+	c := Cmd()
+	if got, ok := c.Annotations[cli.AnnotationSkipInit]; !ok {
+		t.Errorf("hub stepdown: missing AnnotationSkipInit annotation")
+	} else if got != cli.AnnotationTrue {
+		t.Errorf("hub stepdown: AnnotationSkipInit = %q, want %q", got, cli.AnnotationTrue)
+	}
+}
diff --git a/internal/cli/hub/cmd/stepdown/doc.go b/internal/cli/hub/cmd/stepdown/doc.go
index 41654cb14..0ac3a8964 100644
--- a/internal/cli/hub/cmd/stepdown/doc.go
+++ b/internal/cli/hub/cmd/stepdown/doc.go
@@ -4,10 +4,31 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package stepdown provides the cobra command for
-// ctx hub stepdown.
+// Package stepdown implements the "ctx hub stepdown"
+// subcommand that requests leadership transfer from
+// the current hub node.
 //
-// Key exports: [Cmd].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # What It Does
+//
+// Tells the current Raft leader node to voluntarily
+// give up leadership so another node in the cluster
+// can take over. This is useful for planned
+// maintenance or graceful role rotation in a
+// multi-node hub deployment.
+//
+// # Flags
+//
+// None. The command accepts no arguments.
+//
+// # Output
+//
+// Prints a confirmation line indicating that the
+// leadership stepdown was requested.
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command and delegates
+// directly to [coreStep.Run] which sends the
+// stepdown request and writes the confirmation
+// message.
 package stepdown
diff --git a/internal/cli/hub/cmd/stop/cmd.go b/internal/cli/hub/cmd/stop/cmd.go
index 8fc85380e..529b0be55 100644
--- a/internal/cli/hub/cmd/stop/cmd.go
+++ b/internal/cli/hub/cmd/stop/cmd.go
@@ -11,6 +11,7 @@ import (
 
 	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
 	"github.com/ActiveMemory/ctx/internal/cli/hub/core/server"
+	"github.com/ActiveMemory/ctx/internal/config/cli"
 	"github.com/ActiveMemory/ctx/internal/config/embed/cmd"
 	"github.com/ActiveMemory/ctx/internal/config/embed/flag"
 	cFlag "github.com/ActiveMemory/ctx/internal/config/flag"
@@ -35,6 +36,9 @@ func Cmd() *cobra.Command {
 		Long:    long,
 		Example: desc.Example(cmd.DescKeyHubStop),
 		Args:    cobra.NoArgs,
+		// Hub stores at ~/.ctx/hub-data/, not .context/.
+		// Spec: specs/single-source-context-anchor.md.
+		Annotations: map[string]string{cli.AnnotationSkipInit: cli.AnnotationTrue},
 		RunE: func(cobraCmd *cobra.Command, _ []string) error {
 			return server.Stop(cobraCmd, dataDir)
 		},
diff --git a/internal/cli/hub/cmd/stop/cmd_test.go b/internal/cli/hub/cmd/stop/cmd_test.go
new file mode 100644
index 000000000..bf9905608
--- /dev/null
+++ b/internal/cli/hub/cmd/stop/cmd_test.go
@@ -0,0 +1,24 @@
+//   /    ctx:                         https://ctx.ist
+// ,'`./    do you remember?
+// `.,'\
+//   \    Copyright 2026-present Context contributors.
+//                 SPDX-License-Identifier: Apache-2.0
+
+package stop
+
+import (
+	"testing"
+
+	"github.com/ActiveMemory/ctx/internal/config/cli"
+)
+
+// TestHubStop_AnnotationSkipInit guards the hub-bypass contract.
+// Spec: specs/single-source-context-anchor.md.
+func TestHubStop_AnnotationSkipInit(t *testing.T) {
+	c := Cmd()
+	if got, ok := c.Annotations[cli.AnnotationSkipInit]; !ok {
+		t.Errorf("hub stop: missing AnnotationSkipInit annotation")
+	} else if got != cli.AnnotationTrue {
+		t.Errorf("hub stop: AnnotationSkipInit = %q, want %q", got, cli.AnnotationTrue)
+	}
+}
diff --git a/internal/cli/hub/cmd/stop/doc.go b/internal/cli/hub/cmd/stop/doc.go
index ac51d3a56..482569b4e 100644
--- a/internal/cli/hub/cmd/stop/doc.go
+++ b/internal/cli/hub/cmd/stop/doc.go
@@ -4,10 +4,31 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package stop provides the cobra command for
-// ctx hub stop.
+// Package stop implements the "ctx hub stop" subcommand
+// that shuts down a daemonized ctx Hub server.
 //
-// Key exports: [Cmd].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # What It Does
+//
+// Reads the PID file from the hub data directory,
+// sends SIGTERM to the running hub process, and
+// removes the PID file on success. This is the
+// counterpart to "ctx hub start --daemon".
+//
+// # Flags
+//
+//   - --data-dir: Directory where the hub stores
+//     its PID file and persistent data. Must match
+//     the --data-dir used when starting the hub.
+//
+// # Output
+//
+// Prints a confirmation line when the hub process
+// is successfully terminated.
+//
+// # Delegation
+//
+// [Cmd] builds the cobra.Command, binds the
+// --data-dir flag, and delegates to [server.Stop]
+// which reads the PID file, sends the signal, and
+// cleans up.
 package stop
diff --git a/internal/cli/hub/core/peer/doc.go b/internal/cli/hub/core/peer/doc.go
index 83e804de1..cbfd3186e 100644
--- a/internal/cli/hub/core/peer/doc.go
+++ b/internal/cli/hub/core/peer/doc.go
@@ -4,10 +4,35 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package peer implements runtime peer add/remove for
-// ctx hub peer.
+// Package peer implements runtime peer management for
+// the ctx hub peer command.
 //
-// Key exports: [Run].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # Overview
+//
+// This package provides the business logic for adding
+// and removing peers from a hub cluster. When a user
+// runs ctx hub peer add or ctx hub peer remove, the
+// command layer delegates to [Run], which dispatches
+// on the action argument and reports the result.
+//
+// # Behavior
+//
+// [Run] dispatches on the action argument ("add" or "remove")
+// to register or deregister a peer address in the cluster.
+//
+// # Data Flow
+//
+// The peer management pipeline works as follows:
+//
+//  1. The cmd layer invokes [Run] with cobra args
+//     containing [action, address].
+//  2. [Run] switches on the action string, matching
+//     against the configured add and remove constants
+//     from the hub config package.
+//  3. For "add", a confirmation message is printed
+//     via writeHub.PeerAdded.
+//  4. For "remove", a confirmation message is printed
+//     via writeHub.PeerRemoved.
+//  5. An invalid action returns an error from the
+//     hub error package.
 package peer
diff --git a/internal/cli/hub/core/server/doc.go b/internal/cli/hub/core/server/doc.go
index cc7b749d9..044cb326f 100644
--- a/internal/cli/hub/core/server/doc.go
+++ b/internal/cli/hub/core/server/doc.go
@@ -4,10 +4,45 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package server implements the ctx Hub server
-// startup logic for ctx serve --hub.
+// Package server is the **server-side runtime** for
+// `ctx hub start`: daemon lifecycle, PID file management,
+// and the wire-up between the [internal/hub] package and
+// the user-facing CLI flags (`--port`, `--peers`,
+// `--daemon`).
 //
-// Key exports: [Run], [DefaultPort].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// The package is the bridge: [internal/hub] knows how to
+// be a hub, this package knows how to *run* one as a
+// daemon process.
+//
+// # Public Surface
+//
+//   - **[Run](opts)**: foreground server boot. Binds
+//     the listener, instantiates the [hub.Server],
+//     wires the optional [hub.Cluster] when `--peers`
+//     is passed, blocks on serve. Honors signals
+//     (SIGINT, SIGTERM) for graceful shutdown.
+//   - **[DefaultPort]**: the canonical port (9900)
+//     used by docs, examples, and the recipes.
+//
+// # Daemon Mode
+//
+// When the user passes `--daemon`, the parent forks a
+// detached child, writes `/hub.pid` with the
+// child's PID, and exits. The PID file is what
+// `ctx hub stop` consumes to send SIGTERM.
+//
+// # PID File Lifecycle
+//
+//   - **Created** atomically on daemon start.
+//   - **Removed** by the child on graceful shutdown.
+//   - **Stale-detected** by `ctx hub status` (PID does
+//     not refer to a running process) so a crashed
+//     hub does not block a fresh start.
+//
+// # Concurrency
+//
+// The server runs in the same process as gRPC
+// dispatch; this package starts it and waits. No
+// in-process concurrency primitives beyond what
+// [internal/hub] and the gRPC runtime already provide.
 package server
diff --git a/internal/cli/hub/core/status/doc.go b/internal/cli/hub/core/status/doc.go
index 7b44d25ce..c0dba6c13 100644
--- a/internal/cli/hub/core/status/doc.go
+++ b/internal/cli/hub/core/status/doc.go
@@ -5,9 +5,34 @@
 //                 SPDX-License-Identifier: Apache-2.0
 
 // Package status implements cluster status display for
-// ctx hub status.
+// the ctx hub status command.
 //
-// Key exports: [Run].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # Overview
+//
+// This package queries a remote hub for its cluster
+// state and renders a summary showing the node role,
+// address, total entries, and project count.
+//
+// # Behavior
+//
+// [Run] dials the hub via gRPC, retrieves cluster metrics
+// (connected clients, entry count, projects), and renders
+// a summary showing node role, address, and totals.
+//
+// # Data Flow
+//
+// When [Run] is called it performs these steps:
+//
+//  1. Loads connection config to obtain the hub
+//     address and authentication token.
+//  2. Dials the hub via gRPC using hub.NewClient.
+//  3. Calls the Status RPC to retrieve cluster
+//     metrics including connected clients, total
+//     entries, and per-project breakdowns.
+//  4. Determines the node role: if there are
+//     connected clients the node is marked active,
+//     otherwise it is a follower.
+//  5. Delegates to writeHub.ClusterStatus to render
+//     the role, address, entry count, and project
+//     count for the user.
 package status
diff --git a/internal/cli/hub/core/stepdown/doc.go b/internal/cli/hub/core/stepdown/doc.go
index d08968bf6..e8331f60c 100644
--- a/internal/cli/hub/core/stepdown/doc.go
+++ b/internal/cli/hub/core/stepdown/doc.go
@@ -4,10 +4,33 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package stepdown implements graceful leadership transfer
-// for ctx hub stepdown.
+// Package stepdown implements graceful leadership
+// transfer for the ctx hub stepdown command.
 //
-// Key exports: [Run].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// # Overview
+//
+// This package provides the business logic for
+// requesting that the current hub node relinquish
+// its leader role. When a user runs ctx hub stepdown,
+// the command layer delegates to [Run], which signals
+// the transfer and reports the result.
+//
+// # Behavior
+//
+// [Run] signals the current hub node to relinquish its
+// leader role and prints a confirmation once the transfer
+// is initiated.
+//
+// # Data Flow
+//
+// The stepdown pipeline works as follows:
+//
+//  1. The cmd layer invokes [Run] with the cobra
+//     command and unused args.
+//  2. [Run] calls writeHub.SteppedDown to print a
+//     confirmation message indicating the node has
+//     initiated leadership transfer.
+//  3. The function returns nil on success. Future
+//     implementations may add gRPC calls to
+//     coordinate the transfer with the cluster.
 package stepdown
diff --git a/internal/cli/hub/doc.go b/internal/cli/hub/doc.go
index 753a58d41..de7baa5c2 100644
--- a/internal/cli/hub/doc.go
+++ b/internal/cli/hub/doc.go
@@ -7,7 +7,28 @@
 // Package hub provides the ctx hub command group for
 // cluster management operations.
 //
-// Key exports: [Cmd].
-// See source files for implementation details.
-// Part of the internal subsystem.
+// The Hub is a lightweight server that synchronizes
+// context across multiple machines or team members. The
+// hub command group manages the server lifecycle and
+// cluster topology from the command line.
+//
+// # Subcommands
+//
+//   - start: launch a Hub instance on the local machine
+//   - stop: gracefully shut down a running Hub
+//   - status: display the Hub's health, peer list, and
+//     subscription counts
+//   - peer: add or remove peer Hub nodes for multi-node
+//     replication
+//   - stepdown: ask the current leader to yield its role
+//     to another node
+//
+// # Subpackages
+//
+//	cmd/start: server startup logic
+//	cmd/stop: graceful shutdown
+//	cmd/status: health and topology display
+//	cmd/peer: peer management
+//	cmd/stepdown: leader yield
+//	core: shared Hub client and config helpers
 package hub
diff --git a/internal/cli/initialize/cmd/root/doc.go b/internal/cli/initialize/cmd/root/doc.go
index fa1c1e405..5815f18da 100644
--- a/internal/cli/initialize/cmd/root/doc.go
+++ b/internal/cli/initialize/cmd/root/doc.go
@@ -4,11 +4,50 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package root implements the ctx init command for creating and
-// updating .context/ directories.
+// Package root implements the "ctx init" command that
+// creates and populates a .context/ directory for
+// persistent AI context.
 //
-// [Cmd] builds the cobra.Command with --force and --auto-merge
-// flags. [Run] orchestrates the full init workflow: create
-// directories, deploy templates, generate encryption key, deploy
-// hooks and skills, merge settings, and write CLAUDE.md.
+// # What It Does
+//
+// Orchestrates the full initialization workflow for
+// a new or existing project:
+//
+//  1. Validates that ctx is in PATH.
+//  2. Creates .context/ and its subdirectories
+//     (steering/, hooks/, skills/).
+//  3. Scaffolds foundation steering files.
+//  4. Deploys context file templates (TASKS.md,
+//     DECISIONS.md, LEARNINGS.md, CONVENTIONS.md,
+//     CONSTITUTION.md, etc.).
+//  5. Creates entry templates in .context/templates/.
+//  6. Sets up the encrypted scratchpad.
+//  7. Creates project root directories (specs/,
+//     ideas/).
+//  8. Merges permissions into settings.local.json.
+//  9. Auto-enables the ctx plugin globally and
+//     locally.
+//  10. Creates or merges CLAUDE.md.
+//  11. Deploys Makefile.ctx and amends the user
+//     Makefile.
+//  12. Updates .gitignore with recommended entries.
+//
+// # Flags
+//
+//   - --force, -f: Overwrite existing context files
+//     without prompting for confirmation.
+//   - --minimal, -m: Only create essential files
+//     (TASKS, DECISIONS, CONSTITUTION).
+//   - --merge: Auto-merge ctx content into existing
+//     CLAUDE.md without prompting.
+//   - --no-plugin-enable: Skip auto-enabling the ctx
+//     plugin in Claude settings.
+//   - --no-steering-init: Skip scaffolding foundation
+//     steering files in .context/steering/.
+//
+// # Output
+//
+// Prints progress lines for each created file and
+// directory, warnings for non-fatal errors, and a
+// final "next steps" guide with workflow tips.
 package root
diff --git a/internal/cli/initialize/cmd/root/run.go b/internal/cli/initialize/cmd/root/run.go
index d0e147969..9edced4d3 100644
--- a/internal/cli/initialize/cmd/root/run.go
+++ b/internal/cli/initialize/cmd/root/run.go
@@ -8,6 +8,7 @@ package root
 
 import (
 	"bufio"
+	"errors"
 	"os"
 	"path/filepath"
 	"strings"
@@ -35,6 +36,7 @@ import (
 	"github.com/ActiveMemory/ctx/internal/config/fs"
 	"github.com/ActiveMemory/ctx/internal/config/sync"
 	"github.com/ActiveMemory/ctx/internal/config/token"
+	errCtx "github.com/ActiveMemory/ctx/internal/err/context"
 	errFs "github.com/ActiveMemory/ctx/internal/err/fs"
 	errPrompt "github.com/ActiveMemory/ctx/internal/err/prompt"
 	ctxIo "github.com/ActiveMemory/ctx/internal/io"
@@ -47,6 +49,21 @@ import (
 // Creates a .context/ directory with template files. Handles existing
 // directories, minimal mode, and CLAUDE.md merge operations.
 //
+// Under the single-source-anchor resolution model
+// (spec: specs/single-source-context-anchor.md), init is exempt from
+// the require-context-dir gate. It resolves the target in priority
+// order:
+//
+//  1. CTX_DIR env var (read by rc.ContextDir).
+//  2. Fall back to `/.context/` and create it there.
+//
+// The basename guard does not apply at init time because init
+// *creates* the canonical-named directory.
+//
+// After materializing the directory, init prints the shell activation
+// hint via InfoActivateHint so the user's next ctx call in a new
+// process finds the right CTX_DIR.
+//
 // Parameters:
 //   - cmd: Cobra command for output and input streams
 //   - force: If true, overwrite existing files without prompting
@@ -66,14 +83,32 @@ func Run(
 	caller string,
 ) error {
 	// Check if ctx is in PATH (required for hooks to work).
-	// Skip when a caller is set — the caller manages its own binary path.
+	// Skip when a caller is set: the caller manages its own binary path.
 	if caller == "" {
 		if pathErr := validate.CheckCtxInPath(cmd); pathErr != nil {
 			return pathErr
 		}
 	}
 
-	contextDir := rc.ContextDir()
+	// Under the explicit-context-dir resolution model, rc.ContextDir()
+	// returns an error when neither --context-dir nor CTX_DIR is declared.
+	// `ctx init` is an exempt command: fall back to cwd/.context so a
+	// user running `ctx init` in a fresh project gets the expected
+	// behavior. Spec: specs/explicit-context-dir.md. The fallback is
+	// reserved for the not-declared case; propagate any other resolver
+	// failure (e.g. malformed .ctxrc) so operators see the real error
+	// rather than a silent redirection to the working directory.
+	contextDir, ctxErr := rc.ContextDir()
+	if ctxErr != nil {
+		if !errors.Is(ctxErr, errCtx.ErrDirNotDeclared) {
+			return ctxErr
+		}
+		cwd, cwdErr := os.Getwd()
+		if cwdErr != nil {
+			return errFs.ReadInput(cwdErr)
+		}
+		contextDir = filepath.Join(cwd, dir.Context)
+	}
 
 	// Check if .context/ already exists and is properly initialized.
 	// A directory with only logs/ (created by hooks before init) is
@@ -123,7 +158,8 @@ func Run(
 	// for users who want a bare init with no starter
 	// templates.
 	if !noSteeringInit {
-		if steeringErr := steeringInit.Run(cmd); steeringErr != nil {
+		steeringErr := steeringInit.RunWithDir(cmd, contextDir)
+		if steeringErr != nil {
 			// Non-fatal: the rest of init is more
 			// important than the steering templates.
 			label := desc.Text(text.DescKeyInitLabelSteering)
@@ -231,14 +267,15 @@ func Run(
 		initialize.InfoWarnNonFatal(cmd, file.FileGitignore, ignoreErr)
 	}
 
+	initialize.InfoActivateHint(cmd, contextDir)
 	initialize.InfoNextSteps(cmd)
 	initialize.InfoWorkflowTips(cmd)
 
 	// Save the quick-start reference to a project-root file.
-	coreProject.WriteGettingStarted(cmd)
+	coreProject.WriteGettingStarted(cmd, contextDir)
 
 	// Post-script: stage-aware Claude Code setup guidance.
-	// Never fatal, never an error — a friendly nudge
+	// Never fatal, never an error; a friendly nudge
 	// pointing the user at whichever step they're missing.
 	// Honors --no-plugin-enable: if plugin detection was
 	// suppressed, skip the hint too.
diff --git a/internal/cli/initialize/core/backup/doc.go b/internal/cli/initialize/core/backup/doc.go
index 1dfff55f6..e4fd7782c 100644
--- a/internal/cli/initialize/core/backup/doc.go
+++ b/internal/cli/initialize/core/backup/doc.go
@@ -4,10 +4,37 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package backup handles file backup during initialization.
+// Package backup handles file backup during the ctx
+// init pipeline.
 //
-// [File] creates a timestamped backup of an existing file before
-// overwriting, preserving the user's changes for recovery. Backup
-// files use the format name.timestamp.bak and are written alongside
-// the original.
+// # Overview
+//
+// Before overwriting an existing context file, the
+// init command creates a timestamped backup so the
+// user can recover their previous content. This
+// package provides the [File] function that performs
+// that backup.
+//
+// # Behavior
+//
+// [File] writes a timestamped .bak copy of an existing
+// context file so the user can recover previous content
+// after an overwrite.
+//
+// # Data Flow
+//
+// When [File] is called it:
+//
+//  1. Generates a backup filename using the pattern
+//     name.timestamp.bak, where the timestamp is the
+//     current Unix epoch.
+//  2. Writes the original content to the backup path
+//     using safe file I/O with standard permissions.
+//  3. Reports the backup path to the user via the
+//     initialize write layer.
+//  4. Returns an error if the write fails, wrapping
+//     it with the backup error constructor.
+//
+// The backup file is placed alongside the original,
+// making it easy to find and restore if needed.
 package backup
diff --git a/internal/cli/initialize/core/claude/doc.go b/internal/cli/initialize/core/claude/doc.go
index bb0ed1ab0..1c4c840d5 100644
--- a/internal/cli/initialize/core/claude/doc.go
+++ b/internal/cli/initialize/core/claude/doc.go
@@ -4,9 +4,37 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package claude handles CLAUDE.md creation and merging during init.
+// Package claude handles CLAUDE.md creation and merging
+// during the ctx init pipeline.
 //
-// [HandleMd] creates CLAUDE.md if missing, or merges the ctx
-// section into an existing file using marker-delimited regions.
-// Force mode overwrites without merging.
+// # Overview
+//
+// CLAUDE.md is the project-level instruction file that
+// tells Claude Code how to work with a project. During
+// init, this package either creates a new CLAUDE.md
+// from a built-in template or merges the ctx section
+// into an existing file using marker-delimited regions.
+//
+// # Behavior
+//
+// [HandleMd] creates a new CLAUDE.md from the embedded
+// template, or merges the ctx-managed section into an
+// existing file using marker-delimited regions.
+//
+// # Data Flow
+//
+// When [HandleMd] is called it:
+//
+//  1. Reads the embedded CLAUDE.md template from the
+//     assets package.
+//  2. Delegates to the merge sub-package via
+//     merge.OrCreate, passing marker boundaries that
+//     delimit the ctx-managed section.
+//  3. If no CLAUDE.md exists, the template is written
+//     as a new file and a creation message is printed.
+//  4. If CLAUDE.md exists, the ctx section between
+//     markers is replaced (or inserted if absent).
+//  5. Force mode overwrites the section without
+//     prompting. Auto-merge mode skips interactive
+//     confirmation.
 package claude
diff --git a/internal/cli/initialize/core/claude_check/detail.go b/internal/cli/initialize/core/claude_check/detail.go
index ff05b752a..b7a8374ae 100644
--- a/internal/cli/initialize/core/claude_check/detail.go
+++ b/internal/cli/initialize/core/claude_check/detail.go
@@ -22,7 +22,7 @@ import (
 // Details returns a zero-value PluginDetails and ok=false
 // if either file is missing, unreadable, or doesn't mention
 // the ctx plugin. Callers should fall back to a minimal
-// confirmation message in that case — a metadata parse
+// confirmation message in that case; a metadata parse
 // failure must never break the `ctx init` tail.
 //
 // Returns:
diff --git a/internal/cli/initialize/core/claude_check/doc.go b/internal/cli/initialize/core/claude_check/doc.go
index f88128218..48865dd24 100644
--- a/internal/cli/initialize/core/claude_check/doc.go
+++ b/internal/cli/initialize/core/claude_check/doc.go
@@ -4,17 +4,42 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package claude_check detects the state of Claude Code and
-// the ctx plugin so `ctx init` and `ctx setup claude-code`
-// can print stage-aware setup guidance.
+// Package claude_check detects the install state of Claude
+// Code and the ctx plugin so `ctx init` and `ctx setup
+// claude-code` can print **stage-aware** guidance instead of
+// dumping every possible setup step at once.
 //
-// The detector answers four questions, ordered:
+// The detector answers four questions in order, with each
+// negative answer short-circuiting the cascade:
 //
-//  1. Is the `claude` binary on PATH?
-//  2. Is the ctx plugin registered in
-//     ~/.claude/plugins/installed_plugins.json?
-//  3. Is the plugin enabled globally or locally?
-//  4. (derived) Is the setup ready to use?
+//  1. **Is the `claude` binary on PATH?** If not, suggest
+//     installing Claude Code.
+//  2. **Is the ctx plugin registered** in
+//     `~/.claude/plugins/installed_plugins.json`? If not,
+//     suggest `claude plugin install ...`.
+//  3. **Is the plugin enabled** globally or in the project's
+//     `.claude/settings.local.json`? If not, suggest the
+//     enable command.
+//  4. **Are MCP, hooks, and slash commands ready?** If not,
+//     suggest the missing pieces.
 //
-// Key exports: [State], [Detect].
+// # Public Surface
+//
+//   - **[State]**: the four-bool detection result plus a
+//     [PluginDetails] struct with version, install path,
+//     and registration scope.
+//   - **[Detect]**: runs the cascade, returns a [State].
+//     Pure detection: no installation, no mutation.
+//   - **[Details]**: loads rich metadata about the
+//     installed plugin (version, marketplace pin,
+//     install timestamp). Returns a zero value with
+//     `ok == false` when the plugin is not registered.
+//
+// # Concurrency
+//
+// All functions are read-only against the user's home
+// directory; concurrent calls never race. Results are
+// not cached because users frequently install /
+// uninstall mid-session and stale-cache bugs are worse
+// than the trivial re-read cost.
 package claude_check
diff --git a/internal/cli/initialize/core/claude_check/hint.go b/internal/cli/initialize/core/claude_check/hint.go
index 38938a953..565a8ae73 100644
--- a/internal/cli/initialize/core/claude_check/hint.go
+++ b/internal/cli/initialize/core/claude_check/hint.go
@@ -14,7 +14,7 @@ import (
 
 // InitHint prints stage-aware Claude Code setup guidance as
 // a post-script at the end of `ctx init`. Never writes
-// files, never errors, never fatal — it's a friendly nudge
+// files, never errors, never fatal: it's a friendly nudge
 // matching whichever step the user still needs to complete.
 //
 // State-to-output mapping:
@@ -56,7 +56,7 @@ func InitHint(cmd *cobra.Command) {
 // SetupHint prints stage-aware Claude Code setup guidance
 // as the primary output of `ctx setup claude-code`. Unlike
 // the other `ctx setup ` commands, Claude Code has no
-// writable config file ctx can emit directly — the
+// writable config file ctx can emit directly; the
 // integration is delivered via the ctx plugin installed
 // from the user's local clone.
 //
@@ -68,7 +68,7 @@ func InitHint(cmd *cobra.Command) {
 //     flow.
 //   - StatePluginInstalledNotEnabled: print the same
 //     install flow, which ends with "re-run `ctx init` to
-//     enable locally" — the action the user needs.
+//     enable locally", the action the user needs.
 //   - StatePluginReady: print the detail block + setup
 //     ready message, or the minimal variant on metadata
 //     parse failure.
diff --git a/internal/cli/initialize/core/claude_check/types.go b/internal/cli/initialize/core/claude_check/types.go
index a648352c4..8203c1668 100644
--- a/internal/cli/initialize/core/claude_check/types.go
+++ b/internal/cli/initialize/core/claude_check/types.go
@@ -69,7 +69,7 @@ type marketplaceEntry struct {
 	Source marketplaceSource `json:"source"`
 }
 
-// marketplaceSource captures the origin of a marketplace —
+// marketplaceSource captures the origin of a marketplace:
 // either a github repo or a local directory (the dev
 // flow).
 type marketplaceSource struct {
diff --git a/internal/cli/initialize/core/doc.go b/internal/cli/initialize/core/doc.go
index ae9f34564..5c63ec2af 100644
--- a/internal/cli/initialize/core/doc.go
+++ b/internal/cli/initialize/core/doc.go
@@ -4,9 +4,48 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package core provides helper functions for the initialize command.
+// Package core is the umbrella for the ctx init command's
+// business logic.
 //
-// This package contains file handling, template creation, plugin management,
-// prompt/CLAUDE.md merging, and validation logic used by the init command's
-// Run function.
+// # Overview
+//
+// The init command bootstraps a project's .context/
+// directory, creates context files from templates,
+// configures companion tools, and sets up the encrypted
+// scratchpad. This package groups the sub-packages that
+// implement each stage of the initialization pipeline.
+//
+// # Sub-packages
+//
+//   - backup: creates timestamped .bak copies before
+//     overwriting existing files.
+//   - claude: creates or merges CLAUDE.md with the
+//     ctx-managed section.
+//   - claude_check: validates CLAUDE.md structure and
+//     renders diagnostic hints.
+//   - entry: creates context file templates (TASKS.md,
+//     DECISIONS.md, etc.) and locates insertion points
+//     in existing files.
+//   - merge: marker-delimited section merging for
+//     CLAUDE.md and prompt files.
+//   - pad: sets up the encrypted or plaintext
+//     scratchpad.
+//   - plugin: detects and enables the ctx companion
+//     plugin.
+//   - project: scaffolds project-level files like
+//     Makefile and .gitignore.
+//   - tpl: generic template deployment engine used
+//     by entry and other sub-packages.
+//   - validate: pre-flight checks (ctx in PATH,
+//     essential files present).
+//   - vscode: generates VS Code workspace files
+//     (extensions.json, tasks.json, mcp.json).
+//
+// # Data Flow
+//
+// The cmd layer's Run function orchestrates the init
+// pipeline by calling into these sub-packages in order:
+// validate, project scaffolding, entry templates, claude
+// handling, pad setup, plugin detection, and vscode
+// configuration.
 package core
diff --git a/internal/cli/initialize/core/entry/doc.go b/internal/cli/initialize/core/entry/doc.go
index f35c24fd6..4943d16ad 100644
--- a/internal/cli/initialize/core/entry/doc.go
+++ b/internal/cli/initialize/core/entry/doc.go
@@ -4,10 +4,37 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package entry handles template creation and insertion point
-// detection during initialization.
+// Package entry handles template creation and insertion
+// point detection during the ctx init pipeline.
 //
-// [FindInsertionPoint] locates where new content should be inserted
-// in an existing file. [CreateTemplates] writes context file
-// templates (TASKS.md, DECISIONS.md, etc.) to the target directory.
+// # Overview
+//
+// This package provides two capabilities needed when
+// initializing a project's context directory: deploying
+// entry template files and finding the right place to
+// insert ctx content into existing markdown files.
+//
+// # Behavior
+//
+// [CreateTemplates] deploys entry template files (TASKS.md,
+// DECISIONS.md, LEARNINGS.md, etc.) into .context/templates/.
+// [FindInsertionPoint] parses existing markdown to locate
+// the position where ctx content should be inserted, placing
+// it after a level-1 heading or at the top of the file.
+//
+// # Algorithms
+//
+// FindInsertionPoint works by parsing lines top-down:
+//
+//  1. Skips leading blank lines.
+//  2. If the first non-blank line is a level-1 heading,
+//     returns the position after the heading and any
+//     trailing blank lines.
+//  3. If the first non-blank line is a deeper heading
+//     or non-heading text, returns position 0 (insert
+//     at the top).
+//
+// CreateTemplates delegates to the tpl sub-package's
+// DeployTemplates function, passing the embedded entry
+// template list and reader functions.
 package entry
diff --git a/internal/cli/initialize/core/merge/doc.go b/internal/cli/initialize/core/merge/doc.go
index 4e2ffd1c4..9519f482b 100644
--- a/internal/cli/initialize/core/merge/doc.go
+++ b/internal/cli/initialize/core/merge/doc.go
@@ -1,14 +1,66 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package merge handles create-or-merge file operations during init.
+// Package merge implements the **create-or-merge** file
+// operations that make `ctx init` safely idempotent: each
+// foundation file is either created from a template or, if
+// already present, has only its **ctx-managed marker
+// section** updated, never the user-edited surrounding
+// content.
 //
-// [OrCreate] creates a file from template, or merges the template's
-// marked section into an existing file. [UpdateMarkedSection]
-// replaces content between start/end markers. [SettingsPermissions]
-// merges Claude Code permission settings. [Permissions] deduplicates
-// and merges allow/deny permission lists.
+// The package solves the "I edited my CONSTITUTION.md and
+// re-ran `ctx init`: did I lose my edits?" problem by
+// making "yes, you keep them" the only possible answer.
+//
+// # Public Surface
+//
+//   - **[OrCreate](path, template, vars)**: file does
+//     not exist → write the template (with `vars`
+//     interpolated). File exists → run
+//     [UpdateMarkedSection] on it. Always creates a
+//     timestamped `.bak` before writing. Returns a
+//     report indicating which path was taken.
+//   - **[UpdateMarkedSection](existing, newSection,
+//     start, end)**: finds the `start` and `end` marker
+//     lines in `existing` and replaces only the content
+//     between them. If the markers are missing, the
+//     section is inserted at the bottom of the file with
+//     the markers added so the next run becomes a true
+//     in-place update.
+//   - **[SettingsPermissions](path, allow, deny)**:
+//     specialized merger for Claude Code permission
+//     lists; preserves user-added entries while ensuring
+//     the ctx-required entries are present.
+//   - **[Permissions](existing, additions)**:
+//     deduplicating list union used by the settings
+//     merger and by `_ctx-permission-sanitize`.
+//
+// # Marker Convention
+//
+// ctx-managed sections are bracketed by HTML-comment
+// markers:
+//
+//	<!-- ctx:begin -->
+//	... ctx-managed content ...
+//	<!-- ctx:end -->
+//
+// The markers are invisible in rendered Markdown but
+// trivially greppable. Constants for the well-known
+// pairs live in [internal/config/marker].
+//
+// # Backup Policy
+//
+// Every write goes through a timestamped backup
+// (`name.timestamp.bak`). Backups accumulate;
+// `ctx prune` cleans them on schedule. The trade-off is
+// disk space for accident recovery, which the user
+// always wants.
+//
+// # Concurrency
+//
+// Filesystem-bound and stateless; serialized through
+// process-level execution.
 package merge
diff --git a/internal/cli/initialize/core/pad/doc.go b/internal/cli/initialize/core/pad/doc.go
index 6630a32b5..a19c452c4 100644
--- a/internal/cli/initialize/core/pad/doc.go
+++ b/internal/cli/initialize/core/pad/doc.go
@@ -4,11 +4,41 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package pad provides scratchpad initialization
-// logic for ctx init.
+// Package pad provides scratchpad initialisation logic
+// for the ctx init pipeline.
 //
-// Handles both plaintext (.md) and encrypted (.enc)
-// scratchpad modes based on rc configuration.
-// Key exports: [Setup].
-// Used by cmd/root.
+// # Overview
+//
+// The scratchpad is a per-project note area that can
+// operate in plaintext or encrypted mode. During init,
+// this package sets up the appropriate backing storage
+// based on the user's runtime configuration.
+//
+// # Behavior
+//
+// [Setup] provisions the scratchpad backing storage:
+// in encrypted mode it generates a 256-bit AES key,
+// in plaintext mode it creates an empty scratchpad.md.
+//
+// # Modes
+//
+// Setup checks the rc.ScratchpadEncrypt setting and
+// branches accordingly:
+//
+// Encrypted mode (default):
+//
+//  1. If the encryption key already exists at the
+//     configured key path, skips with an info message.
+//  2. If an .enc file exists but no key is found,
+//     warns the user about the orphaned ciphertext.
+//  3. Otherwise, creates the key directory with
+//     restricted permissions, generates a 256-bit
+//     AES key via the crypto package, and saves it.
+//
+// Plaintext mode:
+//
+//  1. If scratchpad.md already exists, skips with
+//     an info message.
+//  2. Otherwise, creates an empty scratchpad.md in
+//     the .context/ directory.
 package pad
diff --git a/internal/cli/initialize/core/pad/setup.go b/internal/cli/initialize/core/pad/setup.go
index 6bd747e65..a1f0b04de 100644
--- a/internal/cli/initialize/core/pad/setup.go
+++ b/internal/cli/initialize/core/pad/setup.go
@@ -60,7 +60,10 @@ func setupPlaintext(
 func setupEncrypted(
 	cmd *cobra.Command, contextDir string,
 ) error {
-	kPath := rc.KeyPath()
+	kPath, kpErr := rc.KeyPath()
+	if kpErr != nil {
+		return kpErr
+	}
 	encPath := filepath.Join(contextDir, cfgPad.Enc)
 
 	// Check if the key already exists (idempotent)
diff --git a/internal/cli/initialize/core/plugin/doc.go b/internal/cli/initialize/core/plugin/doc.go
index 6576631ef..c36d94e44 100644
--- a/internal/cli/initialize/core/plugin/doc.go
+++ b/internal/cli/initialize/core/plugin/doc.go
@@ -1,14 +1,50 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package plugin handles Claude Code plugin detection and enablement
-// during initialization.
+// Package plugin handles **Claude Code plugin enablement**
+// during `ctx init`, the read/write side of the same
+// settings files that
+// [internal/cli/initialize/core/claude_check] only reads.
 //
-// [EnableGlobally] registers the ctx plugin in Claude Code's global
-// settings. [Installed] checks if the plugin binary exists.
-// [EnabledGlobally] and [EnabledLocally] check registration status
-// in global and project-level settings respectively.
+// Claude Code keeps two layers of plugin state:
+//
+//   - **Global**: `~/.claude/settings.json`'s
+//     `enabledPlugins` map. Affects every project on the
+//     machine.
+//   - **Local**: `<project>/.claude/settings.local.json`'s
+//     `enabledPlugins` map. Affects only this project.
+//
+// Both can independently mark a plugin as enabled. ctx
+// prefers global enablement so users do not have to
+// re-flip the bit per project, but supports local-only
+// enablement for users who segment configs.
+//
+// # Public Surface
+//
+//   - **[Installed](pluginID)**: true when the plugin
+//     binary is registered in
+//     `~/.claude/plugins/installed_plugins.json`.
+//   - **[EnabledGlobally](pluginID)**: true when the
+//     plugin is enabled in the global settings file.
+//   - **[EnabledLocally](projectRoot, pluginID)**:
+//     true when the plugin is enabled in the project's
+//     local settings file.
+//   - **[EnableGlobally](pluginID)**: atomically merges
+//     the plugin into the global `enabledPlugins` map.
+//     Idempotent. Creates the settings file if missing.
+//
+// # Settings-File Editing Contract
+//
+// All writes are **JSON-merge-aware**: existing keys are
+// preserved; only the `enabledPlugins.<plugin-id>` key is touched.
+// A pre-write backup (`.bak`) is created so a manual
+// rollback is one `mv` away.
+//
+// # Concurrency
+//
+// Filesystem-bound and stateless; serialized through
+// process-level execution.
 package plugin
diff --git a/internal/cli/initialize/core/plugin/plugin.go b/internal/cli/initialize/core/plugin/plugin.go
index 4b9b4fd63..f382c527c 100644
--- a/internal/cli/initialize/core/plugin/plugin.go
+++ b/internal/cli/initialize/core/plugin/plugin.go
@@ -160,7 +160,7 @@ func EnabledGlobally() bool {
 
 // EnableLocally enables the ctx plugin in the project-level
 // .claude/settings.local.json. Unlike EnableGlobally, this
-// does not check installed_plugins.json — it writes
+// does not check installed_plugins.json; it writes
 // unconditionally because ctx init owns the project setup.
 //
 // Parameters:
diff --git a/internal/cli/initialize/core/project/doc.go b/internal/cli/initialize/core/project/doc.go
index ea9ea4ca8..b6dd53f26 100644
--- a/internal/cli/initialize/core/project/doc.go
+++ b/internal/cli/initialize/core/project/doc.go
@@ -1,13 +1,54 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package project handles project-root directory and file creation
-// during initialization.
+// Package project handles **project-root scaffolding** during
+// `ctx init`: creating the `.context/` directory tree with
+// the right permissions and deploying optional Makefile
+// integration when the host project already uses Make.
 //
-// [CreateDirs] creates the .context/ directory tree with proper
-// permissions. [HandleMakefileCtx] deploys the Makefile.ctx
-// template if it does not already exist.
+// The package is the *filesystem layer* of init; the foundation
+// **content** comes from [internal/cli/initialize/core/merge]
+// and [internal/assets].
+//
+// # Public Surface
+//
+//   - **[CreateDirs](contextDir)**: creates the
+//     `.context/` tree:
+//   - `.context/`          (0o755)
+//   - `.context/archive/`  for archived tasks/decisions
+//   - `.context/state/`    for per-session markers,
+//     events, trace history (mode 0o755, readable by
+//     hooks)
+//   - `.context/journal/`  for enriched journal entries
+//   - `.context/memory/`   for the Claude-Code memory
+//     mirror
+//   - `.context/steering/` for steering files
+//   - `.context/hooks/`    for project-authored
+//     lifecycle scripts
+//     Idempotent: existing directories are left in place
+//     with their existing permissions.
+//   - **[HandleMakefileCtx](projectRoot)**: when a
+//     `Makefile` already exists at the project root,
+//     deploys `Makefile.ctx` from the embedded template
+//     so users can run `make ctx-status`, `make
+//     ctx-agent`, etc. Skipped when the project has no
+//     Makefile (avoids polluting non-Make projects).
+//
+// # Permissions Rationale
+//
+// The hooks directory needs `0o755` (not `0o700`) because
+// child hook scripts launched by AI tools may inherit
+// reduced privileges; making the directory world-readable
+// avoids "cannot stat" failures across user/agent
+// boundaries. State files are `0o644` for the same
+// reason.
+//
+// # Concurrency
+//
+// Filesystem-bound and stateless. Concurrent invocations
+// against the same root would race on `MkdirAll` writes;
+// in practice ctx is single-process.
 package project
diff --git a/internal/cli/initialize/core/project/getting_started.go b/internal/cli/initialize/core/project/getting_started.go
index c9466b2a7..e110df751 100644
--- a/internal/cli/initialize/core/project/getting_started.go
+++ b/internal/cli/initialize/core/project/getting_started.go
@@ -7,6 +7,8 @@
 package project
 
 import (
+	"fmt"
+
 	"github.com/spf13/cobra"
 
 	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
@@ -18,14 +20,31 @@ import (
 	"github.com/ActiveMemory/ctx/internal/write/initialize"
 )
 
-// WriteGettingStarted saves the next-steps and workflow-tips text to
-// GETTING_STARTED.md in the project root. Best-effort: failures are
-// non-fatal since the same content was already printed to stdout.
+// WriteGettingStarted saves an anatomy preamble (what `.context/`
+// is and how the project-root contract works), the activation hint,
+// next-steps, and workflow-tips text to GETTING_STARTED.md in the
+// project root. The file is the human's durable primer after
+// running `ctx init`: the preamble names the contract so future
+// readers know which directory rule is load-bearing; the activation
+// hint comes next because every subsequent `ctx <command>`
+// requires CTX_DIR to be declared. Best-effort: failures are
+// non-fatal since the activation hint and next-steps were already
+// printed to stdout.
 //
 // Parameters:
-//   - cmd: Cobra command for status output
-func WriteGettingStarted(cmd *cobra.Command) {
-	content := desc.Text(text.DescKeyWriteInitNextStepsBlock) +
+//   - cmd:        Cobra command for status output.
+//   - contextDir: Absolute path of the just-created .context/
+//     directory, used in the activation hint.
+func WriteGettingStarted(cmd *cobra.Command, contextDir string) {
+	activateHint := fmt.Sprintf(
+		desc.Text(text.DescKeyWriteInitActivateHint),
+		contextDir,
+	)
+	content := desc.Text(text.DescKeyWriteInitAnatomyPreamble) +
+		token.NewlineLF +
+		activateHint +
+		token.NewlineLF +
+		desc.Text(text.DescKeyWriteInitNextStepsBlock) +
 		token.NewlineLF +
 		desc.Text(text.DescKeyWriteInitWorkflowTips) +
 		token.NewlineLF
diff --git a/internal/cli/initialize/core/tpl/doc.go b/internal/cli/initialize/core/tpl/doc.go
index c4cfcb043..1dafae681 100644
--- a/internal/cli/initialize/core/tpl/doc.go
+++ b/internal/cli/initialize/core/tpl/doc.go
@@ -4,9 +4,43 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package tpl handles template deployment during initialization.
+// Package tpl handles template deployment during the
+// ctx init pipeline.
 //
-// [DeployTemplates] copies embedded template files to the target
-// directory, creating subdirectories as needed. Existing files
-// are skipped unless force mode is enabled.
+// # Overview
+//
+// This package provides a generic engine for copying
+// embedded template files into the user's .context/
+// directory. It is used by the entry and other init
+// sub-packages to deploy context file templates.
+//
+// # Behavior
+//
+// [DeployTemplates] creates a target subdirectory under
+// .context/ and writes each embedded template file into
+// it, skipping files that already exist unless force mode
+// is enabled.
+//
+// # Data Flow
+//
+// When [DeployTemplates] is called it:
+//
+//  1. Creates the target subdirectory under contextDir
+//     (e.g. .context/templates/) with executable
+//     permissions.
+//  2. Calls the provided list function to enumerate
+//     all embedded template names.
+//  3. For each template, checks whether the target
+//     file already exists. If it exists and force is
+//     false, the file is skipped with a diagnostic.
+//  4. Calls the provided read function to obtain the
+//     template content.
+//  5. Writes the content to the target path using
+//     safe file I/O.
+//  6. Reports each created or skipped file via the
+//     initialize write layer.
+//
+// The list and read functions are injected as
+// parameters, making DeployTemplates reusable across
+// different template sets (entries, prompts, etc.).
 package tpl
diff --git a/internal/cli/initialize/core/validate/doc.go b/internal/cli/initialize/core/validate/doc.go
index 1bb6dc98e..ceaaee313 100644
--- a/internal/cli/initialize/core/validate/doc.go
+++ b/internal/cli/initialize/core/validate/doc.go
@@ -4,9 +4,36 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package validate runs pre-flight checks during initialization.
+// Package validate runs pre-flight checks during the
+// ctx init pipeline.
 //
-// [CheckCtxInPath] verifies the ctx binary is accessible via PATH,
-// warning the user if the installed binary will not be found by
-// hooks and scripts.
+// # Overview
+//
+// Before the init command creates or modifies context
+// files, it runs validation checks to ensure the
+// environment is properly configured. This package
+// provides those checks.
+//
+// # Behavior
+//
+// [CheckCtxInPath] uses exec.LookPath to verify the ctx
+// binary is reachable via PATH, warning if it is missing.
+// [EssentialFilesPresent] checks for TASKS.md,
+// CONSTITUTION.md, or DECISIONS.md, treating a directory
+// without them as uninitialised.
+//
+// # Details
+//
+// CheckCtxInPath uses exec.LookPath to search for the
+// ctx binary. If the binary is not found, it prints a
+// warning via the initialize write layer and returns
+// an error. The check can be skipped by setting the
+// CTX_SKIP_PATH_CHECK environment variable to "true".
+//
+// EssentialFilesPresent checks for the presence of any
+// file in the required files list (TASKS.md,
+// CONSTITUTION.md, DECISIONS.md). A directory that
+// contains only logs or other non-essential content is
+// considered uninitialised, allowing init to run a
+// fresh setup.
 package validate
diff --git a/internal/cli/initialize/core/vscode/doc.go b/internal/cli/initialize/core/vscode/doc.go
index a44c983cb..017d05513 100644
--- a/internal/cli/initialize/core/vscode/doc.go
+++ b/internal/cli/initialize/core/vscode/doc.go
@@ -4,19 +4,50 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package vscode generates VS Code workspace configuration files
-// during ctx init.
+// Package vscode generates VS Code workspace configuration
+// files during the ctx init pipeline.
 //
-// [WriteAll] is the entry point, invoked from the init pipeline.
-// It delegates to per-file generators that create:
+// # Overview
 //
-//   - extensions.json — recommended extensions including the ctx
-//     VS Code extension
-//   - tasks.json — shell tasks for common ctx commands (status,
-//     drift, agent)
-//   - mcp.json — MCP server registration pointing at ctx mcp serve
+// When ctx init runs, this package creates the .vscode/
+// directory and populates it with configuration files
+// that integrate VS Code with ctx. Each file generator
+// is idempotent: existing files are skipped to preserve
+// user customisations.
 //
-// Each generator skips its file if it already exists, printing a
-// diagnostic via [writeVscode.InfoExistsSkipped]. Types used for
-// JSON serialisation live in types.go.
+// # Behavior
+//
+// createVSCodeArtifacts generates extensions.json (extension
+// recommendations), tasks.json (ctx shell tasks), and mcp.json
+// (MCP server registration) inside .vscode/, skipping files
+// that already exist.
+//
+// # Generated Files
+//
+// The package creates three files in .vscode/:
+//
+//   - extensions.json: recommends the ctx VS Code
+//     extension. If the file exists and already lists
+//     the extension, it is skipped. If the file exists
+//     without the recommendation, the user is prompted
+//     to add it manually.
+//   - tasks.json: defines shell tasks for common ctx
+//     commands (status, drift, agent). Uses VS Code
+//     task schema version 2.0.0 with shared terminal
+//     panels.
+//   - mcp.json: registers the ctx MCP server so
+//     VS Code can communicate with ctx via the Model
+//     Context Protocol.
+//
+// # Internal Types
+//
+//   - [vsTask]: single task definition for tasks.json
+//   - [vsPresentation]: terminal display settings
+//   - [vsTasksFile]: top-level tasks.json structure
+//   - [vsMCPServer]: MCP server entry in mcp.json
+//   - [vsMCPFile]: top-level mcp.json structure
+//
+// Individual file errors are non-fatal and reported
+// inline, allowing the rest of the init pipeline to
+// continue.
 package vscode
diff --git a/internal/cli/initialize/doc.go b/internal/cli/initialize/doc.go
index cd10c687b..5c46c4bf5 100644
--- a/internal/cli/initialize/doc.go
+++ b/internal/cli/initialize/doc.go
@@ -4,11 +4,76 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package initialize implements the "ctx init" command for initializing a
-// .context/ directory with template files.
+// Package initialize implements **`ctx init`**, the first
+// command a user runs against a project to bootstrap the
+// `.context/` directory, scaffold the foundation files, and
+// optionally wire the Claude Code plugin and other tool
+// integrations.
 //
-// The init command creates the foundation for persistent AI context by
-// generating template files for constitution rules, tasks, decisions,
-// learnings, conventions, and architecture documentation. It also sets
-// up Claude Code integration with hooks and slash commands.
+// `ctx init` is the entry point that turns "a directory" into
+// "a ctx-managed project". Its idempotency is a hard
+// requirement: running it twice in a row must produce no
+// destructive changes, only fresh foundation files where
+// they were missing, and merge-aware updates to settings
+// files that already exist.
+//
+// # What `ctx init` Creates
+//
+// On a clean directory the command produces:
+//
+//   - **`.context/` tree**: the dir itself plus
+//     `archive/`, `state/`, `journal/`, `memory/`,
+//     `steering/`, `hooks/` subdirectories with sane
+//     permissions ([core/project]).
+//   - **Foundation files**: `CONSTITUTION.md`,
+//     `TASKS.md`, `DECISIONS.md`, `LEARNINGS.md`,
+//     `CONVENTIONS.md`, `ARCHITECTURE.md`, `GLOSSARY.md`,
+//     each from a template with the project name
+//     interpolated.
+//   - **Steering scaffold**: four foundation steering
+//     files (`product.md`, `tech.md`, `structure.md`,
+//     `workflow.md`) under `.context/steering/`.
+//   - **`Makefile.ctx`**: optional; deployed when the
+//     project has a `Makefile` so users can `make
+//     ctx-status` etc.
+//   - **Tool wiring**: Claude Code plugin enablement,
+//     Copilot instructions, VS Code tasks, MCP config,
+//     etc., depending on what the host environment has
+//     installed.
+//
+// # Sub-Packages
+//
+//   - **[cmd/root]**: the cobra command +
+//     flag wiring.
+//   - **[core/project]**: directory tree and
+//     foundation file creation.
+//   - **[core/plugin]**: Claude Code plugin
+//     detection and global enablement.
+//   - **[core/claude_check]**: stage-aware detection of
+//     Claude Code state used to print contextual
+//     guidance during init.
+//   - **[core/merge]**: create-or-merge file
+//     operations with marker-bracketed sections so
+//     re-running init never clobbers user edits.
+//   - **[core/vscode]**: `.vscode/` workspace
+//     artifacts (tasks.json, mcp.json, extensions.json).
+//
+// # Idempotency Contract
+//
+// Every action performed by init must satisfy:
+//
+//  1. **Existing files are merged, not overwritten**: the
+//     [core/merge] helpers find the marker pair, replace
+//     only the bracketed content, and leave everything
+//     else alone.
+//  2. **Permissions are deduplicated**: Claude Code
+//     `allow`/`deny` lists are merged; existing entries
+//     are preserved.
+//  3. **Templated values are stable**: the project name
+//     interpolation uses `git remote` data when
+//     available so re-running produces byte-identical
+//     output.
+//  4. **No destructive operations without an explicit
+//     `--force`**: `init` does not delete or move user
+//     files.
 package initialize
diff --git a/internal/cli/initialize/init_test.go b/internal/cli/initialize/init_test.go
index 362729699..1c1970550 100644
--- a/internal/cli/initialize/init_test.go
+++ b/internal/cli/initialize/init_test.go
@@ -32,6 +32,9 @@ func TestInitCommand(t *testing.T) {
 		t.Fatalf("failed to chdir: %v", err)
 	}
 	defer func() { _ = os.Chdir(origDir) }()
+	t.Setenv("HOME", tmpDir)
+	t.Setenv(env.CtxDir, filepath.Join(tmpDir, ".context"))
+	t.Setenv(env.SkipPathCheck, env.True)
 
 	cmd := Cmd()
 	cmd.SetArgs([]string{})
@@ -76,6 +79,7 @@ func TestInitCreatesSteeringHooksSkillsDirs(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 	t.Setenv("HOME", tmpDir)
+	t.Setenv(env.CtxDir, filepath.Join(tmpDir, ".context"))
 	t.Setenv(env.SkipPathCheck, env.True)
 
 	cmd := Cmd()
@@ -110,6 +114,7 @@ func TestInitSkipsExistingSteeringHooksSkillsDirs(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 	t.Setenv("HOME", tmpDir)
+	t.Setenv(env.CtxDir, filepath.Join(tmpDir, ".context"))
 	t.Setenv(env.SkipPathCheck, env.True)
 
 	// Pre-create the directories with a marker file inside each.
@@ -156,6 +161,9 @@ func TestInitMergeInsertsAfterH1(t *testing.T) {
 		t.Fatalf("failed to chdir: %v", err)
 	}
 	defer func() { _ = os.Chdir(origDir) }()
+	t.Setenv("HOME", tmpDir)
+	t.Setenv(env.CtxDir, filepath.Join(tmpDir, ".context"))
+	t.Setenv(env.SkipPathCheck, env.True)
 
 	existingContent := "# My Amazing Project\n\n" +
 		"This is the project description.\n\n" +
@@ -207,6 +215,9 @@ func TestInitMergeInsertsAtTopWhenNoH1(t *testing.T) {
 		t.Fatalf("failed to chdir: %v", err)
 	}
 	defer func() { _ = os.Chdir(origDir) }()
+	t.Setenv("HOME", tmpDir)
+	t.Setenv(env.CtxDir, filepath.Join(tmpDir, ".context"))
+	t.Setenv(env.SkipPathCheck, env.True)
 
 	existingContent := "## Build Instructions\n\nRun make build.\n\n" +
 		"## Testing\n\nRun make test.\n"
@@ -254,6 +265,9 @@ func TestInitCreatesPermissions(t *testing.T) {
 		t.Fatalf("failed to chdir: %v", err)
 	}
 	defer func() { _ = os.Chdir(origDir) }()
+	t.Setenv("HOME", tmpDir)
+	t.Setenv(env.CtxDir, filepath.Join(tmpDir, ".context"))
+	t.Setenv(env.SkipPathCheck, env.True)
 
 	cmd := Cmd()
 	cmd.SetArgs([]string{})
@@ -310,6 +324,9 @@ func TestInitMergesPermissions(t *testing.T) {
 		t.Fatalf("failed to chdir: %v", err)
 	}
 	defer func() { _ = os.Chdir(origDir) }()
+	t.Setenv("HOME", tmpDir)
+	t.Setenv(env.CtxDir, filepath.Join(tmpDir, ".context"))
+	t.Setenv(env.SkipPathCheck, env.True)
 
 	if err = os.MkdirAll(".claude", 0750); err != nil {
 		t.Fatalf("failed to create .claude: %v", err)
@@ -378,6 +395,9 @@ func TestInitWithExistingClaudeMdWithCtxMarker(t *testing.T) {
 		t.Fatalf("failed to chdir: %v", err)
 	}
 	defer func() { _ = os.Chdir(origDir) }()
+	t.Setenv("HOME", tmpDir)
+	t.Setenv(env.CtxDir, filepath.Join(tmpDir, ".context"))
+	t.Setenv(env.SkipPathCheck, env.True)
 
 	existingContent := "# My Project\n\n" +
 		"This is my existing CLAUDE.md content.\n\n" +
@@ -441,6 +461,7 @@ func TestRunInit_Minimal(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 	t.Setenv("HOME", tmpDir)
+	t.Setenv(env.CtxDir, filepath.Join(tmpDir, ".context"))
 	t.Setenv(env.SkipPathCheck, env.True)
 
 	cmd := Cmd()
@@ -475,6 +496,7 @@ func TestRunInit_Force(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 	t.Setenv("HOME", tmpDir)
+	t.Setenv(env.CtxDir, filepath.Join(tmpDir, ".context"))
 	t.Setenv(env.SkipPathCheck, env.True)
 
 	cmd := Cmd()
@@ -507,6 +529,7 @@ func TestRunInit_Merge(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 	t.Setenv("HOME", tmpDir)
+	t.Setenv(env.CtxDir, filepath.Join(tmpDir, ".context"))
 	t.Setenv(env.SkipPathCheck, env.True)
 
 	mdContent := "# My Project\n\nExisting.\n"
@@ -543,6 +566,7 @@ func TestInitScaffoldsFoundationSteeringFiles(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 	t.Setenv("HOME", tmpDir)
+	t.Setenv(env.CtxDir, filepath.Join(tmpDir, ".context"))
 	t.Setenv(env.SkipPathCheck, env.True)
 
 	cmd := Cmd()
@@ -589,6 +613,7 @@ func TestInitNoSteeringInitFlagSkipsScaffold(t *testing.T) {
 	}
 	defer func() { _ = os.Chdir(origDir) }()
 	t.Setenv("HOME", tmpDir)
+	t.Setenv(env.CtxDir, filepath.Join(tmpDir, ".context"))
 	t.Setenv(env.SkipPathCheck, env.True)
 
 	cmd := Cmd()
diff --git a/internal/cli/journal/cmd/importer/doc.go b/internal/cli/journal/cmd/importer/doc.go
index aaceb4338..d0d0259cb 100644
--- a/internal/cli/journal/cmd/importer/doc.go
+++ b/internal/cli/journal/cmd/importer/doc.go
@@ -1,13 +1,49 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package importer implements the ctx journal import subcommand.
+// Package importer implements **`ctx journal import`**,
+// the subcommand that ingests raw AI session files from
+// `~/.claude/projects/<project>/` (and the equivalent paths
+// for other tools) into enriched, git-tracked journal
+// entries under `.context/journal/`.
 //
-// [Cmd] builds the cobra.Command with --all, --regenerate,
-// --dry-run, and --keep-frontmatter flags. [Run] plans the import
-// (which sessions to create, regenerate, or skip), confirms with
-// the user, and executes the plan.
+// # Public Surface
+//
+//   - **[Cmd]**: cobra command with `--all`,
+//     `--regenerate`, `--dry-run`, and
+//     `--keep-frontmatter` flags.
+//
+//   - **[Run]**: three-phase orchestration;
+//
+//     1. **Plan**: diff the source set against the
+//     journal state file ([internal/journal/state])
+//     to produce an [entity.ImportPlan]: which
+//     sources to create, regenerate, or skip.
+//     2. **Confirm**: print the plan and ask for
+//     confirmation (skipped under `--dry-run`).
+//     3. **Execute**: for each action, parse via
+//     [internal/journal/parser], reduce/collapse
+//     /normalize, write the entry, update the
+//     state file. Locked entries
+//     ([internal/cli/journal/core/lock]) are
+//     skipped with a notice.
+//
+// # `--regenerate` Semantics
+//
+// Without `--regenerate`, only sources that have not
+// been imported produce new entries. With
+// `--regenerate`, **every** source is re-imported,
+// preserving any frontmatter the user added by
+// default (`--keep-frontmatter true`). Pass
+// `--keep-frontmatter=false` to discard enrichments
+// (destructive; the importer warns explicitly).
+//
+// # Concurrency
+//
+// Sequential. Concurrent imports against the same
+// journal directory would race on state-file writes;
+// ctx is single-process.
 package importer
diff --git a/internal/cli/journal/cmd/importer/run.go b/internal/cli/journal/cmd/importer/run.go
index 6e21c0c5f..36e8459ed 100644
--- a/internal/cli/journal/cmd/importer/run.go
+++ b/internal/cli/journal/cmd/importer/run.go
@@ -98,7 +98,12 @@ func Run(cmd *cobra.Command, args []string, opts entity.ImportOpts) error {
 	}
 
 	// 4. Ensure journal directory exists.
-	journalDir := filepath.Join(rc.ContextDir(), dir.Journal)
+	ctxDir, ctxErr := rc.RequireContextDir()
+	if ctxErr != nil {
+		cmd.SilenceUsage = true
+		return ctxErr
+	}
+	journalDir := filepath.Join(ctxDir, dir.Journal)
 	if mkErr := ctxIo.SafeMkdirAll(journalDir, fs.PermExec); mkErr != nil {
 		return errFs.Mkdir(dir.Journal, mkErr)
 	}
diff --git a/internal/cli/journal/cmd/lock/doc.go b/internal/cli/journal/cmd/lock/doc.go
index bc47272d1..632c9e956 100644
--- a/internal/cli/journal/cmd/lock/doc.go
+++ b/internal/cli/journal/cmd/lock/doc.go
@@ -4,9 +4,45 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package lock implements the ctx journal lock subcommand.
+// Package lock implements the "ctx journal lock" command.
 //
-// [Cmd] builds the cobra.Command with --all flag. [Run] marks
-// journal entries as locked, preventing future import regeneration
-// from overwriting enriched content.
+// # Overview
+//
+// The lock command protects journal entries from being
+// overwritten during import regeneration. Once an entry
+// is locked, "ctx journal import --regenerate" skips it
+// regardless of other flags.
+//
+// This is useful when an entry has been manually enriched
+// with metadata, tags, or summary edits that should not
+// be lost during a bulk re-import.
+//
+// # Flags
+//
+//	--all    Lock every journal entry in the journal
+//	         directory. Without this flag, one or more
+//	         filename patterns must be provided as
+//	         positional arguments.
+//
+// # Arguments
+//
+// Positional arguments are glob patterns matched against
+// journal filenames. At least one pattern is required
+// unless --all is set.
+//
+// # Behavior
+//
+// [Cmd] builds the cobra.Command and registers the --all
+// flag. [Run] delegates to the shared lock/unlock core
+// in journal/core/lock with lock=true.
+//
+// The core logic loads .state.json from the journal
+// directory, marks matched entries as locked, and
+// persists the updated state file.
+//
+// # Output
+//
+// Each locked entry is confirmed on stdout with its
+// filename. A summary line reports the total number
+// of entries locked.
 package lock
diff --git a/internal/cli/journal/cmd/obsidian/cmd.go b/internal/cli/journal/cmd/obsidian/cmd.go
index f08eab6c7..cd9f4979d 100644
--- a/internal/cli/journal/cmd/obsidian/cmd.go
+++ b/internal/cli/journal/cmd/obsidian/cmd.go
@@ -7,21 +7,26 @@
 package obsidian
 
 import (
-	"path/filepath"
-
 	"github.com/spf13/cobra"
 
 	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
 	"github.com/ActiveMemory/ctx/internal/config/embed/cmd"
 	"github.com/ActiveMemory/ctx/internal/config/embed/flag"
 	cFlag "github.com/ActiveMemory/ctx/internal/config/flag"
-	"github.com/ActiveMemory/ctx/internal/config/obsidian"
 	"github.com/ActiveMemory/ctx/internal/flagbind"
-	"github.com/ActiveMemory/ctx/internal/rc"
 )
 
 // Cmd returns the journal obsidian subcommand.
 //
+// The --output default is resolved inside [Run] against the
+// declared context directory. Computing it at construction time
+// would require rc.ContextDir() to succeed before cobra has
+// parsed the flags, which is too early under the
+// explicit-context-dir model. Leaving the default empty and
+// resolving lazily keeps the failure path clean: a missing
+// context directory surfaces as a single actionable error from
+// Run, not a silently-empty flag default.
+//
 // Returns:
 //   - *cobra.Command: Command for generating an Obsidian vault from journal
 //     entries
@@ -39,12 +44,9 @@ func Cmd() *cobra.Command {
 		},
 	}
 
-	defaultOutput := filepath.Join(
-		rc.ContextDir(), obsidian.DirName,
-	)
 	flagbind.StringFlagPDefault(
 		c, &output,
-		cFlag.Output, cFlag.ShortOutput, defaultOutput,
+		cFlag.Output, cFlag.ShortOutput, "",
 		flag.DescKeyJournalObsidianOutput,
 	)
 
diff --git a/internal/cli/journal/cmd/obsidian/doc.go b/internal/cli/journal/cmd/obsidian/doc.go
index 1c5e82f86..c78a595e2 100644
--- a/internal/cli/journal/cmd/obsidian/doc.go
+++ b/internal/cli/journal/cmd/obsidian/doc.go
@@ -1,12 +1,39 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package obsidian implements the ctx journal obsidian subcommand.
+// Package obsidian implements **`ctx journal obsidian`**,
+// the subcommand that exports the project's enriched
+// journal entries as a complete **Obsidian vault** (with
+// MOC pages, wikilinks, and graph-friendly frontmatter)
+// for users who consume the journal in Obsidian rather
+// than the zensical site.
 //
-// [Cmd] builds the cobra.Command with --output flag. [Run]
-// delegates to core/obsidian.BuildVault to generate an
-// Obsidian vault from journal entries.
+// # Public Surface
+//
+//   - **[Cmd]**: cobra command with `--output` to
+//     control the destination directory (default
+//     `vault/`).
+//   - **[Run]**: delegates to
+//     [internal/cli/journal/core/obsidian.BuildVault]
+//     which handles the full file generation pipeline
+//     (scan, transform frontmatter, convert links to
+//     `[[wikilinks]]`, build MOC pages, write
+//     `Home.md`).
+//
+// # Why a Separate Vault
+//
+// Obsidian and the zensical site both consume the same
+// raw entries but render them very differently
+// (wikilinks vs markdown links, MOC vs topic index,
+// graph view vs sidebar nav). Producing two output
+// trees from one input set keeps each rendering
+// idiomatic for its environment.
+//
+// # Concurrency
+//
+// Single-process, sequential. `O(N)` over journal
+// entries.
 package obsidian
diff --git a/internal/cli/journal/cmd/obsidian/run.go b/internal/cli/journal/cmd/obsidian/run.go
index f72b022c9..66c6f13aa 100644
--- a/internal/cli/journal/cmd/obsidian/run.go
+++ b/internal/cli/journal/cmd/obsidian/run.go
@@ -13,6 +13,7 @@ import (
 
 	coreObsidian "github.com/ActiveMemory/ctx/internal/cli/journal/core/obsidian"
 	"github.com/ActiveMemory/ctx/internal/config/dir"
+	"github.com/ActiveMemory/ctx/internal/config/obsidian"
 	"github.com/ActiveMemory/ctx/internal/rc"
 )
 
@@ -20,12 +21,21 @@ import (
 //
 // Parameters:
 //   - cmd: Cobra command for output stream
-//   - output: Output directory for the vault
+//   - output: Output directory for the vault; when empty, defaults
+//     to &lt;context dir&gt;/&lt;obsidian.DirName&gt;
 //
 // Returns:
 //   - error: Non-nil if generation fails
 func Run(cmd *cobra.Command, output string) error {
+	ctxDir, err := rc.RequireContextDir()
+	if err != nil {
+		cmd.SilenceUsage = true
+		return err
+	}
+	if output == "" {
+		output = filepath.Join(ctxDir, obsidian.DirName)
+	}
 	return coreObsidian.BuildVault(
-		cmd, filepath.Join(rc.ContextDir(), dir.Journal), output,
+		cmd, filepath.Join(ctxDir, dir.Journal), output,
 	)
 }
diff --git a/internal/cli/journal/cmd/schema/check/doc.go b/internal/cli/journal/cmd/schema/check/doc.go
index fa4d6b020..30045e618 100644
--- a/internal/cli/journal/cmd/schema/check/doc.go
+++ b/internal/cli/journal/cmd/schema/check/doc.go
@@ -4,13 +4,45 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package check provides the journal schema check subcommand.
-//
-// It walks JSONL session files in Claude Code project directories,
-// validates each line against the embedded schema, and reports
-// unknown fields, missing required fields, unknown record types,
-// and unknown content block types. When drift is found, a
-// Markdown report is written to .context/reports/schema-drift.md.
-// When drift resolves, the report is automatically deleted.
-// Exit code 0 means clean; exit code 1 means drift detected.
+// Package check implements the "ctx journal schema check"
+// command.
+//
+// # Overview
+//
+// The check command scans JSONL session files in Claude
+// Code project directories and validates each line against
+// the embedded schema definition. It detects unknown
+// fields, missing required fields, unknown record types,
+// and unrecognized content block types.
+//
+// # Flags
+//
+//	--dir            Scan a specific directory instead of
+//	                 the default Claude project paths.
+//	--all-projects   Scan all discovered project
+//	                 directories, not just the current one.
+//	-q, --quiet      Suppress normal output; exit code
+//	                 alone indicates pass (0) or drift (1).
+//
+// # Output
+//
+// When no drift is found, prints a clean summary with
+// the number of files and lines scanned. When drift is
+// detected, prints a categorized drift summary to stdout
+// and writes a Markdown report to
+// .context/reports/schema-drift.md. When drift later
+// resolves, the report file is automatically deleted.
+//
+// In quiet mode, the command produces no output and
+// relies solely on the exit code.
+//
+// # Behavior
+//
+// [Cmd] builds the cobra.Command and registers the three
+// flags above. [Run] calls the core schema checker,
+// optionally writes the report, and formats output based
+// on drift status and the quiet flag.
+//
+// Designed for use in CI pipelines, nightly cron jobs,
+// and interactive troubleshooting.
 package check
diff --git a/internal/cli/journal/cmd/schema/check/run.go b/internal/cli/journal/cmd/schema/check/run.go
index 8a83b684a..f21e022b0 100644
--- a/internal/cli/journal/cmd/schema/check/run.go
+++ b/internal/cli/journal/cmd/schema/check/run.go
@@ -15,6 +15,7 @@ import (
 	errSchema "github.com/ActiveMemory/ctx/internal/err/schema"
 	"github.com/ActiveMemory/ctx/internal/journal/schema"
 	ctxLog "github.com/ActiveMemory/ctx/internal/log/warn"
+	"github.com/ActiveMemory/ctx/internal/rc"
 	writeSchema "github.com/ActiveMemory/ctx/internal/write/schema"
 )
 
@@ -27,6 +28,10 @@ import (
 // Returns:
 //   - error: non-nil when drift is detected or scan fails
 func Run(cmd *cobra.Command, opts coreSchema.CheckOpts) error {
+	if _, ctxErr := rc.RequireContextDir(); ctxErr != nil {
+		cmd.SilenceUsage = true
+		return ctxErr
+	}
 	c, checkErr := coreSchema.Check(opts)
 	if checkErr != nil {
 		return checkErr
diff --git a/internal/cli/journal/cmd/schema/doc.go b/internal/cli/journal/cmd/schema/doc.go
index ff1a7851e..95bfee74c 100644
--- a/internal/cli/journal/cmd/schema/doc.go
+++ b/internal/cli/journal/cmd/schema/doc.go
@@ -4,12 +4,36 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package schema provides the journal schema parent command.
-//
-// It groups the check and dump subcommands under
-// "ctx journal schema". The check subcommand scans JSONL
-// session files for format drift and writes a report. The
-// dump subcommand prints the embedded schema definition for
-// inspection. Both are designed for use in CI pipelines and
-// nightly cron jobs as well as interactive use.
+// Package schema provides the "ctx journal schema" parent
+// command.
+//
+// # Overview
+//
+// This package groups schema-related subcommands under a
+// single namespace. It does not contain business logic
+// itself; it delegates to its two children:
+//
+//   - check: scans JSONL session files for format drift
+//     and writes a Markdown report when violations are
+//     found.
+//   - dump: prints the full embedded schema definition
+//     to stdout for human inspection.
+//
+// # Usage
+//
+//	ctx journal schema check [flags]
+//	ctx journal schema dump
+//
+// # Behavior
+//
+// [Cmd] uses the parent.Cmd helper to build a
+// cobra.Command with short and long descriptions loaded
+// from embedded assets. It attaches the check and dump
+// subcommands as children. Running the parent without a
+// subcommand prints the help text.
+//
+// Both subcommands are designed for CI pipelines and
+// nightly cron jobs as well as interactive use. The check
+// subcommand returns exit code 1 on drift; the dump
+// subcommand always succeeds.
 package schema
diff --git a/internal/cli/journal/cmd/schema/dump/doc.go b/internal/cli/journal/cmd/schema/dump/doc.go
index c47ee4871..550e467b3 100644
--- a/internal/cli/journal/cmd/schema/dump/doc.go
+++ b/internal/cli/journal/cmd/schema/dump/doc.go
@@ -4,12 +4,38 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package dump provides the journal schema dump subcommand.
-//
-// It prints the embedded JSONL schema definition to stdout,
-// showing all known record types with their required and optional
-// fields, and all recognized content block types with their
-// parse status. The output is human-readable and useful for
-// understanding what the schema validator expects before
-// running a check.
+// Package dump implements the "ctx journal schema dump"
+// command.
+//
+// # Overview
+//
+// The dump command prints the embedded JSONL schema
+// definition to stdout in a human-readable format. It
+// shows the schema version, supported Claude Code version
+// range, all known record types with their required and
+// optional fields, and all recognized content block types
+// with their parse status.
+//
+// # Output Format
+//
+// The output is structured as follows:
+//
+//  1. Schema version and CC version range header.
+//  2. Record types section listing each type with its
+//     required and optional field names. Metadata-only
+//     types are shown without field lists.
+//  3. Block types section listing each content block
+//     type and whether it is "known" or "parsed".
+//
+// # Behavior
+//
+// [Cmd] builds a simple cobra.Command with no flags.
+// [Run] loads the default schema, sorts record types
+// and block types alphabetically, and writes each
+// section to the command output stream. The command
+// always returns nil.
+//
+// This is useful for understanding what the schema
+// validator expects before running a check, or for
+// documenting the current schema in external tooling.
 package dump
diff --git a/internal/cli/journal/cmd/site/cmd.go b/internal/cli/journal/cmd/site/cmd.go
index d5a81878f..045421334 100644
--- a/internal/cli/journal/cmd/site/cmd.go
+++ b/internal/cli/journal/cmd/site/cmd.go
@@ -7,21 +7,26 @@
 package site
 
 import (
-	"path/filepath"
-
 	"github.com/spf13/cobra"
 
 	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
-	"github.com/ActiveMemory/ctx/internal/config/dir"
 	"github.com/ActiveMemory/ctx/internal/config/embed/cmd"
 	"github.com/ActiveMemory/ctx/internal/config/embed/flag"
 	cFlag "github.com/ActiveMemory/ctx/internal/config/flag"
 	"github.com/ActiveMemory/ctx/internal/flagbind"
-	"github.com/ActiveMemory/ctx/internal/rc"
 )
 
 // Cmd returns the journal site subcommand.
 //
+// The --output default is resolved inside [Run] against the
+// declared context directory. Computing it at construction time
+// would require rc.ContextDir() to succeed before cobra has
+// parsed the flags, which is too early under the
+// explicit-context-dir model. Leaving the default empty and
+// resolving lazily keeps the failure path clean: a missing
+// context directory surfaces as a single actionable error from
+// Run, not a silently-empty flag default.
+//
 // Returns:
 //   - *cobra.Command: Command for generating a static site from journal entries
 func Cmd() *cobra.Command {
@@ -42,10 +47,9 @@ func Cmd() *cobra.Command {
 		},
 	}
 
-	defaultOutput := filepath.Join(rc.ContextDir(), dir.JournalSite)
 	flagbind.StringFlagPDefault(
 		c, &output, cFlag.Output, cFlag.ShortOutput,
-		defaultOutput, flag.DescKeyJournalSiteOutput,
+		"", flag.DescKeyJournalSiteOutput,
 	)
 	flagbind.BoolFlag(c, &build, cFlag.Build, flag.DescKeyJournalSiteBuild)
 	flagbind.BoolFlag(c, &serve, cFlag.Serve, flag.DescKeyJournalSiteServe)
diff --git a/internal/cli/journal/cmd/site/doc.go b/internal/cli/journal/cmd/site/doc.go
index a05ad7522..ff0f49a25 100644
--- a/internal/cli/journal/cmd/site/doc.go
+++ b/internal/cli/journal/cmd/site/doc.go
@@ -1,13 +1,37 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package site implements the ctx journal site subcommand.
+// Package site implements **`ctx journal site`**, the
+// subcommand that turns the project's enriched journal
+// entries into a browsable static site, optionally
+// invoking the zensical builder to produce the HTML.
 //
-// [Cmd] builds the cobra.Command with --build and --output flags.
-// [Run] generates a static journal site: parses entries, builds
-// month-grouped pages, topic indexes, and a zensical configuration.
-// With --build, it also invokes zensical to produce HTML.
+// # Public Surface
+//
+//   - **[Cmd]**: cobra command with `--build` (also
+//     run zensical) and `--output` (override the
+//     destination directory).
+//   - **[Run]**: orchestrates the full generation;
+//     parse entries (parse), normalize each (normalize),
+//     build month-grouped pages and topic indexes
+//     (section + generate + moc), write the zensical
+//     `README.md` (generate.SiteReadme), and, when
+//     `--build` is set, shell out to `zensical build`.
+//
+// # Output Layout
+//
+//   - `<output>/README.md`: zensical config
+//   - `<output>/index.md`: chronological index
+//   - `<output>/topics/index.md`: topic overview MOC
+//   - `<output>/topics/<topic>.md`: per-topic pages
+//   - `<output>/<yyyy>/<mm>/<slug>.md`: entries
+//
+// # Concurrency
+//
+// Single-process, sequential. The site build is
+// `O(N)` over journal entries and typically
+// completes in seconds.
 package site
diff --git a/internal/cli/journal/cmd/site/run.go b/internal/cli/journal/cmd/site/run.go
index a92c2998e..6c852768f 100644
--- a/internal/cli/journal/cmd/site/run.go
+++ b/internal/cli/journal/cmd/site/run.go
@@ -54,7 +54,15 @@ import (
 func Run(
 	cmd *cobra.Command, output string, build, serve bool,
 ) error {
-	journalDir := filepath.Join(rc.ContextDir(), dir.Journal)
+	ctxDir, ctxErr := rc.RequireContextDir()
+	if ctxErr != nil {
+		cmd.SilenceUsage = true
+		return ctxErr
+	}
+	if output == "" {
+		output = filepath.Join(ctxDir, dir.JournalSite)
+	}
+	journalDir := filepath.Join(ctxDir, dir.Journal)
 
 	// Check if the journal directory exists
 	if _, statErr := os.Stat(journalDir); os.IsNotExist(statErr) {
diff --git a/internal/cli/journal/cmd/source/doc.go b/internal/cli/journal/cmd/source/doc.go
index 3b5485542..d51b621e6 100644
--- a/internal/cli/journal/cmd/source/doc.go
+++ b/internal/cli/journal/cmd/source/doc.go
@@ -4,10 +4,49 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package source implements the ctx journal source subcommand.
+// Package source implements the "ctx journal source"
+// command.
 //
-// [Cmd] builds the cobra.Command with --limit, --project, --tool,
-// --latest, --full, and date range flags. [Run] routes to list
-// mode (tabular session overview) or show mode (detailed session
-// display) based on flags.
+// # Overview
+//
+// The source command provides two modes for working with
+// raw Claude Code JSONL session files:
+//
+//   - List mode (default): prints a tabular overview of
+//     available sessions with timestamps, slugs, and
+//     summary metadata.
+//   - Show mode: displays detailed content for a single
+//     session, identified by slug, ID, or positional arg.
+//
+// # Flags
+//
+//	-s, --show <slug>     Show a specific session by slug
+//	                      or session ID.
+//	    --latest          Show the most recent session.
+//	    --full            Include full session content in
+//	                      show mode (no truncation).
+//	-n, --limit <n>       Maximum sessions to list
+//	                      (default from config).
+//	-p, --project <name>  Filter by project name.
+//	-t, --tool <name>     Filter by tool name.
+//	    --since <date>    Include sessions on or after
+//	                      this date.
+//	    --until <date>    Include sessions on or before
+//	                      this date.
+//	    --all-projects    Scan all project directories.
+//
+// # Behavior
+//
+// [Cmd] builds the cobra.Command and registers all flags
+// listed above. [Run] inspects the flags to determine the
+// mode: if --show, --latest, or a positional argument is
+// present, it dispatches to coreSrc.RunShow; otherwise it
+// dispatches to coreSrc.RunList.
+//
+// # Output
+//
+// List mode outputs a formatted table to stdout. Show
+// mode outputs session details including timestamps,
+// message counts, and optionally the full conversation
+// content.
 package source
diff --git a/internal/cli/journal/cmd/sync/doc.go b/internal/cli/journal/cmd/sync/doc.go
index cbe149894..486f5f883 100644
--- a/internal/cli/journal/cmd/sync/doc.go
+++ b/internal/cli/journal/cmd/sync/doc.go
@@ -4,9 +4,42 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package sync implements the ctx journal sync subcommand.
+// Package sync implements the "ctx journal sync" command.
 //
-// [Cmd] builds the cobra.Command. [Run] scans journal Markdown
-// files and updates .state.json to match each file's frontmatter
-// lock status — the inverse of ctx journal lock.
+// # Overview
+//
+// The sync command reconciles journal lock state by
+// treating Markdown frontmatter as the source of truth.
+// It scans all journal Markdown files and updates
+// .state.json to match each file's frontmatter lock
+// status. This is the inverse of "ctx journal lock",
+// which writes state first and expects frontmatter to
+// follow.
+//
+// # Flags
+//
+// This command accepts no flags.
+//
+// # Behavior
+//
+// [Cmd] builds a simple cobra.Command with no flags.
+// [Run] performs the following steps:
+//
+//  1. Loads .state.json from the journal directory.
+//  2. Discovers all journal Markdown files.
+//  3. For each file, reads the frontmatter lock field
+//     and compares it to the state file entry.
+//  4. When frontmatter says locked but state says
+//     unlocked, marks the entry as locked in state.
+//  5. When frontmatter says unlocked but state says
+//     locked, clears the lock in state.
+//  6. Saves the updated .state.json.
+//
+// # Output
+//
+// Prints one line per state change (locked or unlocked)
+// with the affected filename. Ends with a summary line
+// showing total locked and unlocked counts. If no
+// journal files are found, prints a "nothing to sync"
+// message.
 package sync
diff --git a/internal/cli/journal/cmd/sync/run.go b/internal/cli/journal/cmd/sync/run.go
index c3f80acf6..b326bd76a 100644
--- a/internal/cli/journal/cmd/sync/run.go
+++ b/internal/cli/journal/cmd/sync/run.go
@@ -29,7 +29,12 @@ import (
 // Returns:
 //   - error: Non-nil on I/O failure
 func Run(cmd *cobra.Command) error {
-	journalDir := filepath.Join(rc.ContextDir(), dir.Journal)
+	ctxDir, ctxErr := rc.RequireContextDir()
+	if ctxErr != nil {
+		cmd.SilenceUsage = true
+		return ctxErr
+	}
+	journalDir := filepath.Join(ctxDir, dir.Journal)
 
 	jstate, loadErr := state.Load(journalDir)
 	if loadErr != nil {
diff --git a/internal/cli/journal/cmd/unlock/doc.go b/internal/cli/journal/cmd/unlock/doc.go
index 3ee74015f..99415ed01 100644
--- a/internal/cli/journal/cmd/unlock/doc.go
+++ b/internal/cli/journal/cmd/unlock/doc.go
@@ -4,9 +4,42 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package unlock implements the ctx journal unlock subcommand.
+// Package unlock implements the "ctx journal unlock"
+// command.
 //
-// [Cmd] builds the cobra.Command with --all flag. [Run] removes
-// lock protection from journal entries, allowing future import
-// regeneration to update them.
+// # Overview
+//
+// The unlock command removes lock protection from
+// journal entries, allowing "ctx journal export
+// --regenerate" to overwrite them again. This reverses
+// the effect of "ctx journal lock".
+//
+// # Flags
+//
+//	--all    Unlock every journal entry in the journal
+//	         directory. Without this flag, one or more
+//	         filename patterns must be provided as
+//	         positional arguments.
+//
+// # Arguments
+//
+// Positional arguments are glob patterns matched against
+// journal filenames. At least one pattern is required
+// unless --all is set.
+//
+// # Behavior
+//
+// [Cmd] builds the cobra.Command and registers the --all
+// flag. [Run] delegates to the shared lock/unlock core
+// in journal/core/lock with lock=false.
+//
+// The core logic loads .state.json from the journal
+// directory, clears the locked mark on matched entries,
+// and persists the updated state file.
+//
+// # Output
+//
+// Each unlocked entry is confirmed on stdout with its
+// filename. A summary line reports the total number
+// of entries unlocked.
 package unlock
diff --git a/internal/cli/journal/core/collapse/doc.go b/internal/cli/journal/core/collapse/doc.go
index 529c7a6fd..51d5d334a 100644
--- a/internal/cli/journal/core/collapse/doc.go
+++ b/internal/cli/journal/core/collapse/doc.go
@@ -1,13 +1,51 @@
 //   /    ctx:                         https://ctx.ist
 // ,'`./    do you remember?
-// `.,'\\
+// `.,'\
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package collapse condenses large tool output sections in journal
-// markdown.
+// Package collapse condenses **large tool-output blocks** in
+// journal markdown, the multi-thousand-line shell pastes
+// and `ls`/`grep` outputs that bloat an entry without
+// adding much signal, into expandable summaries that show
+// the first few lines and offer the rest under a
+// `<details>` toggle.
 //
-// [ToolOutputs] finds tool output blocks in the content and
-// replaces them with collapsed summaries, preserving the first
-// few lines as context.
+// The package complements [reduce]: reduce strips bona-fide
+// noise (system reminders, orphan fences); collapse
+// preserves output but **hides** the bulk so reviewers can
+// skim and only expand the tool calls they care about.
+//
+// # Public Surface
+//
+//   - **[ToolOutputs](content, opts)**: finds tool-output
+//     code blocks larger than a configurable line
+//     threshold and replaces them with a `<details>`
+//     summary block:
+//
+//	<details><summary>Tool output (NNN lines)</summary>
+//
+//	```
+//	...full output...
+//	```
+//
+//	</details>
+//
+//     with the first 5 lines shown above the
+//     collapsed block as anchor context. Threshold and
+//     preview line count are tunable via [opts].
+//
+// # Why Not Just Truncate?
+//
+// Truncating loses information. The journal entry is a
+// **record**; the user may need the full output later
+// to reconstruct what happened. Collapsing wins on both
+// fronts: the rendered page is short and skimmable, the
+// raw markdown still contains every byte of the original
+// output.
+//
+// # Concurrency
+//
+// Pure data transformation. Concurrent callers never
+// race.
 package collapse
diff --git a/internal/cli/journal/core/confirm/doc.go b/internal/cli/journal/core/confirm/doc.go
index 669d6cfa6..3d19a2f8f 100644
--- a/internal/cli/journal/core/confirm/doc.go
+++ b/internal/cli/journal/core/confirm/doc.go
@@ -4,9 +4,35 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package confirm handles user confirmation prompts for import
-// operations.
+// Package confirm handles user confirmation prompts for
+// journal import operations.
 //
-// [Import] displays the import plan and prompts the user to
-// proceed. Returns true if the user confirms, false if cancelled.
+// # Overview
+//
+// Before the journal import command writes files to
+// disk, it presents a summary of what will happen and
+// asks the user to confirm. This package implements
+// that interactive confirmation step.
+//
+// # Behavior
+//
+// [Import] renders the import plan summary (new, regenerated,
+// skipped, locked file counts), prompts the user for
+// confirmation on stdin, and returns true only when the
+// response is "y" or "yes".
+//
+// # Data Flow
+//
+// When [Import] is called it:
+//
+//  1. Renders the import plan summary showing counts
+//     for new files, regenerated files, skipped files,
+//     and locked files via writeRecall.ImportSummary.
+//  2. Prints a confirmation prompt via
+//     writeRecall.ConfirmPrompt.
+//  3. Reads a line from stdin using a buffered reader.
+//  4. Trims whitespace and lowercases the response.
+//  5. Returns true if the response matches "y" or
+//     "yes", false otherwise.
+//  6. Returns an error if reading from stdin fails.
 package confirm
diff --git a/internal/cli/journal/core/consolidate/doc.go b/internal/cli/journal/core/consolidate/doc.go
index 0c0b757dc..8d949efc3 100644
--- a/internal/cli/journal/core/consolidate/doc.go
+++ b/internal/cli/journal/core/consolidate/doc.go
@@ -4,9 +4,39 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package consolidate merges consecutive tool runs in journal
-// markdown for cleaner reading.
+// Package consolidate merges consecutive identical tool
+// runs in journal markdown for cleaner reading.
 //
-// [ToolRuns] detects adjacent tool call/result sections and
-// consolidates them into grouped blocks.
+// # Overview
+//
+// AI session transcripts often contain long sequences
+// of identical tool calls (e.g. repeated file reads or
+// test runs). This package detects those repetitions
+// and collapses them into a single entry with a count
+// annotation.
+//
+// # Public Surface
+//
+//   - [ToolRuns]: collapses consecutive turns with
+//     identical body content.
+//
+// # Algorithm
+//
+// ToolRuns processes the content line by line:
+//
+//  1. Scans for turn headers matching the turn header
+//     regex pattern.
+//  2. For each turn header, extracts the body content
+//     up to the next header or end of file using the
+//     turn.Body helper.
+//  3. Counts consecutive turns that share the same
+//     body content.
+//  4. If the count exceeds one, emits a single copy
+//     of the header and body followed by a count
+//     annotation (e.g. "repeated 5 times").
+//  5. If the count is one, preserves the original
+//     lines unchanged.
+//
+// Non-turn lines (narrative text, headings, etc.) pass
+// through unchanged.
 package consolidate
diff --git a/internal/cli/journal/core/doc.go b/internal/cli/journal/core/doc.go
index ac26614ba..0d53820a8 100644
--- a/internal/cli/journal/core/doc.go
+++ b/internal/cli/journal/core/doc.go
@@ -4,11 +4,61 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package core contains the journal processing pipeline: import
-// planning, execution, site generation, normalization, and
-// Obsidian vault building.
+// Package core is the umbrella for the journal command's
+// processing pipeline.
 //
-// The pipeline flows: plan → confirm → execute → normalize →
-// generate site. Each stage is idempotent and tracks progress
-// via .state.json.
+// # Overview
+//
+// The journal system converts Claude Code session JSONL
+// transcripts into structured markdown files with YAML
+// frontmatter, then generates navigable indexes and
+// optional Obsidian vault output. This package groups
+// the sub-packages that implement each stage.
+//
+// # Pipeline Stages
+//
+// The journal pipeline flows through these stages:
+//
+//  1. plan: scans session files and builds an import
+//     plan of file actions (new, regenerate, skip).
+//  2. confirm: presents the plan summary and prompts
+//     for user confirmation.
+//  3. execute: writes journal entries to disk,
+//     preserving existing frontmatter on regeneration.
+//  4. normalize: cleans up content boundaries and
+//     formatting.
+//  5. generate: builds site indexes, maps of content,
+//     and navigation pages.
+//
+// Each stage is idempotent and tracks progress via a
+// shared .state.json file.
+//
+// # Sub-packages
+//
+//   - collapse: collapses verbose tool output blocks
+//   - confirm: user confirmation prompts
+//   - consolidate: merges repeated tool runs
+//   - execute: import plan execution
+//   - extract: YAML frontmatter extraction
+//   - format: size, slug, and link formatting
+//   - frontmatter: frontmatter parsing and types
+//   - generate: site and index generation
+//   - group: entry grouping by topic
+//   - index: journal index management
+//   - lock: concurrent access locking
+//   - moc: map of content generation
+//   - normalize: content boundary normalisation
+//   - obsidian: Obsidian vault builder
+//   - parse: session JSONL parsing
+//   - plan: import planning
+//   - query: journal entry querying
+//   - reduce: content reduction
+//   - schema: schema validation
+//   - section: section indexing
+//   - session: session metadata
+//   - slug: filename slug generation
+//   - source: source file listing and formatting
+//   - turn: turn header and body extraction
+//   - validate: entry validation
+//   - wikilink: wikilink processing
 package core
diff --git a/internal/cli/journal/core/execute/doc.go b/internal/cli/journal/core/execute/doc.go
index 9bdc42ad1..8827f6517 100644
--- a/internal/cli/journal/core/execute/doc.go
+++ b/internal/cli/journal/core/execute/doc.go
@@ -4,10 +4,41 @@
 //   \    Copyright 2026-present Context contributors.
 //                 SPDX-License-Identifier: Apache-2.0
 
-// Package execute runs the import plan, converting session JSONL
-// to journal markdown.
+// Package execute runs the import plan, converting
+// session JSONL to journal markdown files.
 //
-// [Import] iterates FileActions from the plan, renders each
-// session part to Markdown, preserves existing frontmatter when
-// regenerating, and writes the output files.
+// # Overview
+//
+// After the user confirms the import plan, this package
+// iterates over each file action and writes the
+// corresponding journal entry to disk.
It handles new +// files, regenerated files (preserving enriched YAML +// frontmatter), and skipped or locked entries. +// +// # Public Surface +// +// - [Import]: writes files according to the plan +// and returns counts of imported, updated, and +// skipped entries. +// +// # Data Flow +// +// When [Import] is called it processes each FileAction: +// +// 1. Locked actions are skipped with a diagnostic +// message noting the frontmatter lock. +// 2. Skip actions are skipped with a reason message +// indicating the file already exists. +// 3. For new and regenerate actions, the session +// messages are rendered to markdown via the source +// format sub-package. +// 4. Invalid UTF-8 sequences are replaced with +// ellipsis characters. +// 5. For regenerated files, the existing YAML +// frontmatter is preserved unless the discard +// frontmatter option is set. +// 6. The rendered content is written to disk using +// safe file I/O. +// 7. The journal state is updated to mark the file +// as imported. package execute diff --git a/internal/cli/journal/core/extract/doc.go b/internal/cli/journal/core/extract/doc.go index ff71500dc..b41de560d 100644 --- a/internal/cli/journal/core/extract/doc.go +++ b/internal/cli/journal/core/extract/doc.go @@ -4,9 +4,39 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package extract pulls YAML frontmatter from journal markdown. +// Package extract pulls YAML frontmatter from journal +// markdown content. // -// [Frontmatter] returns the raw frontmatter block (including -// delimiters). [StripFrontmatter] removes the frontmatter, -// returning only the body content. +// # Overview +// +// Journal entries begin with a YAML frontmatter block +// delimited by --- lines. This package provides +// functions to extract that block or strip it from the +// content, which is needed during import regeneration +// to preserve enriched metadata. 
+// +// # Public Surface +// +// - [Frontmatter]: returns the raw frontmatter +// block including the --- delimiters and trailing +// newline. +// - [StripFrontmatter]: removes the frontmatter +// block and returns only the body content. +// +// # Algorithm +// +// Frontmatter works by: +// +// 1. Checking whether the content starts with the +// opening delimiter (--- followed by newline). +// 2. Searching for the closing delimiter (newline, +// ---, newline) after the opening. +// 3. Returning the substring from the start through +// the closing delimiter, or empty string if no +// valid frontmatter block is found. +// +// StripFrontmatter calls Frontmatter, then returns +// everything after the block with leading newlines +// trimmed. If no frontmatter exists, the original +// content is returned unchanged. package extract diff --git a/internal/cli/journal/core/format/doc.go b/internal/cli/journal/core/format/doc.go index cb88846c0..41f038dfc 100644 --- a/internal/cli/journal/core/format/doc.go +++ b/internal/cli/journal/core/format/doc.go @@ -4,9 +4,34 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package format provides formatting helpers for journal output. +// Package format provides formatting helpers for +// journal output and site generation. // -// [Size] formats byte counts as human-readable strings. -// [KeyFileSlug] converts a file path to a URL-safe slug. -// [SessionLink] builds a Markdown link to a session page. +// # Overview +// +// This package contains utility functions for +// converting raw values into human-readable or +// URL-safe representations used throughout the +// journal pipeline. +// +// # Public Surface +// +// - [Size]: formats a byte count as a +// human-readable string (e.g. "512B", "1.5KB", +// "2.3MB"). Uses IEC units (1024-based). 
+// - [KeyFileSlug]: converts a file path to a +// URL-safe slug by replacing path separators +// and dots with underscores, and glob stars +// with "x". +// - [SessionLink]: builds a markdown list item +// linking to a session page with a session count +// (e.g. "- [topic](topic.md) (3 sessions)"). +// +// # Usage +// +// Size is used when reporting journal entry sizes +// during import. KeyFileSlug is used by the map of +// content generator to create safe filenames from +// file paths. SessionLink is used by the MOC builder +// to generate navigation links. package format diff --git a/internal/cli/journal/core/frontmatter/doc.go b/internal/cli/journal/core/frontmatter/doc.go index bf2536b06..8e1772372 100644 --- a/internal/cli/journal/core/frontmatter/doc.go +++ b/internal/cli/journal/core/frontmatter/doc.go @@ -1,14 +1,48 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package frontmatter handles YAML frontmatter transformation -// for journal entries and Obsidian vault generation. +// Package frontmatter handles the **YAML frontmatter +// transformations** that journal entries undergo as they +// pass through the pipeline: importer → normalizer → +// renderer (site or vault). // -// [Transform] converts raw frontmatter into a normalized format. -// [ExtractStringSlice] safely extracts []string from parsed YAML -// maps. The Obsidian struct provides the vault-specific frontmatter -// schema. +// The package owns the per-renderer adapters that map the +// canonical [entity.JournalFrontmatter] into the slightly +// different shapes each downstream renderer expects. +// +// # Public Surface +// +// - **[Transform](raw)**: converts a raw frontmatter +// map (untyped, just-parsed YAML) into the +// normalized journal frontmatter shape: enforces +// field types, fills in defaults, drops fields the +// schema does not recognize. 
Used by the importer +// when ingesting hand-edited entries. +// - **[ExtractStringSlice](m, key)**: safely pulls a +// `[]string` from a `map[string]any`, tolerating +// both `[]string` and `[]any` source types (YAML +// decoders produce one or the other depending on +// content). Returns nil when the key is missing. +// - **[Obsidian]**: the Obsidian-vault frontmatter +// struct: subset/extension of the canonical shape +// with additional `aliases:`, `tags:`, and graph +// metadata Obsidian renders. +// +// # Why a Separate Package +// +// Frontmatter handling looks trivial on the surface but +// is one of the most bug-prone surfaces in any markdown +// pipeline because YAML's loose typing produces +// `[]string` in some cases and `[]any` in others for +// "the same" structure. Hoisting the conversions here +// means every renderer benefits from the same +// safe-decode helpers. +// +// # Concurrency +// +// All functions are pure. Concurrent callers never +// race. package frontmatter diff --git a/internal/cli/journal/core/generate/doc.go b/internal/cli/journal/core/generate/doc.go index 8e5569605..d7ea354c4 100644 --- a/internal/cli/journal/core/generate/doc.go +++ b/internal/cli/journal/core/generate/doc.go @@ -1,13 +1,50 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package generate builds journal site pages from parsed entries. +// Package generate builds the **top-level pages** of the +// journal site from parsed entries: the README zensical +// reads at build time, the chronological index page, and the +// AI-generated summary insertion that decorates pages already +// produced upstream. // -// [SiteReadme] creates the site README with zensical configuration. -// [Index] generates the main index page with month-grouped entries. -// [InjectedSummary] inserts an AI-generated summary into existing -// page content. 
+// The package is the third leg of the site-building tripod +// alongside [section] (topic indexes) and [moc] (Maps of +// Content). Together they cover everything the journal site +// renders. +// +// # The Surface +// +// - **[SiteReadme](opts)**: produces the +// `site/README.md` zensical reads at build time. +// Embeds the zensical configuration block (theme, +// navigation, search settings) and the site-wide +// description. Idempotent: a call with identical +// `opts` produces byte-identical output. +// - **[Index](entries)**: produces the chronological +// index page: entries grouped by month, newest at +// the top. Output is markdown ready to land at +// `site/index.md` (or `site/journal/index.md`, +// depending on layout). +// - **[InjectedSummary](existing, summary)**: splices +// an AI-generated summary into existing page +// content **at a stable insertion point** (a +// marker comment) so re-running site generation +// does not duplicate the summary or push other +// content around. The marker pattern matches what +// `/ctx-blog` and `/ctx-blog-changelog` skills emit. +// +// # Idempotency Contract +// +// All three generators are idempotent under the same +// inputs. This is what makes `ctx journal site` safe to +// re-run during a CI build: identical entries → identical +// output → no spurious git diffs. +// +// # Concurrency +// +// All functions are pure data transformations. Concurrent +// callers never race. package generate diff --git a/internal/cli/journal/core/group/doc.go b/internal/cli/journal/core/group/doc.go index 86168ed26..a505f5b84 100644 --- a/internal/cli/journal/core/group/doc.go +++ b/internal/cli/journal/core/group/doc.go @@ -4,9 +4,39 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package group aggregates journal entries for index generation. +// Package group aggregates journal entries for index +// generation. // -// [ByMonth] groups entries by year-month for the main index. 
-// [GroupedIndex] builds topic or key-file aggregations sorted by -// frequency, splitting into popular and long-tail sets. +// The journal index organizes entries by time and by +// topic. This package provides the grouping logic that +// the index renderer consumes. +// +// # Temporal Grouping +// +// [ByMonth] partitions a slice of journal entries by +// their YYYY-MM date prefix. It returns a map keyed by +// month string together with a slice of month strings +// in first-seen order. The cmd layer iterates the +// ordered slice to render month headings while looking +// up entries from the map. +// +// # Topic Aggregation +// +// [GroupedIndex] builds frequency-ranked groups from +// arbitrary keys extracted via a caller-supplied +// function. For every entry the extractor may return +// one or more keys (e.g. tags, key files). The function +// counts how many entries share each key, marks groups +// that meet the popularity threshold (two or more +// sessions) as popular, and sorts the result by count +// descending then alphabetically. The cmd layer uses +// the popularity flag to split output into a "popular" +// section and a long-tail section. +// +// # Data Flow +// +// The cmd/journal layer calls [ByMonth] and +// [GroupedIndex] after loading parsed journal entries +// from disk. Results flow into template rendering in +// the write/journal package. package group diff --git a/internal/cli/journal/core/index/doc.go b/internal/cli/journal/core/index/doc.go index 5f2ea0a47..5ab755875 100644 --- a/internal/cli/journal/core/index/doc.go +++ b/internal/cli/journal/core/index/doc.go @@ -1,13 +1,50 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package index provides session ID indexing for journal files. 
+// Package index builds the **session-ID-to-filename map** that +// every other journal subcommand uses to find a journal entry +// when given only a session ID. // -// [Session] builds a map from session IDs to filenames by -// scanning journal markdown frontmatter. [ExtractSessionID] pulls -// the session_id from a single file. [LookupSessionFile] resolves -// a session ID to its filename. +// The map matters because users (and skills) routinely refer +// to a session by its ID, a short alphanumeric tag like +// `abc123`, but on disk the journal entry filename is keyed +// by date and slug. The mapping has to be built on demand +// from the entry frontmatter; it cannot be derived from the +// filename alone. +// +// # The Surface +// +// - **[Session](dir)**: walks the journal directory, +// reads the YAML frontmatter of every `*.md` entry, +// extracts each `session_id`, and returns a +// `map[sessionID]filename`. Entries without a +// `session_id` field are silently skipped. +// - **[ExtractSessionID](path)**: reads one file and +// returns its `session_id` (empty string if not +// present, error if the file cannot be read or the +// frontmatter cannot be parsed). +// - **[LookupSessionFile](dir, sessionID)**: convenience +// wrapper: calls [Session] and returns the matching +// filename, or empty string if not found. +// +// # Performance +// +// [Session] reads the frontmatter only, not the full +// body, so the cost scales with `O(N)` files but with +// a small per-file constant. For a journal with a few +// hundred entries, the build typically completes well +// under 100 ms. Callers that need many lookups in a +// row should call [Session] once and cache the map +// rather than calling [LookupSessionFile] repeatedly. +// +// # Concurrency +// +// All functions are stateless. 
// Concurrent callers +// against the same directory each pay the full read +// cost; no module-level cache is implemented because +// the journal directory mutates between sessions and +// stale-cache bugs are worse than the perf cost. package index diff --git a/internal/cli/journal/core/lock/doc.go b/internal/cli/journal/core/lock/doc.go index 72cfc6f3d..79701baee 100644 --- a/internal/cli/journal/core/lock/doc.go +++ b/internal/cli/journal/core/lock/doc.go @@ -1,13 +1,57 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package lock manages journal entry lock state. +// Package lock manages **journal entry lock state**: the +// `locked: true` frontmatter flag that protects an enriched +// journal entry from being clobbered by a re-import of its +// raw source session. // -// [MatchJournalFiles] finds journal files matching a pattern. -// [MultipartBase] extracts the base name from multipart filenames. -// [UpdateFrontmatter] sets or clears the locked: field in a -// file's YAML frontmatter. +// Locking is the journal pipeline's "do not touch" affordance. +// Without it, every `ctx journal import --regenerate` would +// risk overwriting the careful edits an author made to an +// enriched entry. With it, the importer sees `locked: true`, +// skips that file, and reports it in the import summary. +// +// # The Surface +// +// - **[MatchJournalFiles](dir, pattern)**: finds journal +// files matching a CLI pattern (slug, date, ID, glob). +// Used by `ctx journal lock <pattern>` and `ctx journal +// unlock <pattern>` to expand a pattern to a concrete +// list of files. Pattern semantics match what the +// user-facing CLI documents. +// - **[MultipartBase](filename)**: extracts the base +// name from a multipart filename (e.g. +// `2026-04-12-foo--part2.md` → `2026-04-12-foo`).
The +// lock state for a multipart entry lives on the **base +// part**, and other parts inherit it. +// - **[UpdateFrontmatter](path, lock)**: atomic update +// of the `locked:` field in a file's YAML frontmatter. +// Adds the field if missing; removes it when `lock` is +// false (rather than writing `locked: false`, which +// would still bypass the importer's omit-default +// check). +// +// # State File Sync +// +// The lock state can also be read from +// `.context/journal/.state.json` (per +// [internal/journal/state]). Frontmatter is the source of +// truth; the state file is a denormalized index for fast +// queries from `ctx journal sync` and the importer. The +// `ctx journal sync` command (in +// [internal/cli/journal/cmd/sync]) reconciles drift in +// either direction. +// +// # Concurrency +// +// All operations are file-local and hold the file open +// only for the duration of the read+write. Concurrent +// invocations against different files never race; +// concurrent updates to the same file would race on the +// final write (no per-file locking is implemented; the +// CLI is single-process anyway). 
package lock diff --git a/internal/cli/journal/core/lock/lock.go b/internal/cli/journal/core/lock/lock.go index 157ac69ea..02d81e5cf 100644 --- a/internal/cli/journal/core/lock/lock.go +++ b/internal/cli/journal/core/lock/lock.go @@ -258,7 +258,12 @@ func Run( return errSession.AllWithPattern() } - journalDir := filepath.Join(rc.ContextDir(), dir.Journal) + ctxDir, ctxErr := rc.RequireContextDir() + if ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } + journalDir := filepath.Join(ctxDir, dir.Journal) jState, loadErr := state.Load(journalDir) if loadErr != nil { diff --git a/internal/cli/journal/core/lock/sync_test.go b/internal/cli/journal/core/lock/sync_test.go index de4e1e5d9..8e317b522 100644 --- a/internal/cli/journal/core/lock/sync_test.go +++ b/internal/cli/journal/core/lock/sync_test.go @@ -15,6 +15,7 @@ import ( "github.com/ActiveMemory/ctx/internal/cli/journal" "github.com/ActiveMemory/ctx/internal/config/fs" "github.com/ActiveMemory/ctx/internal/journal/state" + "github.com/ActiveMemory/ctx/internal/testutil/testctx" ) func TestRunSync_LocksFromFrontmatter(t *testing.T) { @@ -41,6 +42,8 @@ func TestRunSync_LocksFromFrontmatter(t *testing.T) { } defer func() { _ = os.Chdir(origDir) }() + testctx.Declare(t, dir) + cmd := journal.Cmd() buf := new(strings.Builder) cmd.SetOut(buf) @@ -98,6 +101,8 @@ func TestRunSync_UnlocksFromFrontmatter(t *testing.T) { } defer func() { _ = os.Chdir(origDir) }() + testctx.Declare(t, dir) + cmd := journal.Cmd() buf := new(strings.Builder) cmd.SetOut(buf) @@ -154,6 +159,8 @@ func TestRunSync_NoChanges(t *testing.T) { } defer func() { _ = os.Chdir(origDir) }() + testctx.Declare(t, dir) + cmd := journal.Cmd() buf := new(strings.Builder) cmd.SetOut(buf) @@ -182,6 +189,8 @@ func TestRunSync_EmptyDir(t *testing.T) { } defer func() { _ = os.Chdir(origDir) }() + testctx.Declare(t, dir) + cmd := journal.Cmd() buf := new(strings.Builder) cmd.SetOut(buf) @@ -247,6 +256,8 @@ func TestRunSync_MixedFiles(t *testing.T) { } defer 
func() { _ = os.Chdir(origDir) }() + testctx.Declare(t, dir) + cmd := journal.Cmd() buf := new(strings.Builder) cmd.SetOut(buf) diff --git a/internal/cli/journal/core/lock/unlock_test.go b/internal/cli/journal/core/lock/unlock_test.go index e5e3bdbf0..19b43f208 100644 --- a/internal/cli/journal/core/lock/unlock_test.go +++ b/internal/cli/journal/core/lock/unlock_test.go @@ -16,6 +16,7 @@ import ( "github.com/ActiveMemory/ctx/internal/config/fs" "github.com/ActiveMemory/ctx/internal/config/session" "github.com/ActiveMemory/ctx/internal/journal/state" + "github.com/ActiveMemory/ctx/internal/testutil/testctx" ) func TestRunLockUnlock_LockSingle(t *testing.T) { @@ -41,6 +42,8 @@ func TestRunLockUnlock_LockSingle(t *testing.T) { } defer func() { _ = os.Chdir(origDir) }() + testctx.Declare(t, dir) + // Lock via CLI. cmd := journal.Cmd() buf := new(strings.Builder) @@ -103,6 +106,8 @@ func TestRunLockUnlock_UnlockSingle(t *testing.T) { } defer func() { _ = os.Chdir(origDir) }() + testctx.Declare(t, dir) + cmd := journal.Cmd() buf := new(strings.Builder) cmd.SetOut(buf) @@ -154,6 +159,8 @@ func TestRunLockUnlock_LockAll(t *testing.T) { } defer func() { _ = os.Chdir(origDir) }() + testctx.Declare(t, dir) + cmd := journal.Cmd() buf := new(strings.Builder) cmd.SetOut(buf) @@ -207,6 +214,8 @@ func TestRunLockUnlock_AlreadyLocked(t *testing.T) { } defer func() { _ = os.Chdir(origDir) }() + testctx.Declare(t, dir) + cmd := journal.Cmd() buf := new(strings.Builder) cmd.SetOut(buf) @@ -275,6 +284,8 @@ func TestRunLockUnlock_LockMultipart(t *testing.T) { } defer func() { _ = os.Chdir(origDir) }() + testctx.Declare(t, dir) + cmd := journal.Cmd() buf := new(strings.Builder) cmd.SetOut(buf) diff --git a/internal/cli/journal/core/moc/doc.go b/internal/cli/journal/core/moc/doc.go index eabea79ca..e15cac95c 100644 --- a/internal/cli/journal/core/moc/doc.go +++ b/internal/cli/journal/core/moc/doc.go @@ -1,14 +1,59 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? 
-// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package moc generates Maps of Content for journal sites and -// Obsidian vaults. +// Package moc generates **Maps of Content**, the navigational +// index pages that sit at the top of both the journal site and +// the Obsidian vault and tell a human "here are the high-level +// topics, here are the key files, here are the recent entries +// that matter most". // -// [Home] generates the main topic/key-file MOC page for the site. -// [ObsidianTopics] generates the Obsidian topics index page. -// [GenerateObsidianTopicPage] generates a single topic page with -// wikilinks to matching entries. +// "MOC" is borrowed from the personal-knowledge-management +// world (Obsidian / Linking Your Thinking) where it names the +// curated dashboard page that aggregates by topic and key +// entity rather than by chronology. +// +// # The Surface +// +// - **[Home](entries, opts)**: generates the **site +// homepage MOC**: top topics, key files, recent +// entries, all in a single page. Output is markdown +// ready to land at `site/index.md`. +// - **[ObsidianTopics](entries)**: generates the +// Obsidian-vault topics index using `[[wikilink]]` +// syntax. Lives at `vault/MOC.md`. +// - **[GenerateObsidianTopicPage](topic, entries)**: +// generates a per-topic page in Obsidian format with +// wikilinks back to each matching entry. Lives at +// `vault/topics/<topic>.md`. +// +// # Site MOC vs Obsidian MOC +// +// The two flavors share the *aggregation logic* (topic +// counts, key-file detection, recency ranking) but +// diverge in **link syntax**: +// +// - The site uses standard `[text](url.md)` markdown +// links so zensical can resolve them through its +// navigation graph. +// - Obsidian uses `[[wikilinks]]` so its native graph +// view picks them up. +// +// Each helper assembles the link in the right dialect; +// the aggregation results are reused.
+// +// # Inputs +// +// All MOC generators take a slice of [entity.Entry] and +// optionally a [TopicIndex] (built by +// [internal/cli/journal/core/section.BuildTopicIndex]). +// The MOC is a *projection* of the entry set, not a +// transformation: original entries are unchanged. +// +// # Concurrency +// +// All functions are pure data transformations over +// the entry slice. Concurrent callers never race. package moc diff --git a/internal/cli/journal/core/normalize/doc.go b/internal/cli/journal/core/normalize/doc.go index f5d02c992..89dcd02ae 100644 --- a/internal/cli/journal/core/normalize/doc.go +++ b/internal/cli/journal/core/normalize/doc.go @@ -1,14 +1,63 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package normalize sanitizes journal markdown for site rendering. +// Package normalize sanitizes journal markdown for two +// downstream renderers: the **zensical site builder** and the +// **Obsidian vault exporter**. Raw enriched journal entries +// often carry constructs that one or both renderers cannot +// handle (or render confusingly): unbalanced code fences, H1 +// headings that collide with the page title, raw HTML, etc. +// This package smoothes those out without losing meaning. // -// [MatchTurnHeader] parses conversation turn headers. [FindTurnBoundary] -// locates turn boundaries in content. [TrimBlankLines] removes -// leading and trailing blank lines from a slice. The main Content -// function (not exported here) handles fence stripping, heading -// demotion, and HTML escaping. +// # The Transformations +// +// The main `Content(text, opts)` entry point performs, in +// order: +// +// 1. **Fence stripping at boundaries**: orphan opening or +// closing fences left over from incomplete code blocks +// are removed so the renderer does not enter "code +// mode" for the rest of the document. +// 2. 
**Heading demotion**: every H1 in the body is +// demoted to H2 so it does not collide with the +// frontmatter-derived page title rendered by both +// zensical and Obsidian. +// 3. **HTML escaping**: bare `` patterns that are +// not legitimate HTML are escaped so they do not get +// swallowed silently. +// 4. **Turn-boundary normalization**: turn headers like +// `## [12:34:56] User:` are recognized and given a +// consistent shape via [MatchTurnHeader] / +// [FindTurnBoundary] so the per-turn navigator on the +// site can find them. +// 5. **Trim**: leading and trailing blank-line runs are +// reduced to a single blank line via [TrimBlankLines]. +// +// # The Public Helpers +// +// - **[MatchTurnHeader](line)**: returns true plus the +// parsed turn role + timestamp when the line matches +// the canonical turn-header shape. +// - **[FindTurnBoundary](lines, start)**: locates the +// index of the next turn boundary at or after `start`, +// used for slicing out a specific turn. +// - **[TrimBlankLines](lines)**: strips leading and +// trailing blank entries from a `[]string`. +// +// # Idempotency +// +// Every transformation is **idempotent**: running +// `Content` twice in a row produces no further changes. +// This is what makes the package safe to call from both +// the import pipeline (writes the normalized form to disk) +// and the renderers (re-normalize what they read). +// +// # Concurrency +// +// All exported functions are pure data transformations +// over `string` / `[]string`. Concurrent callers never +// race. package normalize diff --git a/internal/cli/journal/core/obsidian/doc.go b/internal/cli/journal/core/obsidian/doc.go index 2061432d8..951b39d50 100644 --- a/internal/cli/journal/core/obsidian/doc.go +++ b/internal/cli/journal/core/obsidian/doc.go @@ -1,12 +1,38 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. 
// SPDX-License-Identifier: Apache-2.0 -// Package obsidian builds an Obsidian vault from journal entries. +// Package obsidian builds an **Obsidian vault** from the +// project's enriched journal entries, the engine behind +// the user-facing `ctx journal obsidian` command. // -// [BuildVault] handles the full file generation pipeline: -// scan entries, create directories, transform frontmatter, -// convert links, build MOC pages, and write Home.md. +// The vault is a complete Obsidian-friendly directory +// tree: per-entry notes with vault-specific frontmatter, +// `[[wikilinks]]` instead of markdown links, MOC pages +// for navigation, and a `Home.md` landing page that +// surfaces recent entries and top topics. +// +// # Public Surface +// +// - **[BuildVault](journalDir, vaultDir, opts)**: +// end-to-end pipeline: scan entries (parse), +// create directory structure, transform +// frontmatter (frontmatter), convert links +// (wikilink), build MOC pages (moc), write +// `Home.md`. Idempotent: re-running with the +// same inputs produces byte-identical output. +// +// # Layout Produced +// +// - `/Home.md`: landing MOC +// - `/MOC.md`: topics overview +// - `/topics/.md`: per-topic pages +// - `///.md`: entries +// +// # Concurrency +// +// Single-process, sequential. `O(N)` over journal +// entries. package obsidian diff --git a/internal/cli/journal/core/parse/doc.go b/internal/cli/journal/core/parse/doc.go index 8c1a20d32..8f584ebe8 100644 --- a/internal/cli/journal/core/parse/doc.go +++ b/internal/cli/journal/core/parse/doc.go @@ -1,12 +1,41 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package parse scans journal directories and parses entries. 
+// Package parse scans a journal directory and turns each +// markdown file into a typed [entity.JournalEntry] by +// reading and validating its YAML frontmatter, the +// upstream of every site-builder, Obsidian-exporter, MOC +// generator, and lock-state syncer in the journal pipeline. // -// [ScanJournalEntries] reads all markdown files in the journal -// directory, parsing frontmatter into JournalEntry structs. -// [JournalEntry] parses a single file by path. +// # Public Surface +// +// - **[ScanJournalEntries](dir)**: walks `dir`, +// parses every `*.md` file, and returns +// `[]*JournalEntry` plus an error slice for files +// that failed to parse. The walk continues past +// bad files so a single malformed entry does not +// abort the whole scan. +// - **[JournalEntry](path)**: parses one file by +// path. Used by single-entry callers (the +// `--show <session-id>` lookup, the lock CLI, and the +// drift checker). +// +// # Frontmatter Schema +// +// Each entry's frontmatter must satisfy the journal +// schema documented in +// `internal/entity/journal.go.JournalFrontmatter`: +// `id`, `date`, `title`, `slug`, optional `topics`, +// optional `locked`, optional `enriched`, optional +// `part` / `parts` for multipart entries. Unknown +// fields are preserved (round-trip safe). +// +// # Concurrency +// +// Stateless and filesystem-bound. Concurrent calls +// against the same directory each pay the full read +// cost. package parse diff --git a/internal/cli/journal/core/plan/doc.go b/internal/cli/journal/core/plan/doc.go index 0c6419cb3..8eeb6746c 100644 --- a/internal/cli/journal/core/plan/doc.go +++ b/internal/cli/journal/core/plan/doc.go @@ -4,10 +4,50 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package plan builds the import plan for journal import operations. +// Package plan builds the import plan for journal import +// operations.
// -// [Import] scans available sessions, matches them against existing -// journal files, and produces an ImportPlan describing what to -// create, regenerate, skip, or rename. Locked entries are always -// preserved. +// When the user runs "ctx journal source", sessions must +// be matched against existing journal files to decide +// what to create, regenerate, skip, or rename. This +// package performs that planning without writing any +// files. +// +// # Import Function +// +// [Import] is the sole exported function. It accepts a +// list of sessions, the journal output directory, an +// index mapping session IDs to existing filenames, the +// journal processing state, import flags, and a flag +// indicating single-session mode. +// +// # Planning Algorithm +// +// For each session, Import: +// +// 1. Filters out empty messages (no text, tool uses, +// or tool results). +// 2. Splits long sessions into multiple parts using +// MaxMessagesPerPart from the journal config. +// 3. Resolves a title-based slug by checking the +// existing file for a frontmatter title, falling +// back to slug generation. +// 4. Detects renames when an old slug differs from +// the new one, recording a RenameOp. +// 5. Assigns each part an action: ActionNew for files +// that do not exist, ActionLocked for entries +// locked in state or frontmatter, ActionRegenerate +// when forced by flags, or ActionSkip otherwise. +// +// The result is an ImportPlan containing all FileAction +// items, RenameOps, and counters (NewCount, LockedCount, +// RegenCount, SkipCount). +// +// # Connection to Other Layers +// +// The cmd/journal package calls [Import] and then hands +// the plan to the write/journal package for execution. +// Locked entries are never overwritten; frontmatter +// locks are promoted to persistent state so future +// operations skip re-parsing. 
package plan diff --git a/internal/cli/journal/core/query/doc.go b/internal/cli/journal/core/query/doc.go index 34135c632..1aa9ae367 100644 --- a/internal/cli/journal/core/query/doc.go +++ b/internal/cli/journal/core/query/doc.go @@ -4,9 +4,33 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package query provides session discovery for journal operations. +// Package query provides session discovery for journal +// operations. // -// [FindSessions] scans Claude Code session directories and returns -// sessions matching the current project. With allProjects=true, it -// returns all sessions regardless of project. +// Before the journal can import or list sessions, it +// needs to locate Claude Code session files on disk. +// This package wraps the lower-level session parser to +// provide project-scoped or global session discovery. +// +// # Session Discovery +// +// [FindSessions] is the sole exported function. When +// allProjects is false, it resolves the current working +// directory and delegates to parser.FindSessionsForCWD, +// returning only sessions whose project path matches. +// When allProjects is true, it calls +// parser.FindSessions to scan all known session +// directories regardless of project. +// +// The returned sessions are sorted by start time by the +// underlying parser. The cmd/journal layer passes them +// to the plan package for import planning or to the +// write layer for listing. +// +// # Error Handling +// +// If the working directory cannot be determined (e.g. +// the directory was deleted), FindSessions returns an +// fs.WorkingDirectory error. Parser-level errors from +// scanning session directories propagate unchanged. 
package query diff --git a/internal/cli/journal/core/reduce/doc.go b/internal/cli/journal/core/reduce/doc.go index 07b42c8df..25dfbfff1 100644 --- a/internal/cli/journal/core/reduce/doc.go +++ b/internal/cli/journal/core/reduce/doc.go @@ -1,13 +1,42 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package reduce strips formatting artifacts from session JSONL -// for clean journal markdown. +// Package reduce strips noise out of raw AI session JSONL so +// the journal markdown a user reads is the conversation, not +// the wire format. The package is the **noise-removal pass** +// the importer runs before the entry hits disk. // -// [StripFences] removes code fence markers. [StripSystemReminders] -// removes system-reminder XML tags. [CleanToolOutputJSON] simplifies -// tool output JSON for readability. +// # What Gets Reduced +// +// - **[StripFences](text)**: removes orphan code-fence +// markers left by the model when it abandoned a code +// block mid-response. Without this the renderer +// enters "code mode" for the rest of the document. +// - **[StripSystemReminders](text)**: Claude Code +// injects `` tags into tool results +// to nudge the model. The user did not write them +// and they should not appear in the journal. (See +// also [internal/parse.StripSystemReminders] which +// is the shared underlying helper.) +// - **[CleanToolOutputJSON](text)**: collapses raw +// JSON tool output into a more readable summary +// (top-level keys + first values + truncation +// notice) so a 5,000-line `ls` does not balloon +// the journal entry. The original is preserved +// under a `
` toggle for archival. +// +// # Idempotency +// +// All three functions are idempotent: running them +// twice in a row on the same input produces the same +// output as running them once. This is what makes +// them safe to run again during re-import. +// +// # Concurrency +// +// All functions are pure data transformations. +// Concurrent callers never race. package reduce diff --git a/internal/cli/journal/core/schema/check.go b/internal/cli/journal/core/schema/check.go index 51f4d1eb6..96e4c3b81 100644 --- a/internal/cli/journal/core/schema/check.go +++ b/internal/cli/journal/core/schema/check.go @@ -219,19 +219,24 @@ func SortedBlockTypes( // Returns: // - error: non-nil if the report cannot be written func WriteReport(c *schema.Collector) error { - contextDir := rc.ContextDir() - if contextDir == "" { - return nil + contextDir, ctxErr := rc.ContextDir() + if ctxErr != nil { + return ctxErr } reportsDir := filepath.Join(contextDir, dir.Reports) reportPath := filepath.Join(reportsDir, file.SchemaDrift) if !c.Drift() { - if _, statErr := os.Stat(reportPath); statErr == nil { + _, statErr := os.Stat(reportPath) + if statErr == nil { return os.Remove(reportPath) } - return nil + if os.IsNotExist(statErr) { + // No prior report on disk, nothing to clean up. + return nil + } + return statErr } mkErr := ctxIo.SafeMkdirAll(reportsDir, fs.PermExec) diff --git a/internal/cli/journal/core/schema/doc.go b/internal/cli/journal/core/schema/doc.go index 60bb65b03..4a8e3707d 100644 --- a/internal/cli/journal/core/schema/doc.go +++ b/internal/cli/journal/core/schema/doc.go @@ -1,16 +1,47 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package schema provides core logic for journal schema -// validation. 
-// -// It resolves which directories to scan based on CLI flags, -// runs validation across all JSONL files in those directories, -// and manages the drift report lifecycle in .context/reports/. -// Used by both the standalone check command and the import -// integration, which validates source files after importing -// sessions and prints a summary if drift is found. +// Package schema is the **CLI-side wrapper** around the +// underlying [internal/journal/schema] validator: it +// resolves which directories to scan based on user +// flags, runs validation across every JSONL session file +// it finds, and persists the resulting drift report +// under `.context/reports/`. +// +// Used by two surfaces: +// +// 1. **`ctx journal schema`**: the standalone +// drift-check command users run when investigating +// parser issues. +// 2. **`ctx journal import`**: runs validation +// **after** an import as a post-flight check; +// prints a summary if drift is found so users know +// the next Claude Code release may need a parser +// update. +// +// # Public Surface +// +// - **[Run](opts)**: orchestration; resolve scan +// paths from flags, dispatch validation per file +// via [internal/journal/schema], aggregate the +// [Report], optionally write it to +// `.context/reports/schema-drift-.md`. +// +// # The Drift Report +// +// Drift is **informational, not fatal**: a session +// with unknown fields still imports cleanly. The +// report exists so maintainers can update +// [internal/journal/parser]'s schema declarations +// when a new Claude Code release adds fields. See +// the [internal/journal/schema] doc.go for the +// upstream semantics. +// +// # Concurrency +// +// Sequential. The validation itself is fast (a few +// milliseconds per JSONL file). 
package schema diff --git a/internal/cli/journal/core/section/doc.go b/internal/cli/journal/core/section/doc.go index 75e8f71d0..8f8fad2a9 100644 --- a/internal/cli/journal/core/section/doc.go +++ b/internal/cli/journal/core/section/doc.go @@ -1,15 +1,53 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package section builds topic index pages and section-based site -// output for the journal. -// -// [BuildTopicIndex] aggregates entries by topic with popularity -// thresholds. [GenerateTopicsIndex] renders the topics index page. -// [GenerateTopicPage] renders a single topic's entry list. -// [WriteFormatted] and [WriteMonths] render section content into -// string builders. +// Package section builds **topic-based index pages** for the +// journal site: the page that lists every entry tagged with +// `#auth`, the page that lists every entry tagged with +// `#hooks`, and so on. It also assembles the section content +// (collated month/topic groupings) the site renderer drops +// into the navigation tree. +// +// The package is one of three site-rendering helpers; the +// other two are [moc] (Maps of Content for both the site and +// Obsidian) and [generate] (top-level page templates). +// +// # The Surface +// +// - **[BuildTopicIndex](entries, threshold)**: buckets +// entries by topic (frontmatter `topics:` list). +// Topics that appear in fewer than `threshold` entries +// are folded into a tail "other" bucket so the index +// stays readable as the journal grows. Returns a +// [TopicIndex] keyed by canonical topic slug. +// - **[GenerateTopicsIndex](idx)**: renders the +// topics-overview page: every topic name + entry +// count, sorted by popularity descending. Output is +// a `string` ready to be written to +// `site/topics/index.md`. 
+// - **[GenerateTopicPage](topic, entries)**: renders a +// single topic's entry list (date + title + slug +// link). Used per-topic by the site builder to +// produce `site/topics/.md`. +// - **[WriteFormatted](sb, entries)**: appends a +// formatted entry list into a `*strings.Builder`. +// - **[WriteMonths](sb, entries)**: appends entries +// grouped by year-month with month sub-headings. +// +// # Popularity Threshold +// +// The threshold is configurable via the site builder +// invocation; the default is "show topics with 3+ +// entries individually, fold the rest into 'other'". +// This is a tunable balance: too low and the topics +// page is dominated by one-off tags; too high and +// long-tail tags disappear entirely. +// +// # Concurrency +// +// All functions are pure data transformations over +// `[]Entry` / topic maps. Concurrent callers never race. package section diff --git a/internal/cli/journal/core/session/doc.go b/internal/cli/journal/core/session/doc.go index 16797da58..f7f2e9a03 100644 --- a/internal/cli/journal/core/session/doc.go +++ b/internal/cli/journal/core/session/doc.go @@ -4,9 +4,33 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package session provides session statistics helpers for journal -// generation. +// Package session provides session statistics helpers +// for journal generation. // -// [CountUnique] returns the number of unique entries across all -// topics, used for site-level statistics. +// When the journal site is rendered, the index page +// shows aggregate statistics such as the total number +// of unique sessions across all topics. This package +// provides the counting logic for that purpose. +// +// # Unique Session Counting +// +// [CountUnique] is the sole exported function. It +// accepts a slice of TopicData values, each containing +// a list of journal entries. 
The function iterates +// every entry across all topics, collecting filenames +// into a set, and returns the set size. Because a +// single session can appear under multiple topics, the +// set-based approach avoids double-counting. +// +// The cmd/journal layer calls CountUnique after +// grouping entries by topic and passes the result to +// the write layer for rendering in the site header. +// +// # Data Flow +// +// TopicData arrives from the grouping and index +// packages. Each entry carries a Filename field that +// uniquely identifies the source session file. The +// count feeds into template data for the journal +// index page. package session diff --git a/internal/cli/journal/core/slug/doc.go b/internal/cli/journal/core/slug/doc.go index f8c4856d3..c3a7e4340 100644 --- a/internal/cli/journal/core/slug/doc.go +++ b/internal/cli/journal/core/slug/doc.go @@ -1,13 +1,47 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package slug generates URL-safe identifiers from session titles. +// Package slug generates **URL-safe, filesystem-safe +// identifiers** from session titles and handles the +// deduplication logic that keeps two sessions with the +// same title from colliding on disk. // -// [FromTitle] converts a title to a lowercase hyphenated slug. -// [CleanTitle] removes non-alphanumeric characters. [ForTitle] -// generates both a slug and cleaned title for a session, handling -// deduplication against existing slugs. +// Slugs are how journal entries are addressed throughout +// ctx: filenames are `YYYY-MM-DD-.md`, links use +// the slug as the path component, and `ctx journal source +// --show ` looks up by slug. +// +// # Public Surface +// +// - **[FromTitle](title)**: converts a title to a +// lowercase, hyphenated, alphanumeric slug. +// Strips punctuation, collapses runs of separators, +// trims leading/trailing hyphens. 
Idempotent. +// - **[CleanTitle](title)**: strips non-alphanumeric +// characters from a display title (kept for the +// filename's human-readable suffix when one is +// wanted in addition to the slug). +// - **[ForTitle](title, existing)**: the dedup-aware +// wrapper: produces both a slug and a cleaned +// title, appending `-2`, `-3`, etc. when the +// base slug already exists in `existing`. Used by +// the importer when two sessions share a topic +// summary. +// +// # Stability Contract +// +// The slug for a given (title, dedup-context) pair is +// **deterministic**: re-running the importer against +// the same source produces the same slug. This is +// what makes the importer idempotent and what lets +// `git diff` show a meaningful patch when an entry +// is re-enriched. +// +// # Concurrency +// +// All functions are pure. Concurrent callers never +// race. package slug diff --git a/internal/cli/journal/core/source/doc.go b/internal/cli/journal/core/source/doc.go index 82a4dae24..0646c7a37 100644 --- a/internal/cli/journal/core/source/doc.go +++ b/internal/cli/journal/core/source/doc.go @@ -1,13 +1,41 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package source contains helpers for rendering session source -// content to journal markdown. +// Package source contains the **rendering helpers** that +// turn a parsed AI session ([entity.Session]) into the +// markdown that `ctx journal source --show ` and +// `ctx journal import` write to disk. // -// Subpackages: format (part navigation, duration, tool use -// formatting), frontmatter (heading resolution, YAML field -// writing). 
+// The package is split into two focused subpackages: +// +// - **[format]**: small format primitives; +// part-navigation links for multipart sessions +// ([PartNavigation]), [Duration] for human-readable +// time spans, and [ToolUse] for one-line tool-call +// summaries. +// - **[frontmatter]**: YAML frontmatter assembly; +// heading resolution from session content, field +// writing, and ordering so re-import produces +// byte-identical output. +// +// The top-level `source.go` here defines the [Opts] +// flag-bag the `ctx journal source` subcommand fills in +// (`--show`, `--latest`, `--limit`, `--full`, `--project`, +// `--since`, etc.) and used by callers that need to ask +// "which session(s) does the user mean?". +// +// # Public Surface +// +// - **[Opts]**: flag-bag for source +// selection. +// - **[format]**: see subpackage docs. +// - **[frontmatter]**: see subpackage docs. +// +// # Concurrency +// +// All helpers are pure data transformations over +// [entity.Session]. Concurrent callers never race. package source diff --git a/internal/cli/journal/core/source/format/doc.go b/internal/cli/journal/core/source/format/doc.go index 2055fdb07..96a147aa9 100644 --- a/internal/cli/journal/core/source/format/doc.go +++ b/internal/cli/journal/core/source/format/doc.go @@ -1,12 +1,45 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package format provides formatting for session source content. +// Package format provides the **fine-grained formatting +// primitives** used to render a parsed AI session into +// human-readable markdown: part navigation, duration +// strings, tool-call summaries, file references. // -// [PartNavigation] generates Previous/Next links for multipart -// sessions. [Duration] formats a time.Duration for display. -// [ToolUse] formats a tool invocation as a readable string. 
+// The package sits one level below +// [internal/cli/journal/core/source]: this package answers +// "how do I render *this fragment*", `source` answers +// "how do I render the whole session". +// +// # Public Surface +// +// - **[PartNavigation](currentPart, totalParts, slug)**: +// generates Previous / Next links for multipart +// sessions (sessions long enough to be split across +// several files). Returns markdown ready to splice +// into the per-part frontmatter. +// - **[Duration](d)**: formats a `time.Duration` as +// "23m 14s" / "2h 5m" / "3 days" depending on +// magnitude. Empty when zero. +// - **[ToolUse](tu)**: one-line summary of a tool +// call: tool name, key argument(s), success/error. +// Used in the per-turn header and in the +// compressed view. +// - **[ToolResult](tr)**: one-line summary of the +// tool's output, truncated to a configurable +// preview length. +// +// # Local Time vs UTC +// +// Date headers use **local time** so the user sees +// timestamps in their own timezone. UTC is reserved for +// stored timestamps (frontmatter fields) where +// timezone-stable comparisons matter. +// +// # Concurrency +// +// All functions are pure. Concurrent callers never race. package format diff --git a/internal/cli/journal/core/source/format/format.go b/internal/cli/journal/core/source/format/format.go index d36242614..0450af19c 100644 --- a/internal/cli/journal/core/source/format/format.go +++ b/internal/cli/journal/core/source/format/format.go @@ -329,7 +329,7 @@ func JournalEntryPart( } for i, msg := range messages { - // Skip API error messages — they're retry noise. + // Skip API error messages; they're retry noise. 
if msg.IsApiError { sb.WriteString(tpl.RecallApiError + nl + nl) continue diff --git a/internal/cli/journal/core/source/frontmatter/doc.go b/internal/cli/journal/core/source/frontmatter/doc.go index 74c5ae737..6cd8e482a 100644 --- a/internal/cli/journal/core/source/frontmatter/doc.go +++ b/internal/cli/journal/core/source/frontmatter/doc.go @@ -4,10 +4,46 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package frontmatter handles heading and YAML field generation -// for session source content. +// Package frontmatter handles heading resolution and +// YAML field generation for journal source content. // -// [ResolveHeading] picks the best heading from title, slug, or -// base name. [WriteFmQuoted] and [WriteFmString] write individual -// YAML frontmatter fields to a string builder. +// Each journal entry file starts with a YAML +// frontmatter block containing metadata such as title, +// session ID, date, and message count. This package +// provides helpers that the source formatter uses to +// build that block. +// +// # Heading Resolution +// +// [ResolveHeading] picks the best available heading +// from three candidates in priority order: title, slug, +// then base filename. The first non-empty value wins. +// The source formatter calls this to produce the +// Markdown H1 heading that follows the frontmatter. +// +// # YAML Field Writers +// +// Three functions write individual YAML fields to a +// strings.Builder: +// +// - [WriteFmQuoted] writes a quoted string field +// using the FmQuoted template, suitable for +// values that may contain special characters. +// - [WriteFmString] writes a bare string field +// using the FmString template, for simple values +// like dates or session IDs. +// - [WriteFmInt] writes an integer field using the +// FmInt template, for numeric metadata such as +// message counts. +// +// Each writer appends a newline after the field. 
Write +// errors are silently discarded because the builder +// writes to an in-memory buffer that does not fail. +// +// # Data Flow +// +// The source/format package calls these functions +// during journal file generation. Templates for field +// formatting live in the assets/tpl package. Newline +// and delimiter tokens come from config/token. package frontmatter diff --git a/internal/cli/journal/core/turn/doc.go b/internal/cli/journal/core/turn/doc.go index 35a253545..be2daa9d6 100644 --- a/internal/cli/journal/core/turn/doc.go +++ b/internal/cli/journal/core/turn/doc.go @@ -1,12 +1,35 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package turn handles conversation turn parsing and merging. +// Package turn handles **conversation turn parsing and +// merging** in journal markdown: finding where each +// user/assistant turn begins and ends, and merging +// adjacent turns from the same role into one block when +// the original transcript had artificial splits. // -// [Body] extracts the body text of a conversation turn starting -// from a given line index. [MergeConsecutive] combines adjacent -// turns from the same role into a single block. +// The package operates on already-normalized journal +// content (after [normalize] and friends have run); it +// is the per-turn slicer the renderers and the +// per-turn-anchor navigator both rely on. +// +// # Public Surface +// +// - **[Body](lines, startIdx)**: extracts the body +// text of a single turn starting at `startIdx`. +// Reads forward to the next turn header (or EOF) +// and returns the in-between lines. +// - **[MergeConsecutive](lines)**: collapses +// adjacent turns from the same role into a +// single combined block. Useful when Claude +// Code split a long assistant response across +// two consecutive `assistant:` turns due to +// internal pacing. 
+// +// # Concurrency +// +// Pure data transformation. Concurrent callers never +// race. package turn diff --git a/internal/cli/journal/core/validate/doc.go b/internal/cli/journal/core/validate/doc.go index bbf5789cc..6b726d122 100644 --- a/internal/cli/journal/core/validate/doc.go +++ b/internal/cli/journal/core/validate/doc.go @@ -4,9 +4,44 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package validate provides input validation for journal commands. +// Package validate provides input validation for +// journal commands. // -// [EmptyMessage] checks whether a session message has no -// substantive content. [ImportFlags] validates the combination -// of import flags (--all, --regenerate, positional args). +// Before the journal import pipeline begins planning +// or writing, inputs must be checked for consistency. +// This package contains the validation functions that +// the cmd/journal layer calls early in the import +// flow. +// +// # Message Validation +// +// [EmptyMessage] checks whether a session message has +// no substantive content. A message is empty when its +// Text field is blank and it carries no ToolUses or +// ToolResults. The plan package uses EmptyMessage to +// filter out empty messages before splitting sessions +// into parts, ensuring that journal files contain only +// meaningful conversation turns. +// +// # Flag Validation +// +// [ImportFlags] validates the combination of import +// flags and positional arguments. Two rules are +// enforced: +// +// - Passing a session ID together with --all is +// an error (AllWithID). +// - Using --regenerate without --all is an error +// (RegenerateRequiresAll). +// +// Both checks return typed errors from the err/session +// and err/journal packages, which the cmd layer +// renders to the user. +// +// # Data Flow +// +// The cmd/journal layer calls ImportFlags before +// calling query.FindSessions. 
EmptyMessage is called +// by plan.Import during action planning. No state is +// mutated; both functions are pure predicates. package validate diff --git a/internal/cli/journal/core/wikilink/doc.go b/internal/cli/journal/core/wikilink/doc.go index 97f9300f8..ed5309dbb 100644 --- a/internal/cli/journal/core/wikilink/doc.go +++ b/internal/cli/journal/core/wikilink/doc.go @@ -4,10 +4,41 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package wikilink converts standard Markdown links to Obsidian -// wikilink format for vault generation. +// Package wikilink converts standard Markdown links to +// **Obsidian-style `[[wikilinks]]`** during vault export so +// Obsidian's graph view, backlinks, and unlinked-mentions +// features pick up the journal's cross-references natively. // -// [ConvertMarkdownLinks] rewrites [text](url) links to [[target|text]] -// syntax. [Format] builds a single wikilink string. [FormatEntry] -// builds a wikilink for a journal entry using its filename and title. +// The package is one of the per-renderer adapters in the +// site/vault pipeline; the site renderer keeps standard +// `[text](url.md)` markdown, the vault renderer routes +// links through here. +// +// # Public Surface +// +// - **[ConvertMarkdownLinks](text)**: rewrites every +// `[text](url.md)` in `text` to `[[target|text]]` +// (Obsidian's display-text wikilink form). +// Preserves URLs that are not journal entries (raw +// `https://...` links, anchor-only refs). +// - **[Format](target, display)**: builds a single +// wikilink string. `display` is optional; pass +// empty to get `[[target]]`. +// - **[FormatEntry](entry)**: convenience that +// produces the canonical wikilink for a journal +// entry using its slug as target and its title +// as display text. +// +// # The "Why Obsidian Form" Question +// +// Obsidian's wikilinks resolve **by note name**, not by +// path. 
A vault expects `[[my-note]]` regardless of where +// `my-note.md` lives in the folder hierarchy. Standard +// markdown links break the moment the vault is +// reorganized; wikilinks survive. +// +// # Concurrency +// +// All functions are pure. Concurrent callers never +// race. package wikilink diff --git a/internal/cli/journal/doc.go b/internal/cli/journal/doc.go index 3157caa18..393becbce 100644 --- a/internal/cli/journal/doc.go +++ b/internal/cli/journal/doc.go @@ -4,18 +4,40 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package journal implements the "ctx journal" command for analyzing and -// publishing exported AI session files. +// Package journal implements the "ctx journal" command for +// analyzing and publishing exported AI session files. // -// The journal system provides two output formats from -// .context/journal/ entries: +// The journal system ingests session transcripts from +// .context/journal/, enriches them with YAML frontmatter +// metadata (topics, type, outcome, technologies, +// key_files), and publishes them in browsable formats. // -// - ctx journal site: generates a zensical-compatible static site with -// browsable session history, topic/file/type indices, and search. -// - ctx journal obsidian: generates an Obsidian vault with wikilinks, -// MOC (Map of Content) pages, and graph-optimized cross-linking. +// # Subcommands // -// Both formats reuse the same scan/parse/index infrastructure -// and consume the same enriched journal entries (YAML frontmatter -// with topics, type, outcome, technologies, key_files). 
+// - source: list or inspect raw journal entries +// - import: ingest exported session files into the +// journal directory +// - schema: output the journal entry JSON Schema +// - lock: mark a journal entry as finalized +// - unlock: revert a locked entry to editable state +// - sync: synchronize journal state with the source +// - site: generate a zensical-compatible static site +// with browsable history, indices, and search +// - obsidian: generate an Obsidian vault with +// wikilinks, MOC pages, and graph cross-linking +// +// Both site and obsidian formats reuse the same +// scan/parse/index infrastructure and consume identical +// enriched journal entries. +// +// # Subpackages +// +// cmd/source: entry listing and inspection +// cmd/importer: session file ingestion +// cmd/schema: JSON Schema output +// cmd/lock, cmd/unlock: entry finalization +// cmd/sync: state synchronization +// cmd/site: static site generation +// cmd/obsidian: Obsidian vault generation +// core: scan, parse, index, and enrichment logic package journal diff --git a/internal/cli/learning/cmd/reindex/cmd.go b/internal/cli/learning/cmd/reindex/cmd.go index 07c686d4a..43a8ac735 100644 --- a/internal/cli/learning/cmd/reindex/cmd.go +++ b/internal/cli/learning/cmd/reindex/cmd.go @@ -4,7 +4,6 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package reindex provides the "ctx learning reindex" subcommand. package reindex import ( diff --git a/internal/cli/learning/cmd/reindex/doc.go b/internal/cli/learning/cmd/reindex/doc.go index 4ffcdee48..42bc4e2b3 100644 --- a/internal/cli/learning/cmd/reindex/doc.go +++ b/internal/cli/learning/cmd/reindex/doc.go @@ -1,12 +1,43 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package reindex implements the ctx learning reindex subcommand. 
+// Package reindex implements the "ctx learning reindex" +// command. // -// [Cmd] builds the cobra.Command. [Run] parses all timestamped -// entry headers in LEARNINGS.md, regenerates the index table at -// the top of the file sorted by date, and writes the result. +// # Overview +// +// The reindex command regenerates the index table at the +// top of LEARNINGS.md. It parses all timestamped entry +// headers in the file, sorts them by date, rebuilds the +// index section, and writes the result back in place. +// +// This is useful after manual edits to LEARNINGS.md that +// may have left the index out of sync with the actual +// entries, or after bulk imports that appended entries +// without updating the index. +// +// # Flags +// +// This command accepts no flags. +// +// # Behavior +// +// [Cmd] builds a simple cobra.Command with no flags. +// [Run] resolves the LEARNINGS.md path from the context +// directory and delegates to the shared index.Reindex +// function, which handles parsing, sorting, and +// rewriting the file. +// +// The reindex operation is idempotent: running it +// multiple times on an already-indexed file produces +// the same output. +// +// # Output +// +// Prints a confirmation message to stdout indicating +// that the index was regenerated, along with the number +// of entries found. 
package reindex diff --git a/internal/cli/learning/cmd/reindex/run.go b/internal/cli/learning/cmd/reindex/run.go index d74b09c4f..717ed5303 100644 --- a/internal/cli/learning/cmd/reindex/run.go +++ b/internal/cli/learning/cmd/reindex/run.go @@ -26,7 +26,12 @@ import ( // Returns: // - error: Non-nil if the file read/write fails func Run(cmd *cobra.Command, _ []string) error { - filePath := filepath.Join(rc.ContextDir(), ctx.Learning) + ctxDir, err := rc.RequireContextDir() + if err != nil { + cmd.SilenceUsage = true + return err + } + filePath := filepath.Join(ctxDir, ctx.Learning) return index.Reindex( cmd.OutOrStdout(), filePath, diff --git a/internal/cli/learning/doc.go b/internal/cli/learning/doc.go index 701058698..364ddb895 100644 --- a/internal/cli/learning/doc.go +++ b/internal/cli/learning/doc.go @@ -4,9 +4,38 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package learning implements the "ctx learning" command for managing. +// Package learning implements the **`ctx learning`** +// command group for managing `LEARNINGS.md`, currently +// just the `reindex` subcommand that regenerates the +// quick-reference index table at the top of the file. // -// Key exports: [Cmd]. -// See source files for implementation details. -// Part of the cli subsystem. +// `LEARNINGS.md` is the project's running record of +// gotchas, pitfalls, and hard-won lessons. The +// quick-reference index lets `ctx agent` inject a +// token-cheap **table of contents** instead of the full +// prose, so the AI can scan available learnings and +// request the ones it needs by ID. +// +// # Subcommands +// +// - **`ctx learning reindex`**: rebuilds the index +// table by parsing every entry header in +// `LEARNINGS.md` and emitting a fresh +// chronologically-sorted table between the +// begin/end index marker comments in the +// file. Idempotent. See +// [internal/cli/learning/cmd/reindex] for the +// implementation.
+// +// # Adding Entries +// +// New learnings are added through `ctx add learning` +// (the `add` family lives in [internal/cli/add]); this +// package currently only owns the index-maintenance +// side. The `_ctx-learning-add` skill wraps the add +// flow with a guided prompt. +// +// # Concurrency +// +// Stateless. The CLI command runs once and exits. package learning diff --git a/internal/cli/learning/learning_test.go b/internal/cli/learning/learning_test.go index 2e0e8bac4..652bba7c4 100644 --- a/internal/cli/learning/learning_test.go +++ b/internal/cli/learning/learning_test.go @@ -14,6 +14,7 @@ import ( "github.com/ActiveMemory/ctx/internal/config/ctx" "github.com/ActiveMemory/ctx/internal/config/dir" "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/testutil/testctx" ) func TestCmd(t *testing.T) { @@ -64,7 +65,7 @@ func TestRunReindex_NoFile(t *testing.T) { _ = os.Chdir(tempDir) defer func() { _ = os.Chdir(origDir) }() - rc.Reset() + testctx.Declare(t, tempDir) defer rc.Reset() cmd := Cmd() @@ -82,7 +83,7 @@ func TestRunReindex_WithFile(t *testing.T) { _ = os.Chdir(tempDir) defer func() { _ = os.Chdir(origDir) }() - rc.Reset() + testctx.Declare(t, tempDir) defer rc.Reset() // Create the context directory and LEARNINGS.md file @@ -124,7 +125,7 @@ func TestRunReindex_EmptyFile(t *testing.T) { _ = os.Chdir(tempDir) defer func() { _ = os.Chdir(origDir) }() - rc.Reset() + testctx.Declare(t, tempDir) defer rc.Reset() // Create the context directory and empty LEARNINGS.md diff --git a/internal/cli/load/cmd/root/doc.go b/internal/cli/load/cmd/root/doc.go index cc0d4e354..d7a21efc4 100644 --- a/internal/cli/load/cmd/root/doc.go +++ b/internal/cli/load/cmd/root/doc.go @@ -1,12 +1,50 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package root implements the ctx load command. 
+// Package root implements the "ctx load" command. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Overview +// +// The load command reads context files from the .context/ +// directory and outputs them in the recommended read +// order, suitable for piping into an AI assistant or +// reviewing manually. +// +// Two output modes are available: assembled (default) +// and raw. Assembled mode applies token-budget-aware +// truncation and adds section headers. Raw mode outputs +// file contents verbatim without headers or assembly. +// +// # Flags +// +// --budget Token budget for assembled output +// (default 8000, or the value from +// the project config if set). +// --raw Output raw file contents without +// headers or assembly formatting. +// +// # Behavior +// +// [Cmd] builds the cobra.Command and registers the two +// flags. If --budget is not explicitly set on the +// command line, the configured project budget from rc +// is used instead. +// +// [Run] loads the context via context/load.Do, sorts +// files by read order, and dispatches to either +// writeLoad.Raw or writeLoad.Assembled depending on +// the --raw flag. +// +// If the .context/ directory does not exist, the command +// returns a "not initialized" error prompting the user +// to run "ctx init" first. +// +// # Output +// +// In assembled mode, outputs prioritized context with +// section headers and a token summary. In raw mode, +// outputs each file's contents separated by blank lines. 
package root diff --git a/internal/cli/load/cmd/root/run.go b/internal/cli/load/cmd/root/run.go index 0358c3a33..3b6ecec64 100644 --- a/internal/cli/load/cmd/root/run.go +++ b/internal/cli/load/cmd/root/run.go @@ -16,6 +16,7 @@ import ( "github.com/ActiveMemory/ctx/internal/context/load" errCtx "github.com/ActiveMemory/ctx/internal/err/context" errInit "github.com/ActiveMemory/ctx/internal/err/initialize" + "github.com/ActiveMemory/ctx/internal/rc" writeLoad "github.com/ActiveMemory/ctx/internal/write/load" ) @@ -32,6 +33,10 @@ import ( // Returns: // - error: Non-nil if context loading fails or .context/ is not found func Run(cmd *cobra.Command, budget int, raw bool) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } ctx, err := load.Do("") if err != nil { if _, ok := errors.AsType[*errCtx.NotFoundError](err); ok { diff --git a/internal/cli/load/core/convert/doc.go b/internal/cli/load/core/convert/doc.go index be59b0fbb..52e047fca 100644 --- a/internal/cli/load/core/convert/doc.go +++ b/internal/cli/load/core/convert/doc.go @@ -5,7 +5,21 @@ // SPDX-License-Identifier: Apache-2.0 // Package convert transforms context file names into -// human-readable titles. Converts SCREAMING_SNAKE_CASE.md -// filenames (e.g. "AGENT_PLAYBOOK.md") into Title Case -// strings suitable for display (e.g. "Agent Playbook"). +// human-readable titles for display in agent context +// packets and CLI output. +// +// [FileNameToTitle] converts SCREAMING_SNAKE_CASE.md +// filenames into Title Case strings. The transformation +// strips the .md extension, replaces underscores with +// spaces, and capitalizes the first letter of each word +// while lowercasing the rest. 
For example: +// +// - "TASKS.md" -> "Tasks" +// - "AGENT_PLAYBOOK.md" -> "Agent Playbook" +// - "CONSTITUTION.md" -> "Constitution" +// +// This is used by the context load pipeline to produce +// section headers in the agent context packet, making +// machine-named files readable without requiring a +// separate display-name mapping. package convert diff --git a/internal/cli/load/core/doc.go b/internal/cli/load/core/doc.go index b12c2b7c4..0d0b9a08c 100644 --- a/internal/cli/load/core/doc.go +++ b/internal/cli/load/core/doc.go @@ -1,12 +1,40 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package core provides shared helpers for the load command: file. +// Package core provides shared helpers for the load +// command. // -// Shared helpers used by sibling cmd/ packages. -// Exports: [FileNameToTitle], [SortByReadOrder]. -// See package source files for implementation details. +// The "ctx load" command reads context files from the +// project directory and presents them to the user or +// to an AI agent. This core package holds the business +// logic that the cmd/load layer delegates to. +// +// # File Name Conversion +// +// The convert sub-package exports [convert.FileNameToTitle], +// which transforms SCREAMING_SNAKE_CASE markdown +// filenames into Title Case strings for display. It +// strips the .md extension, replaces underscores with +// spaces, and capitalizes each word. For example, +// "AGENT_PLAYBOOK.md" becomes "Agent Playbook". +// +// # Read-Order Sorting +// +// The sort sub-package exports [sort.ByReadOrder], +// which arranges context files according to a +// predefined priority list (ctx.ReadOrder). Files not +// in the list receive a fallback priority and appear +// at the end. The function returns a new sorted slice +// without modifying the original. 
+// +// # Data Flow +// +// The cmd/load layer discovers context files on disk, +// calls ByReadOrder to arrange them, and uses +// FileNameToTitle to generate section headings. The +// sorted, titled output is then rendered by the +// write/load package. package core diff --git a/internal/cli/load/core/sort/doc.go b/internal/cli/load/core/sort/doc.go index e84ba70e6..b21b08f83 100644 --- a/internal/cli/load/core/sort/doc.go +++ b/internal/cli/load/core/sort/doc.go @@ -5,7 +5,24 @@ // SPDX-License-Identifier: Apache-2.0 // Package sort orders context files by their configured -// read priority. Files not in the configured read-order -// list are assigned a fallback priority and appear at the -// end of the sorted output. +// read priority for consistent agent context packets. +// +// [ByReadOrder] sorts a slice of [entity.FileInfo] according +// to the [ctx.ReadOrder] configuration. Each file is +// assigned a priority based on its position in the read +// order list: CONSTITUTION.md first, then TASKS.md, and so +// on. Files not in the list receive a fallback priority +// equal to len(ReadOrder), placing them at the end. +// +// The function returns a new sorted slice without modifying +// the original, so callers can sort a working copy while +// preserving the original file order for other uses. +// +// # Why Read Order Matters +// +// The agent context packet presents files in a specific +// sequence so that the most important context (constitution, +// active tasks) appears first. This is critical when the +// agent's context budget is limited: higher-priority files +// are included before the budget is exhausted. package sort diff --git a/internal/cli/load/doc.go b/internal/cli/load/doc.go index 82628fb1b..44e7ae7d9 100644 --- a/internal/cli/load/doc.go +++ b/internal/cli/load/doc.go @@ -4,11 +4,13 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package load provides the command for outputting assembled context. 
+// Package load provides the command for outputting +// assembled context. // -// The load command assembles context files from .context/ and outputs them -// in the recommended read order, suitable for providing to an AI assistant. -// This is the primary mechanism for giving AI tools access to project context. +// The load command assembles context files from .context/ +// and outputs them in the recommended read order, suitable +// for providing to an AI assistant. This is the primary +// mechanism for giving AI tools access to project context. // // # Assembly Order // @@ -25,14 +27,21 @@ // // # Token Budget // -// The --budget flag limits output to approximately the specified token count. -// This is useful for AI assistants with context window limitations. Files are -// prioritized by importance, with lower-priority files truncated or omitted -// when budget constraints are reached. +// The --budget flag limits output to approximately the +// specified token count. This is useful for AI assistants +// with context window limitations. Files are prioritized +// by importance, with lower-priority files truncated or +// omitted when budget constraints are reached. // // # Raw Output // -// The --raw flag outputs file contents directly without assembly headers or -// priority-based ordering. This is useful for debugging or when exact file -// contents are needed. +// The --raw flag outputs file contents directly without +// assembly headers or priority-based ordering. This is +// useful for debugging or when exact file contents are +// needed. 
+// +// # Subpackages +// +// - cmd/root: cobra command definition and flag binding +// - core: assembly, ordering, and budget logic package load diff --git a/internal/cli/load/load_test.go b/internal/cli/load/load_test.go index 0880cebc2..597214844 100644 --- a/internal/cli/load/load_test.go +++ b/internal/cli/load/load_test.go @@ -11,6 +11,7 @@ import ( "testing" "github.com/ActiveMemory/ctx/internal/cli/initialize" + "github.com/ActiveMemory/ctx/internal/testutil/testctx" ) // TestLoadCommand tests the load command. @@ -27,6 +28,8 @@ func TestLoadCommand(t *testing.T) { } defer func() { _ = os.Chdir(origDir) }() + testctx.Declare(t, tmpDir) + // First init initCmd := initialize.Cmd() initCmd.SetArgs([]string{}) @@ -57,6 +60,8 @@ func TestLoadRawOutput(t *testing.T) { } defer func() { _ = os.Chdir(origDir) }() + testctx.Declare(t, tmpDir) + // First init initCmd := initialize.Cmd() initCmd.SetArgs([]string{}) diff --git a/internal/cli/loop/cmd/root/doc.go b/internal/cli/loop/cmd/root/doc.go index 129fa6d18..d3c6dcf0e 100644 --- a/internal/cli/loop/cmd/root/doc.go +++ b/internal/cli/loop/cmd/root/doc.go @@ -1,12 +1,51 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package root implements the ctx loop command. +// Package root implements the "ctx loop" command. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Overview +// +// The loop command generates a shell script that runs an +// AI assistant in a repeated loop until a completion +// signal is detected. This enables iterative development +// where the AI builds on its previous work across +// multiple invocations. +// +// The generated script is written to a file (default +// "loop.sh") with executable permissions and includes +// usage instructions printed to stdout. 
+// +// # Flags +// +// -p, --prompt Prompt file for the AI +// (default ".context/loop.md"). +// -t, --tool AI tool: claude, aider, +// or generic (default "claude"). +// -n, --max-iterations Maximum loop iterations; +// 0 means unlimited (default 0). +// -c, --completion Completion signal string +// (default "SYSTEM_CONVERGED"). +// -o, --output Output script filename +// (default "loop.sh"). +// +// # Behavior +// +// [Cmd] builds the cobra.Command and registers all five +// flags with their defaults. [Run] validates the tool +// selection against a known-good set, generates the +// script via core/script.Generate, writes it with +// executable permissions, and prints usage instructions. +// +// If the tool name is not recognized, the command returns +// an "invalid tool" error listing valid options. +// +// # Output +// +// Prints the output filename, the tool being used, the +// prompt file path, iteration limit, and completion +// signal. The generated script itself is written to +// the output file, not to stdout. package root diff --git a/internal/cli/loop/core/doc.go b/internal/cli/loop/core/doc.go index 55fa005d8..da7351d4a 100644 --- a/internal/cli/loop/core/doc.go +++ b/internal/cli/loop/core/doc.go @@ -1,12 +1,44 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 // Package core provides loop script generation logic. // -// Key exports: [GenerateLoopScript]. -// Shared helpers used by sibling cmd/ packages. -// Used by core cmd/ packages. +// The "ctx loop" command creates a bash script that +// runs an AI tool repeatedly with the same prompt file +// until a completion signal appears in the output. +// This core package holds the script generation +// business logic. +// +// # Script Generation +// +// The script sub-package exports [script.Generate], +// which builds a complete bash script string. 
The +// function accepts four parameters: the prompt file +// path, the AI tool name, a maximum iteration count, +// and a completion message string. +// +// The tool parameter selects the AI command template: +// +// - "claude": runs Claude Code with the prompt +// file via the LoopCmdClaude template. +// - "aider": runs Aider with the prompt file +// via the LoopCmdAider template. +// - "generic": runs a generic command via the +// LoopCmdGeneric template. +// +// When maxIterations is greater than zero, the script +// includes an iteration-limit guard that stops after +// the specified number of runs and sends a +// notification. The script monitors output for the +// completion message and exits cleanly when detected. +// +// # Data Flow +// +// The cmd/loop layer validates user inputs and calls +// script.Generate. The resulting script string is +// written to stdout or a file for the user to execute +// in their shell. package core diff --git a/internal/cli/loop/core/script/doc.go b/internal/cli/loop/core/script/doc.go index 4623c4587..ae4aeecf0 100644 --- a/internal/cli/loop/core/script/doc.go +++ b/internal/cli/loop/core/script/doc.go @@ -5,7 +5,35 @@ // SPDX-License-Identifier: Apache-2.0 // Package script generates bash scripts for running AI tool -// iteration loops. Supports Claude, Aider, and generic tool -// configurations with configurable iteration limits and -// completion detection signals. +// iteration loops. The generated script repeatedly invokes +// an AI tool with a prompt file until a completion signal is +// detected in the output or a maximum iteration count is +// reached. +// +// # Supported Tools +// +// [Generate] accepts a tool identifier that selects the +// invocation command template: +// +// - "claude" (default): runs Claude Code in headless +// mode with the prompt file piped as input. +// - "aider": runs the Aider CLI with the prompt file +// as the message argument. 
+// - "generic": runs a shell command that reads the +// prompt file, suitable for custom tool wrappers. +// +// # Iteration Control +// +// The maxIterations parameter caps the loop. When set to +// zero, the loop runs indefinitely until the completion +// signal appears. Each iteration checks the tool's output +// for the completion message string; a match exits the +// loop cleanly with a desktop notification. +// +// # Path Handling +// +// The prompt file path is resolved to an absolute path +// via filepath.Abs before being embedded in the script, +// so the generated script works regardless of the working +// directory at execution time. package script diff --git a/internal/cli/loop/doc.go b/internal/cli/loop/doc.go index 17cc7ea89..844471254 100644 --- a/internal/cli/loop/doc.go +++ b/internal/cli/loop/doc.go @@ -4,12 +4,14 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package loop provides the command for generating Ralph loop scripts. +// Package loop provides the command for generating Ralph +// loop scripts. // -// A Ralph loop is an iterative development technique where an AI assistant -// runs repeatedly with the same prompt until a completion signal is detected. -// This enables autonomous development where the AI builds on its previous work -// across multiple iterations. +// A Ralph loop is an iterative development technique where +// an AI assistant runs repeatedly with the same prompt +// until a completion signal is detected. This enables +// autonomous development where the AI builds on its +// previous work across multiple iterations. // // # How It Works // @@ -18,11 +20,13 @@ // 1. Reads the prompt file (default: .context/loop.md) // 2. Runs the AI tool with the prompt // 3. Checks output for a completion signal -// 4. Repeats until signal is detected or max iterations reached +// 4. 
Repeats until signal is detected or max iterations +// reached // // # Supported Tools // -// The loop command generates scripts for different AI tools: +// The loop command generates scripts for different AI +// tools: // // - claude: Claude Code CLI (default) // - aider: Aider AI pair programming tool @@ -30,8 +34,14 @@ // // # Completion Signal // -// The completion signal (default: "SYSTEM_CONVERGED") indicates the AI has -// finished its work. The AI should output this signal when it determines -// that the task is complete. The loop script watches for this signal and +// The completion signal (default: "SYSTEM_CONVERGED") +// indicates the AI has finished its work. The AI should +// output this signal when it determines that the task is +// complete. The loop script watches for this signal and // exits when detected. +// +// # Subpackages +// +// - cmd/root: cobra command definition and flag binding +// - core: script generation and template rendering package loop diff --git a/internal/cli/mcp/cmd/root/cmd.go b/internal/cli/mcp/cmd/root/cmd.go index 2a88f75fb..5ac910b29 100644 --- a/internal/cli/mcp/cmd/root/cmd.go +++ b/internal/cli/mcp/cmd/root/cmd.go @@ -22,6 +22,11 @@ import ( // Returns: // - error: Non-nil if the server fails to start or encounters an I/O error func Cmd(cmd *cobra.Command, _ []string) error { - srv := internalMcp.New(rc.ContextDir(), cmd.Root().Version) + ctxDir, err := rc.RequireContextDir() + if err != nil { + cmd.SilenceUsage = true + return err + } + srv := internalMcp.New(ctxDir, cmd.Root().Version) return srv.Serve() } diff --git a/internal/cli/mcp/cmd/root/cmd_test.go b/internal/cli/mcp/cmd/root/cmd_test.go new file mode 100644 index 000000000..1c7e698dd --- /dev/null +++ b/internal/cli/mcp/cmd/root/cmd_test.go @@ -0,0 +1,35 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package root + +import ( + "testing" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/config/env" + "github.com/ActiveMemory/ctx/internal/rc" +) + +// TestMcpServe_FailsClosedOnUnsetCTXDIR is the regression guard +// required by spec/single-source-context-anchor.md. The MCP serve +// path must route through rc.RequireContextDir; with CTX_DIR +// unset, the cobra Run should return an error rather than starting +// a server bound to an empty path. +func TestMcpServe_FailsClosedOnUnsetCTXDIR(t *testing.T) { + t.Setenv(env.CtxDir, "") + rc.Reset() + t.Cleanup(rc.Reset) + + c := &cobra.Command{Use: "serve"} + c.SetArgs(nil) + + err := Cmd(c, nil) + if err == nil { + t.Fatal("Cmd() err = nil, want non-nil when CTX_DIR is unset") + } +} diff --git a/internal/cli/mcp/cmd/root/doc.go b/internal/cli/mcp/cmd/root/doc.go index 962b0e35d..788db345c 100644 --- a/internal/cli/mcp/cmd/root/doc.go +++ b/internal/cli/mcp/cmd/root/doc.go @@ -1,12 +1,44 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package root provides the run function for the ctx MCP server command. +// Package root implements the "ctx mcp" command. // -// Key exports: [Cmd]. -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Registered by the cmd parent command. +// # Overview +// +// The mcp command starts a Model Context Protocol (MCP) +// server that communicates over stdin/stdout using +// JSON-RPC 2.0. It exposes ctx's context management +// capabilities as MCP tools, allowing AI assistants to +// read and manipulate project context programmatically. +// +// This command is typically invoked by an MCP client +// (such as Claude Code) rather than run directly by the +// user. The client launches "ctx mcp" as a subprocess +// and communicates via the standard streams. 
+// +// # Flags +// +// This command accepts no flags. It reads the context +// directory from rc and the version from the root +// cobra.Command. +// +// # Behavior +// +// [Cmd] creates a new MCP server instance using the +// resolved context directory and the CLI version string, +// then calls srv.Serve which blocks until the client +// disconnects or an I/O error occurs. +// +// The server registers tools for reading context files, +// querying project state, and other context operations +// defined in the internal/mcp/server package. +// +// # Output +// +// All communication happens over stdin/stdout in +// JSON-RPC 2.0 format. No human-readable output is +// produced on stderr under normal operation. package root diff --git a/internal/cli/mcp/doc.go b/internal/cli/mcp/doc.go index 0cdcc46fa..c435daace 100644 --- a/internal/cli/mcp/doc.go +++ b/internal/cli/mcp/doc.go @@ -4,9 +4,21 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package mcp provides the "ctx mcp" CLI command for starting the. +// Package mcp provides the "ctx mcp" CLI command for +// starting the Model Context Protocol server. // -// Key exports: [Cmd]. -// See source files for implementation details. -// Part of the cli subsystem. +// The MCP server exposes ctx context operations as MCP +// tools that AI coding assistants can invoke over stdio +// transport. This allows tools like Claude Code, Cursor, +// and other MCP-aware clients to read, write, and query +// project context without shelling out to the ctx CLI. +// +// # Subpackages +// +// cmd/root: MCP server bootstrap, tool registration, +// and stdio transport setup. Starts the MCP server on +// stdio, registering tool handlers for context +// operations. The command annotates itself with SkipInit +// so it can run without a fully initialized .context/ +// directory. 
package mcp diff --git a/internal/cli/mcp/mcp.go b/internal/cli/mcp/mcp.go index 39afdcd5d..21d0b20ec 100644 --- a/internal/cli/mcp/mcp.go +++ b/internal/cli/mcp/mcp.go @@ -4,7 +4,6 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package mcp provides the CLI command for running the MCP server. package mcp import ( diff --git a/internal/cli/memory/cmd/diff/doc.go b/internal/cli/memory/cmd/diff/doc.go index 9cbf32a61..eafc58556 100644 --- a/internal/cli/memory/cmd/diff/doc.go +++ b/internal/cli/memory/cmd/diff/doc.go @@ -1,12 +1,39 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package diff implements the ctx memory diff subcommand. +// Package diff implements the "ctx memory diff" command. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Overview +// +// The diff command computes and displays a line-based +// diff between the mirror copy of MEMORY.md (stored in +// .context/memory/) and the current source MEMORY.md. +// This lets the user see what has changed since the last +// sync without performing the sync itself. +// +// # Flags +// +// This command accepts no flags. +// +// # Behavior +// +// [Cmd] builds a simple cobra.Command with no flags. +// [Run] resolves the project root from the context +// directory, discovers the source MEMORY.md path, and +// calls mem.Diff to compute the line-based difference +// between the mirror and the source. +// +// If the source file cannot be discovered (no MEMORY.md +// exists in the project), the command returns a "not +// found" error. +// +// # Output +// +// When differences exist, prints the unified diff to +// stdout. When the mirror and source are identical, +// prints a "no changes" message. The diff format uses +// standard addition/removal markers for easy scanning. 
package diff diff --git a/internal/cli/memory/cmd/diff/run.go b/internal/cli/memory/cmd/diff/run.go index 40816579b..32261b132 100644 --- a/internal/cli/memory/cmd/diff/run.go +++ b/internal/cli/memory/cmd/diff/run.go @@ -7,13 +7,11 @@ package diff import ( - "path/filepath" - "github.com/spf13/cobra" + "github.com/ActiveMemory/ctx/internal/cli/memory/core/resolve" errMemory "github.com/ActiveMemory/ctx/internal/err/memory" mem "github.com/ActiveMemory/ctx/internal/memory" - "github.com/ActiveMemory/ctx/internal/rc" "github.com/ActiveMemory/ctx/internal/write/memory" ) @@ -26,8 +24,10 @@ import ( // Returns: // - error: on discovery or diff failure. func Run(cmd *cobra.Command) error { - contextDir := rc.ContextDir() - projectRoot := filepath.Dir(contextDir) + contextDir, projectRoot, err := resolve.ContextAndRoot(cmd) + if err != nil { + return err + } sourcePath, discoverErr := mem.DiscoverPath(projectRoot) if discoverErr != nil { diff --git a/internal/cli/memory/cmd/importer/doc.go b/internal/cli/memory/cmd/importer/doc.go index e060fbca7..20b0204ca 100644 --- a/internal/cli/memory/cmd/importer/doc.go +++ b/internal/cli/memory/cmd/importer/doc.go @@ -1,12 +1,50 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package importer implements the ctx memory import subcommand. +// Package importer implements the "ctx memory import" +// command. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Overview +// +// The import command parses entries from MEMORY.md, +// classifies each one by heuristic keyword matching, +// deduplicates against previously imported entries, and +// promotes new entries into the appropriate .context/ +// files (TASKS.md, DECISIONS.md, CONVENTIONS.md, or +// LEARNINGS.md). 
+// +// # Flags +// +// --dry-run Show the classification plan without +// writing any files. Each entry is +// printed with its target file and the +// keywords that triggered classification. +// +// # Behavior +// +// [Cmd] builds the cobra.Command and registers the +// --dry-run flag. [Run] performs the following steps: +// +// 1. Discovers the source MEMORY.md in the project. +// 2. Parses all entries from the file content. +// 3. Loads import state to identify duplicates. +// 4. For each entry, computes a content hash, skips +// already-imported entries, classifies the entry +// by keyword matching, and either promotes it +// (normal mode) or reports it (dry-run mode). +// 5. Saves updated import state with hashes of newly +// imported entries. +// +// Entries classified as "skip" are silently ignored +// unless --dry-run is active. +// +// # Output +// +// Prints a scan header with the source name and entry +// count, followed by per-entry results (added or +// classified), and a summary with counts by target file +// plus duplicates and skipped entries. 
package importer diff --git a/internal/cli/memory/cmd/importer/run.go b/internal/cli/memory/cmd/importer/run.go index eb5a11784..016f8cb33 100644 --- a/internal/cli/memory/cmd/importer/run.go +++ b/internal/cli/memory/cmd/importer/run.go @@ -7,22 +7,17 @@ package importer import ( - "path/filepath" - "github.com/spf13/cobra" + "github.com/ActiveMemory/ctx/internal/cli/memory/core/resolve" "github.com/ActiveMemory/ctx/internal/config/entry" cfgFmt "github.com/ActiveMemory/ctx/internal/config/format" cfgMemory "github.com/ActiveMemory/ctx/internal/config/memory" "github.com/ActiveMemory/ctx/internal/entity" - errMemory "github.com/ActiveMemory/ctx/internal/err/memory" errState "github.com/ActiveMemory/ctx/internal/err/state" "github.com/ActiveMemory/ctx/internal/format" - "github.com/ActiveMemory/ctx/internal/io" "github.com/ActiveMemory/ctx/internal/memory" - "github.com/ActiveMemory/ctx/internal/rc" "github.com/ActiveMemory/ctx/internal/write/ctximport" - "github.com/ActiveMemory/ctx/internal/write/sync" ) // Run parses MEMORY.md entries, classifies them by heuristic keyword @@ -36,20 +31,18 @@ import ( // Returns: // - error: on discovery, read, state, or promotion failure. 
func Run(cmd *cobra.Command, dryRun bool) error { - contextDir := rc.ContextDir() - projectRoot := filepath.Dir(contextDir) + contextDir, projectRoot, err := resolve.ContextAndRoot(cmd) + if err != nil { + return err + } - sourcePath, discoverErr := memory.DiscoverPath(projectRoot) + sourcePath, discoverErr := resolve.DiscoverSource(cmd, projectRoot) if discoverErr != nil { - sync.ErrAutoMemoryNotActive(cmd, discoverErr) - return errMemory.NotFound() + return discoverErr } - - sourceData, readErr := io.SafeReadFile( - filepath.Dir(sourcePath), filepath.Base(sourcePath), - ) + sourceData, readErr := resolve.ReadSource(sourcePath) if readErr != nil { - return errMemory.Read(readErr) + return readErr } entries := memory.Entries(string(sourceData)) diff --git a/internal/cli/memory/cmd/publish/doc.go b/internal/cli/memory/cmd/publish/doc.go index 3dda0cab7..42f684fb0 100644 --- a/internal/cli/memory/cmd/publish/doc.go +++ b/internal/cli/memory/cmd/publish/doc.go @@ -1,12 +1,48 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package publish implements the ctx memory publish subcommand. +// Package publish implements the "ctx memory publish" +// command. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Overview +// +// The publish command selects high-value context from +// .context/ files, formats it as a marked block, and +// writes it into MEMORY.md. This makes curated project +// context available to AI tools that read MEMORY.md +// but do not have direct access to .context/. +// +// The published block is delimited by markers so it can +// be updated or removed by subsequent publish or +// unpublish operations without affecting user-authored +// content in MEMORY.md. +// +// # Flags +// +// --budget Maximum line count for the published +// block (default from config). 
+// --dry-run Show what would be published without +// writing any files. +// +// # Behavior +// +// [Cmd] builds the cobra.Command and registers the two +// flags. [Run] discovers MEMORY.md, selects content +// from the context directory up to the budget limit, +// prints a plan showing counts by category (tasks, +// decisions, conventions, learnings) and total lines, +// then either writes the block (normal mode) or stops +// after the plan (dry-run mode). +// +// If MEMORY.md cannot be discovered, the command prints +// a warning and returns a "not found" error. +// +// # Output +// +// Prints a publication plan with category counts and +// total lines, followed by a "done" confirmation or +// a "dry run" notice. package publish diff --git a/internal/cli/memory/cmd/publish/run.go b/internal/cli/memory/cmd/publish/run.go index 1749f7fb6..2a107e3c1 100644 --- a/internal/cli/memory/cmd/publish/run.go +++ b/internal/cli/memory/cmd/publish/run.go @@ -7,15 +7,12 @@ package publish import ( - "path/filepath" - "github.com/spf13/cobra" + "github.com/ActiveMemory/ctx/internal/cli/memory/core/resolve" errMemory "github.com/ActiveMemory/ctx/internal/err/memory" mem "github.com/ActiveMemory/ctx/internal/memory" - "github.com/ActiveMemory/ctx/internal/rc" "github.com/ActiveMemory/ctx/internal/write/publish" - "github.com/ActiveMemory/ctx/internal/write/sync" ) // Run selects the high-value context, formats it, and writes a marked block @@ -29,13 +26,14 @@ import ( // Returns: // - error: on discovery, selection, or publish failure. 
func Run(cmd *cobra.Command, budget int, dryRun bool) error { - contextDir := rc.ContextDir() - projectRoot := filepath.Dir(contextDir) + contextDir, projectRoot, err := resolve.ContextAndRoot(cmd) + if err != nil { + return err + } - memoryPath, discoverErr := mem.DiscoverPath(projectRoot) + memoryPath, discoverErr := resolve.DiscoverSource(cmd, projectRoot) if discoverErr != nil { - sync.ErrAutoMemoryNotActive(cmd, discoverErr) - return errMemory.NotFound() + return discoverErr } result, selectErr := mem.SelectContent(contextDir, budget) diff --git a/internal/cli/memory/cmd/status/cmd.go b/internal/cli/memory/cmd/status/cmd.go index 06d246d8c..671790dee 100644 --- a/internal/cli/memory/cmd/status/cmd.go +++ b/internal/cli/memory/cmd/status/cmd.go @@ -4,7 +4,6 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package status implements the "ctx memory status" subcommand. package status import ( diff --git a/internal/cli/memory/cmd/status/doc.go b/internal/cli/memory/cmd/status/doc.go index eae443b0a..5b10d72cd 100644 --- a/internal/cli/memory/cmd/status/doc.go +++ b/internal/cli/memory/cmd/status/doc.go @@ -1,12 +1,52 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package status implements the ctx memory status subcommand. +// Package status implements the "ctx memory status" +// command. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Overview +// +// The status command prints a dashboard of the memory +// bridge state, showing the source MEMORY.md location, +// mirror path, last sync timestamp, line counts for +// both source and mirror, drift detection, and archive +// count. +// +// # Flags +// +// This command accepts no flags. +// +// # Behavior +// +// [Cmd] builds a simple cobra.Command with no flags. 
+// [Run] resolves the project root, discovers the source +// MEMORY.md, loads the sync state, and prints each +// status section: +// +// 1. Bridge header with source and mirror paths. +// 2. Last sync timestamp with relative duration. +// 3. Source and mirror line counts. +// 4. Drift indicator (whether source differs from +// mirror). +// 5. Archive count from the memory archive directory. +// +// # Exit Codes +// +// 0 No drift detected; source and mirror match. +// 2 Drift detected; source has changed since the +// last sync. This exit code enables scripted +// checks in CI or automation. +// +// If the source MEMORY.md cannot be discovered, the +// command prints a "not active" message and returns +// an error. +// +// # Output +// +// Prints a structured status report to stdout with +// labeled fields for each metric. The drift line +// uses a visual indicator for quick scanning. package status diff --git a/internal/cli/memory/cmd/status/run.go b/internal/cli/memory/cmd/status/run.go index acb61a4e5..9f0a85796 100644 --- a/internal/cli/memory/cmd/status/run.go +++ b/internal/cli/memory/cmd/status/run.go @@ -14,6 +14,7 @@ import ( "github.com/spf13/cobra" "github.com/ActiveMemory/ctx/internal/cli/memory/core/count" + "github.com/ActiveMemory/ctx/internal/cli/memory/core/resolve" "github.com/ActiveMemory/ctx/internal/config/dir" "github.com/ActiveMemory/ctx/internal/config/memory" cfgTime "github.com/ActiveMemory/ctx/internal/config/time" @@ -21,7 +22,6 @@ import ( "github.com/ActiveMemory/ctx/internal/format" "github.com/ActiveMemory/ctx/internal/io" mem "github.com/ActiveMemory/ctx/internal/memory" - "github.com/ActiveMemory/ctx/internal/rc" writeMem "github.com/ActiveMemory/ctx/internal/write/memory" ) @@ -34,8 +34,10 @@ import ( // Returns: // - error: on discovery failure. 
func Run(cmd *cobra.Command) error { - contextDir := rc.ContextDir() - projectRoot := filepath.Dir(contextDir) + contextDir, projectRoot, err := resolve.ContextAndRoot(cmd) + if err != nil { + return err + } sourcePath, discoverErr := mem.DiscoverPath(projectRoot) if discoverErr != nil { diff --git a/internal/cli/memory/cmd/sync/cmd.go b/internal/cli/memory/cmd/sync/cmd.go index 59c1c458b..1ad142405 100644 --- a/internal/cli/memory/cmd/sync/cmd.go +++ b/internal/cli/memory/cmd/sync/cmd.go @@ -4,7 +4,6 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package sync implements the "ctx memory sync" subcommand. package sync import ( diff --git a/internal/cli/memory/cmd/sync/doc.go b/internal/cli/memory/cmd/sync/doc.go index 23aab4867..77cba4dd9 100644 --- a/internal/cli/memory/cmd/sync/doc.go +++ b/internal/cli/memory/cmd/sync/doc.go @@ -1,12 +1,49 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package sync implements the ctx memory sync subcommand. +// Package sync implements the "ctx memory sync" command. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Overview +// +// The sync command discovers the source MEMORY.md file, +// mirrors it into .context/memory/, archives the +// previous mirror copy, and updates the sync state +// timestamp. This establishes a snapshot that other +// memory commands (diff, import, status) use as their +// baseline. +// +// # Flags +// +// --dry-run Report what would happen without +// writing any files. Shows source and +// mirror paths and whether drift exists. +// +// # Behavior +// +// [Cmd] builds the cobra.Command and registers the +// --dry-run flag. [Run] performs these steps: +// +// 1. Resolves the project root and discovers the +// source MEMORY.md path. +// 2. 
In dry-run mode, reports the paths and drift +// status, then exits. +// 3. In normal mode, calls memory.Sync which copies +// the source to the mirror directory and archives +// the previous mirror. +// 4. Loads the sync state, marks it as synced with +// the current timestamp, and saves it. +// +// If the source MEMORY.md cannot be discovered, the +// command prints a warning and returns a "not found" +// error. +// +// # Output +// +// In dry-run mode, prints the source path, mirror path, +// and drift status. In normal mode, prints the source +// name, mirror path, archive filename, and line counts +// for both source and mirror. package sync diff --git a/internal/cli/memory/cmd/sync/run.go b/internal/cli/memory/cmd/sync/run.go index 29ffd2773..739f68f6a 100644 --- a/internal/cli/memory/cmd/sync/run.go +++ b/internal/cli/memory/cmd/sync/run.go @@ -11,11 +11,11 @@ import ( "github.com/spf13/cobra" + "github.com/ActiveMemory/ctx/internal/cli/memory/core/resolve" cfgMem "github.com/ActiveMemory/ctx/internal/config/memory" errMem "github.com/ActiveMemory/ctx/internal/err/memory" errState "github.com/ActiveMemory/ctx/internal/err/state" "github.com/ActiveMemory/ctx/internal/memory" - "github.com/ActiveMemory/ctx/internal/rc" "github.com/ActiveMemory/ctx/internal/write/sync" ) @@ -30,13 +30,14 @@ import ( // Returns: // - error: on discovery failure, sync failure, or state persistence failure. 
func Run(cmd *cobra.Command, dryRun bool) error { - contextDir := rc.ContextDir() - projectRoot := filepath.Dir(contextDir) + contextDir, projectRoot, err := resolve.ContextAndRoot(cmd) + if err != nil { + return err + } - sourcePath, discoverErr := memory.DiscoverPath(projectRoot) + sourcePath, discoverErr := resolve.DiscoverSource(cmd, projectRoot) if discoverErr != nil { - sync.ErrAutoMemoryNotActive(cmd, discoverErr) - return errMem.NotFound() + return discoverErr } if dryRun { diff --git a/internal/cli/memory/cmd/unpublish/doc.go b/internal/cli/memory/cmd/unpublish/doc.go index 8917ab9dd..2b5baaf53 100644 --- a/internal/cli/memory/cmd/unpublish/doc.go +++ b/internal/cli/memory/cmd/unpublish/doc.go @@ -1,12 +1,45 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package unpublish implements the ctx memory unpublish subcommand. +// Package unpublish implements the "ctx memory unpublish" +// command. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Overview +// +// The unpublish command removes the ctx-managed marker +// block from MEMORY.md that was previously written by +// "ctx memory publish". All user-authored content +// outside the markers is preserved intact. +// +// This is the inverse of the publish command and is +// useful when the user wants to stop sharing curated +// context through MEMORY.md or wants to rewrite the +// published section manually. +// +// # Flags +// +// This command accepts no flags. +// +// # Behavior +// +// [Cmd] builds a simple cobra.Command with no flags. +// [Run] discovers the source MEMORY.md, reads its +// content, and calls memory.RemovePublished to strip +// the marked block. If no published block is found, +// prints a "not found" message and returns nil. 
If +// the block is found, writes the cleaned content back +// to the file and prints a confirmation. +// +// If the source MEMORY.md cannot be discovered, the +// command prints a warning and returns a "not found" +// error. +// +// # Output +// +// Prints either a "not found" notice when no published +// block exists, or an "unpublished" confirmation when +// the block was successfully removed. package unpublish diff --git a/internal/cli/memory/cmd/unpublish/run.go b/internal/cli/memory/cmd/unpublish/run.go index 10b53444d..e348ac132 100644 --- a/internal/cli/memory/cmd/unpublish/run.go +++ b/internal/cli/memory/cmd/unpublish/run.go @@ -7,18 +7,15 @@ package unpublish import ( - "path/filepath" - "github.com/spf13/cobra" + "github.com/ActiveMemory/ctx/internal/cli/memory/core/resolve" "github.com/ActiveMemory/ctx/internal/config/fs" cfgMem "github.com/ActiveMemory/ctx/internal/config/memory" ctxErr "github.com/ActiveMemory/ctx/internal/err/memory" "github.com/ActiveMemory/ctx/internal/io" "github.com/ActiveMemory/ctx/internal/memory" - "github.com/ActiveMemory/ctx/internal/rc" "github.com/ActiveMemory/ctx/internal/write/publish" - "github.com/ActiveMemory/ctx/internal/write/sync" ) // Run removes the ctx-managed marker block from MEMORY.md, @@ -30,20 +27,18 @@ import ( // Returns: // - error: on discovery, read, or write failure. 
func Run(cmd *cobra.Command) error { - contextDir := rc.ContextDir() - projectRoot := filepath.Dir(contextDir) - - memoryPath, discoverErr := memory.DiscoverPath(projectRoot) + _, projectRoot, err := resolve.ContextAndRoot(cmd) + if err != nil { + cmd.SilenceUsage = true + return err + } + memoryPath, discoverErr := resolve.DiscoverSource(cmd, projectRoot) if discoverErr != nil { - sync.ErrAutoMemoryNotActive(cmd, discoverErr) - return ctxErr.NotFound() + return discoverErr } - - data, readErr := io.SafeReadFile( - filepath.Dir(memoryPath), filepath.Base(memoryPath), - ) + data, readErr := resolve.ReadSource(memoryPath) if readErr != nil { - return ctxErr.Read(readErr) + return readErr } cleaned, found := memory.RemovePublished(string(data)) diff --git a/internal/cli/memory/core/count/doc.go b/internal/cli/memory/core/count/doc.go index 6e0a8b326..61fb47d28 100644 --- a/internal/cli/memory/core/count/doc.go +++ b/internal/cli/memory/core/count/doc.go @@ -5,7 +5,26 @@ // SPDX-License-Identifier: Apache-2.0 // Package count provides line counting utilities for memory -// file analysis. Used by the memory status command to report -// source and mirror line counts for drift detection between -// the external memory source and its local mirror. +// file analysis. +// +// [FileLines] counts the number of newline characters in +// raw file bytes using [bytes.Count]. This gives a fast +// approximation of line count without parsing the file +// content. The count is newline-based (LF), consistent with +// the project's LF-only convention. +// +// The memory status command uses this to report source and +// mirror line counts side by side. A mismatch between source +// and mirror line counts indicates drift: the external +// memory source (e.g., Claude Code's MEMORY.md) has changed +// since the last sync, or the local mirror has been edited +// independently. 
+// +// # Design Choice +// +// Counting newlines rather than splitting into lines avoids +// allocating a string slice for what is purely a numeric +// query. For large memory files this keeps the status +// command's memory footprint proportional to file size, +// not line count. package count diff --git a/internal/cli/memory/core/doc.go b/internal/cli/memory/core/doc.go index 04f43e92e..0d333972e 100644 --- a/internal/cli/memory/core/doc.go +++ b/internal/cli/memory/core/doc.go @@ -1,12 +1,32 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package core provides shared helpers for memory subcommands. +// Package core provides shared helpers for the memory +// subcommands. // -// Shared helpers used by sibling cmd/ packages. -// Exports: [CountFileLines]. -// See package source files for implementation details. +// The "ctx memory" command family reports statistics +// about context files such as line counts and token +// estimates. This core package holds the counting +// logic that the cmd/memory layer delegates to. +// +// # Line Counting +// +// The count sub-package exports [count.FileLines], +// which counts the number of newline characters in a +// byte slice. It uses bytes.Count with the LF token +// from config/token. The cmd/memory layer reads each +// context file into memory and passes the raw bytes +// to FileLines to obtain per-file line counts for +// display. +// +// # Data Flow +// +// The cmd/memory layer discovers context files, reads +// them from disk, and calls FileLines for each file. +// The resulting counts are passed to the write/memory +// package for formatted output showing file sizes and +// totals. 
package core diff --git a/internal/cli/memory/core/resolve/doc.go b/internal/cli/memory/core/resolve/doc.go new file mode 100644 index 000000000..7fc319e02 --- /dev/null +++ b/internal/cli/memory/core/resolve/doc.go @@ -0,0 +1,22 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package resolve centralizes path resolution shared by every +// memory-bridge subcommand. Each subcommand needs the declared +// context directory (for the .context/memory/ mirror) and its +// parent (the project root, where MEMORY.md lives). +// +// Before this package existed, every memory Run function repeated +// the rc.RequireContextDir + filepath.Dir + cobra.SilenceUsage +// sequence verbatim. Collapsing those three lines into a single +// ContextAndRoot call makes the Run functions read like the task +// they perform, not like the setup scaffolding every Run shares. +// +// The package does not cover memory.DiscoverPath: each caller +// handles its discovery-failure case differently (some emit +// StatusNotActive output, some a tailored NotFound error), so that +// step stays inline where the differences live. +package resolve diff --git a/internal/cli/memory/core/resolve/resolve.go b/internal/cli/memory/core/resolve/resolve.go new file mode 100644 index 000000000..d429a4f67 --- /dev/null +++ b/internal/cli/memory/core/resolve/resolve.go @@ -0,0 +1,91 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package resolve + +import ( + "path/filepath" + + "github.com/spf13/cobra" + + errMemory "github.com/ActiveMemory/ctx/internal/err/memory" + "github.com/ActiveMemory/ctx/internal/io" + "github.com/ActiveMemory/ctx/internal/memory" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/write/sync" +) + +// ContextAndRoot resolves the context directory and its parent +// (project root) for a memory subcommand Run. +// +// Silences cobra's usage dump on error: a missing CTX_DIR is a +// declaration problem, not a misuse of the command. Callers return +// the error unchanged so the standard tailored message from +// rc.RequireContextDir reaches the user. +// +// Parameters: +// - cmd: the cobra command being run (used only for SilenceUsage). +// +// Returns: +// - string: absolute path to the declared context directory. +// - string: project root (filepath.Dir of the context directory), +// where MEMORY.md is expected to live. +// - error: non-nil when the context directory is not declared. +func ContextAndRoot(cmd *cobra.Command) (string, string, error) { + contextDir, err := rc.RequireContextDir() + if err != nil { + cmd.SilenceUsage = true + return "", "", err + } + return contextDir, filepath.Dir(contextDir), nil +} + +// DiscoverSource runs memory.DiscoverPath and applies the standard +// "auto memory not active" treatment: surface the helper notice to +// the Cobra command's output and return errMemory.NotFound. This is +// the shape four of the six memory subcommands (importer, publish, +// sync, unpublish) share. The diff and status commands want a +// different discovery-failure message and keep their handling +// inline. +// +// Parameters: +// - cmd: the cobra command being run (passed through to the +// sync.ErrAutoMemoryNotActive helper for user-facing output). +// - projectRoot: project root previously resolved via +// [ContextAndRoot]. 
+// +// Returns: +// - string: absolute path to the MEMORY.md source file when +// discovered successfully. +// - error: errMemory.NotFound when DiscoverPath fails; nil on +// success. +func DiscoverSource(cmd *cobra.Command, projectRoot string) (string, error) { + sourcePath, err := memory.DiscoverPath(projectRoot) + if err != nil { + sync.ErrAutoMemoryNotActive(cmd, err) + return "", errMemory.NotFound() + } + return sourcePath, nil +} + +// ReadSource reads the MEMORY.md file at the given path, splitting +// it into the directory + base filename that io.SafeReadFile wants. +// The helper wraps read failures in errMemory.Read so callers get a +// consistent user-facing error message. +// +// Parameters: +// - path: absolute path to the MEMORY.md source file. +// +// Returns: +// - []byte: file contents on success. +// - error: errMemory.Read wrapping the underlying io error. +func ReadSource(path string) ([]byte, error) { + data, err := io.SafeReadFile(filepath.Dir(path), filepath.Base(path)) + if err != nil { + return nil, errMemory.Read(err) + } + return data, nil +} diff --git a/internal/cli/memory/doc.go b/internal/cli/memory/doc.go index 295833178..25b0f752f 100644 --- a/internal/cli/memory/doc.go +++ b/internal/cli/memory/doc.go @@ -4,19 +4,36 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package memory implements the "ctx memory" command for bridging Claude -// Code's auto memory into the .context/ directory. +// Package memory implements the "ctx memory" command for +// bridging Claude Code's auto memory into the .context/ +// directory. // -// The memory bridge discovers MEMORY.md from Claude Code's project-scoped -// auto memory, mirrors it locally for drift detection, and supports -// importing classified entries into structured context files. 
+// The memory bridge discovers MEMORY.md from Claude +// Code's project-scoped auto memory, mirrors it locally +// for drift detection, and supports importing classified +// entries into structured context files. This ensures +// knowledge captured by Claude Code's auto-memory system +// feeds back into the persistent project context. // -// Subcommands: +// # Subcommands // -// - sync: copy MEMORY.md to .context/memory/mirror.md with archival -// - status: show drift status and line counts -// - diff: show line-level differences between mirror and source -// - import: classify and promote entries into context files -// - publish: push curated context back into MEMORY.md -// - unpublish: remove published sections from MEMORY.md +// - sync: copy MEMORY.md to .context/memory/mirror.md +// with archival of the previous mirror +// - status: show drift status and line counts between +// source and mirror +// - diff: show line-level differences between mirror +// and source +// - import: classify and promote entries into the +// appropriate context files (decisions, learnings, +// conventions, tasks) +// - publish: push curated context back into MEMORY.md +// - unpublish: remove published sections from +// MEMORY.md +// +// # Subpackages +// +// cmd/sync, cmd/status, cmd/diff: mirror operations +// cmd/importer: entry classification and promotion +// cmd/publish, cmd/unpublish: bidirectional sync +// core: mirror discovery, diff, and classification package memory diff --git a/internal/cli/message/cmd.go b/internal/cli/message/cmd.go index 376381e58..ba23a9e36 100644 --- a/internal/cli/message/cmd.go +++ b/internal/cli/message/cmd.go @@ -12,7 +12,7 @@ import ( "github.com/ActiveMemory/ctx/internal/cli/message/cmd/root" ) -// Cmd returns the "ctx message" top-level command. +// Cmd returns the "ctx hook message" command. 
// // Returns: // - *cobra.Command: Configured message command with subcommands diff --git a/internal/cli/message/cmd/edit/cmd.go b/internal/cli/message/cmd/edit/cmd.go index cc41eb82d..c0afa33ef 100644 --- a/internal/cli/message/cmd/edit/cmd.go +++ b/internal/cli/message/cmd/edit/cmd.go @@ -13,7 +13,7 @@ import ( "github.com/ActiveMemory/ctx/internal/config/embed/cmd" ) -// Cmd returns the "ctx message edit" subcommand. +// Cmd returns the "ctx hook message edit" subcommand. // // Returns: // - *cobra.Command: Configured edit subcommand diff --git a/internal/cli/message/cmd/edit/doc.go b/internal/cli/message/cmd/edit/doc.go index 3d1716349..740a48799 100644 --- a/internal/cli/message/cmd/edit/doc.go +++ b/internal/cli/message/cmd/edit/doc.go @@ -1,12 +1,51 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package edit provides the ctx system message edit subcommand for. +// Package edit implements the "ctx hook message edit" +// command. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Overview +// +// The edit command creates a local override file for a +// hook message template, allowing the user to customize +// what ctx injects into AI tool hooks. The override +// copy is written to the .context/messages/ directory +// where ctx reads it instead of the embedded default. +// +// # Arguments +// +// Requires exactly two positional arguments: +// +// 1. hook The hook name (e.g. "PreToolUse"). +// 2. variant The template variant (e.g. "default"). +// +// # Flags +// +// This command accepts no flags. +// +// # Behavior +// +// [Cmd] builds a cobra.Command requiring exactly two +// positional arguments. [Run] performs these steps: +// +// 1. Looks up the hook/variant pair in the message +// registry. Returns an error if unknown. +// 2. 
Checks whether an override file already exists; +// returns an error if it does. +// 3. For ctx-specific templates, prints a warning +// about scope limitations. +// 4. Reads the embedded default template content. +// 5. Creates the override directory and writes the +// template file with restricted permissions. +// 6. Prints the override path, an edit hint, and +// the available template variables. +// +// # Output +// +// Prints the path to the newly created override file, +// a hint to edit it, and a list of template variables +// that can be used in the template content. package edit diff --git a/internal/cli/message/cmd/edit/run.go b/internal/cli/message/cmd/edit/run.go index 1391ff01d..986e8a884 100644 --- a/internal/cli/message/cmd/edit/run.go +++ b/internal/cli/message/cmd/edit/run.go @@ -20,6 +20,7 @@ import ( "github.com/ActiveMemory/ctx/internal/err/fs" errTrigger "github.com/ActiveMemory/ctx/internal/err/trigger" ctxIo "github.com/ActiveMemory/ctx/internal/io" + "github.com/ActiveMemory/ctx/internal/rc" writeMessage "github.com/ActiveMemory/ctx/internal/write/message" ) @@ -34,12 +35,19 @@ import ( // - error: Non-nil if the hook/variant is unknown, override exists, // or file operations fail func Run(cmd *cobra.Command, hk, variant string) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } info := messages.Lookup(hk, variant) if info == nil { return errTrigger.Validate(messages.Variants(hk) != nil, hk, variant) } - oPath := message.OverridePath(hk, variant) + oPath, pathErr := message.OverridePath(hk, variant) + if pathErr != nil { + return pathErr + } if _, statErr := os.Stat(oPath); statErr == nil { return errTrigger.OverrideExists(oPath, hk, variant) diff --git a/internal/cli/message/cmd/list/cmd.go b/internal/cli/message/cmd/list/cmd.go index c9ea76de1..1ac114287 100644 --- a/internal/cli/message/cmd/list/cmd.go +++ b/internal/cli/message/cmd/list/cmd.go @@ -15,7 +15,7 @@ import ( cFlag 
"github.com/ActiveMemory/ctx/internal/config/flag" ) -// Cmd returns the "ctx message list" subcommand. +// Cmd returns the "ctx hook message list" subcommand. // // Returns: // - *cobra.Command: Configured list subcommand diff --git a/internal/cli/message/cmd/list/doc.go b/internal/cli/message/cmd/list/doc.go index 3b128e9c5..345c53457 100644 --- a/internal/cli/message/cmd/list/doc.go +++ b/internal/cli/message/cmd/list/doc.go @@ -1,12 +1,48 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package list provides the ctx system message list subcommand for. +// Package list implements the "ctx hook message list" +// command. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Overview +// +// The list command displays all registered hook message +// templates, showing each template's hook name, variant, +// category, and whether a local override exists. This +// gives the user a complete inventory of customizable +// messages. +// +// # Flags +// +// --json Output the template list as formatted +// JSON instead of the default table format. +// +// # Behavior +// +// [Cmd] builds the cobra.Command and registers the +// --json flag. [Run] iterates over the message registry, +// checks for local overrides, and outputs the results. +// +// In table mode, prints a header row followed by one +// row per template with columns for hook, variant, +// category, and override status. +// +// In JSON mode, encodes the full list as a JSON array +// with indentation, including template variables and +// descriptions. +// +// # Output +// +// Table mode (default): +// +// HOOK VARIANT CATEGORY OVERRIDE +// PreToolUse default general no +// ... +// +// JSON mode produces an array of objects with hook, +// variant, category, description, templateVars, and +// hasOverride fields. 
package list diff --git a/internal/cli/message/cmd/list/run.go b/internal/cli/message/cmd/list/run.go index f388830ce..44fa8c4e6 100644 --- a/internal/cli/message/cmd/list/run.go +++ b/internal/cli/message/cmd/list/run.go @@ -16,6 +16,7 @@ import ( cFlag "github.com/ActiveMemory/ctx/internal/config/flag" "github.com/ActiveMemory/ctx/internal/config/token" "github.com/ActiveMemory/ctx/internal/entity" + "github.com/ActiveMemory/ctx/internal/rc" writeMessage "github.com/ActiveMemory/ctx/internal/write/message" ) @@ -27,17 +28,25 @@ import ( // Returns: // - error: Non-nil on JSON encoding failure func Run(cmd *cobra.Command) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } registry := messages.Registry() entries := make([]entity.MessageListEntry, 0, len(registry)) for _, info := range registry { + hasOverride, overrideErr := message.HasOverride(info.Hook, info.Variant) + if overrideErr != nil { + return overrideErr + } entry := entity.MessageListEntry{ Hook: info.Hook, Variant: info.Variant, Category: info.Category, Description: info.Description, TemplateVars: info.TemplateVars, - HasOverride: message.HasOverride(info.Hook, info.Variant), + HasOverride: hasOverride, } if entry.TemplateVars == nil { entry.TemplateVars = []string{} diff --git a/internal/cli/message/cmd/reset/cmd.go b/internal/cli/message/cmd/reset/cmd.go index b962cf832..d7b43f91d 100644 --- a/internal/cli/message/cmd/reset/cmd.go +++ b/internal/cli/message/cmd/reset/cmd.go @@ -13,7 +13,7 @@ import ( "github.com/ActiveMemory/ctx/internal/config/embed/cmd" ) -// Cmd returns the "ctx message reset" subcommand. +// Cmd returns the "ctx hook message reset" subcommand. 
// // Returns: // - *cobra.Command: Configured reset subcommand diff --git a/internal/cli/message/cmd/reset/doc.go b/internal/cli/message/cmd/reset/doc.go index ab5b62fcd..8d5eda877 100644 --- a/internal/cli/message/cmd/reset/doc.go +++ b/internal/cli/message/cmd/reset/doc.go @@ -1,12 +1,52 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package reset provides the ctx system message reset subcommand. +// Package reset implements the "ctx hook message reset" +// command. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Overview +// +// The reset command removes a local override file for a +// hook message template, restoring the embedded default. +// After reset, ctx will use the built-in template for +// that hook/variant pair. +// +// # Arguments +// +// Requires exactly two positional arguments: +// +// 1. hook The hook name (e.g. "PreToolUse"). +// 2. variant The template variant (e.g. "default"). +// +// # Flags +// +// This command accepts no flags. +// +// # Behavior +// +// [Cmd] builds a cobra.Command requiring exactly two +// positional arguments. [Run] performs these steps: +// +// 1. Validates the hook/variant pair against the +// message registry. Returns an error if unknown. +// 2. Removes the override file at the computed path. +// 3. If the file does not exist, prints a "no +// override" message and returns nil. +// 4. Attempts to clean up the now-empty parent +// directories (hook dir and messages dir). +// 5. Prints a confirmation that the override was +// removed. +// +// Directory cleanup failures are logged as warnings +// but do not cause the command to fail, since the +// directories may contain other override files. 
+// +// # Output +// +// Prints either a "no override found" notice or an +// "override removed" confirmation with the hook and +// variant names. package reset diff --git a/internal/cli/message/cmd/reset/run.go b/internal/cli/message/cmd/reset/run.go index 5df198463..3c1621eab 100644 --- a/internal/cli/message/cmd/reset/run.go +++ b/internal/cli/message/cmd/reset/run.go @@ -17,6 +17,7 @@ import ( "github.com/ActiveMemory/ctx/internal/config/warn" errTrigger "github.com/ActiveMemory/ctx/internal/err/trigger" ctxLog "github.com/ActiveMemory/ctx/internal/log/warn" + "github.com/ActiveMemory/ctx/internal/rc" writeMessage "github.com/ActiveMemory/ctx/internal/write/message" ) @@ -30,12 +31,19 @@ import ( // Returns: // - error: Non-nil if the hook/variant is unknown or removal fails func Run(cmd *cobra.Command, hk, variant string) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } info := messages.Lookup(hk, variant) if info == nil { return errTrigger.Validate(messages.Variants(hk) != nil, hk, variant) } - oPath := message.OverridePath(hk, variant) + oPath, pathErr := message.OverridePath(hk, variant) + if pathErr != nil { + return pathErr + } if removeErr := os.Remove(oPath); removeErr != nil { if os.IsNotExist(removeErr) { diff --git a/internal/cli/message/cmd/root/cmd.go b/internal/cli/message/cmd/root/cmd.go index 57ec5a957..fb93a5ec9 100644 --- a/internal/cli/message/cmd/root/cmd.go +++ b/internal/cli/message/cmd/root/cmd.go @@ -17,7 +17,7 @@ import ( "github.com/ActiveMemory/ctx/internal/config/embed/cmd" ) -// Cmd returns the "ctx message" top-level command. +// Cmd returns the "ctx hook message" command. 
// // Returns: // - *cobra.Command: Configured message command diff --git a/internal/cli/message/cmd/root/doc.go b/internal/cli/message/cmd/root/doc.go index 1487a291e..b8bf39ee6 100644 --- a/internal/cli/message/cmd/root/doc.go +++ b/internal/cli/message/cmd/root/doc.go @@ -1,12 +1,40 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package root provides the parent command for ctx system message. +// Package root provides the "ctx hook message" parent +// command. // -// Key exports: [Cmd]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Overview +// +// This package groups message template management +// subcommands under a single namespace. It does not +// contain business logic itself; it delegates to its +// four children: +// +// - list: displays all registered hook message +// templates with their override status. +// - show: prints the content of a specific template, +// using the override if one exists. +// - edit: creates a local override file so the user +// can customize a template. +// - reset: removes a local override to restore the +// embedded default. +// +// # Usage +// +// ctx hook message list [--json] +// ctx hook message show +// ctx hook message edit +// ctx hook message reset +// +// # Behavior +// +// [Cmd] uses the parent.Cmd helper to build a +// cobra.Command with descriptions loaded from embedded +// assets. It attaches the list, show, edit, and reset +// subcommands as children. Running the parent without a +// subcommand prints the help text. 
package root diff --git a/internal/cli/message/cmd/show/cmd.go b/internal/cli/message/cmd/show/cmd.go index e5cccaf76..48fbc1c1f 100644 --- a/internal/cli/message/cmd/show/cmd.go +++ b/internal/cli/message/cmd/show/cmd.go @@ -13,7 +13,7 @@ import ( "github.com/ActiveMemory/ctx/internal/config/embed/cmd" ) -// Cmd returns the "ctx message show" subcommand. +// Cmd returns the "ctx hook message show" subcommand. // // Returns: // - *cobra.Command: Configured show subcommand diff --git a/internal/cli/message/cmd/show/doc.go b/internal/cli/message/cmd/show/doc.go index 13e4d7206..015b282d7 100644 --- a/internal/cli/message/cmd/show/doc.go +++ b/internal/cli/message/cmd/show/doc.go @@ -1,12 +1,51 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package show provides the ctx system message show subcommand. +// Package show implements the "ctx hook message show" +// command. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Overview +// +// The show command displays the content of a hook +// message template. If a local override exists in +// .context/messages/, that version is shown; otherwise +// the embedded default template is displayed. +// +// This lets the user inspect what content ctx will +// inject into a hook without needing to locate the +// override file or browse embedded assets. +// +// # Arguments +// +// Requires exactly two positional arguments: +// +// 1. hook The hook name (e.g. "PreToolUse"). +// 2. variant The template variant (e.g. "default"). +// +// # Flags +// +// This command accepts no flags. +// +// # Behavior +// +// [Cmd] builds a cobra.Command requiring exactly two +// positional arguments. [Run] performs these steps: +// +// 1. Validates the hook/variant pair against the +// message registry. Returns an error if unknown. +// 2. 
Checks for a local override file. If found, +// prints a "source: override" header with the +// file path, template variables, and content. +// 3. If no override exists, reads the embedded +// default template and prints a "source: default" +// header with template variables and content. +// +// # Output +// +// Prints the source label (override path or "default"), +// available template variables, and the full template +// content in a formatted block. package show diff --git a/internal/cli/message/cmd/show/run.go b/internal/cli/message/cmd/show/run.go index 21c3656ec..41def8d83 100644 --- a/internal/cli/message/cmd/show/run.go +++ b/internal/cli/message/cmd/show/run.go @@ -15,6 +15,7 @@ import ( "github.com/ActiveMemory/ctx/internal/config/file" errTrigger "github.com/ActiveMemory/ctx/internal/err/trigger" "github.com/ActiveMemory/ctx/internal/io" + "github.com/ActiveMemory/ctx/internal/rc" writeMessage "github.com/ActiveMemory/ctx/internal/write/message" ) @@ -28,12 +29,19 @@ import ( // Returns: // - error: Non-nil if the hook/variant is unknown or template is missing func Run(cmd *cobra.Command, hk, variant string) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } info := messages.Lookup(hk, variant) if info == nil { return errTrigger.Validate(messages.Variants(hk) != nil, hk, variant) } - oPath := message.OverridePath(hk, variant) + oPath, pathErr := message.OverridePath(hk, variant) + if pathErr != nil { + return pathErr + } if data, readErr := io.SafeReadUserFile(oPath); readErr == nil { writeMessage.SourceOverride(cmd, oPath) writeMessage.TemplateVars(cmd, message.FormatTemplateVars(info)) diff --git a/internal/cli/message/doc.go b/internal/cli/message/doc.go index d55d87d2e..007b17aa3 100644 --- a/internal/cli/message/doc.go +++ b/internal/cli/message/doc.go @@ -4,9 +4,33 @@ // \ Copyright 2026-present Context contributors. 
// SPDX-License-Identifier: Apache-2.0 -// Package message provides the parent command for ctx system message. +// Package message provides the ctx hook message command +// for injecting messages into AI sessions via the Claude +// Code hook message protocol. // -// Key exports: [Cmd]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// The message command reads structured input and produces +// hook-compatible JSON output that Claude Code interprets +// as injected context. This enables skills, triggers, and +// automation scripts to surface information to the AI +// agent mid-session without user intervention. +// +// # How It Works +// +// When a Claude Code hook fires, message reads the hook +// payload from stdin, evaluates whether a message should +// be injected, and writes a JSON response to stdout. The +// response may contain a message string that Claude Code +// prepends to the next AI prompt. +// +// # Subcommands +// +// - edit: edit a previously sent message +// - list: list messages in the current session +// - reset: reset message state +// - show: display a specific message +// +// # Subpackages +// +// cmd/root: cobra command definition, stdin reading, +// and JSON response formatting package message diff --git a/internal/cli/notify/cmd/setup/cmd.go b/internal/cli/notify/cmd/setup/cmd.go index e0f82f331..373d8f0c0 100644 --- a/internal/cli/notify/cmd/setup/cmd.go +++ b/internal/cli/notify/cmd/setup/cmd.go @@ -15,7 +15,7 @@ import ( "github.com/ActiveMemory/ctx/internal/config/embed/cmd" ) -// Cmd returns the "ctx notify setup" subcommand. +// Cmd returns the "ctx hook notify setup" subcommand. 
// // Returns: // - *cobra.Command: Configured setup subcommand diff --git a/internal/cli/notify/cmd/setup/doc.go b/internal/cli/notify/cmd/setup/doc.go index ac3d3e8df..fd67b9aa1 100644 --- a/internal/cli/notify/cmd/setup/doc.go +++ b/internal/cli/notify/cmd/setup/doc.go @@ -1,12 +1,44 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package setup implements the ctx notify setup subcommand. +// Package setup implements the "ctx hook notify setup" +// command. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Overview +// +// The setup command configures webhook-based +// notifications for ctx hooks. It prompts the user to +// enter a webhook URL via stdin, validates the input, +// and saves the URL in encrypted form for use by +// subsequent hook executions. +// +// # Flags +// +// This command accepts no flags. +// +// # Behavior +// +// [Cmd] builds a simple cobra.Command with no flags. +// [Run] is exported for testability and accepts an +// *os.File for stdin injection. It performs these steps: +// +// 1. Prints a prompt asking for the webhook URL. +// 2. Reads one line from stdin. +// 3. Validates that the input is non-empty. +// 4. Saves the webhook URL via iNotify.SaveWebhook, +// which encrypts and persists it. +// 5. Prints a confirmation with the masked URL and +// the encryption method used. +// +// If stdin is empty or the URL is blank, the command +// returns an appropriate error. +// +// # Output +// +// Prints a setup prompt, then a confirmation showing +// the masked webhook URL (only the last few characters +// visible) and the encryption algorithm used. 
package setup diff --git a/internal/cli/notify/cmd/setup/run.go b/internal/cli/notify/cmd/setup/run.go index 55a035ad5..f9edff172 100644 --- a/internal/cli/notify/cmd/setup/run.go +++ b/internal/cli/notify/cmd/setup/run.go @@ -17,6 +17,7 @@ import ( "github.com/ActiveMemory/ctx/internal/err/fs" errNotify "github.com/ActiveMemory/ctx/internal/err/notify" iNotify "github.com/ActiveMemory/ctx/internal/notify" + "github.com/ActiveMemory/ctx/internal/rc" "github.com/ActiveMemory/ctx/internal/write/notify" ) @@ -31,6 +32,10 @@ import ( // Returns: // - error: Non-nil on empty input or save failure func Run(cmd *cobra.Command, stdin *os.File) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } notify.SetupPrompt(cmd) scanner := bufio.NewScanner(stdin) diff --git a/internal/cli/notify/cmd/test/cmd.go b/internal/cli/notify/cmd/test/cmd.go index 9ba7c9139..3e0699249 100644 --- a/internal/cli/notify/cmd/test/cmd.go +++ b/internal/cli/notify/cmd/test/cmd.go @@ -13,7 +13,7 @@ import ( "github.com/ActiveMemory/ctx/internal/config/embed/cmd" ) -// Cmd returns the "ctx notify test" subcommand. +// Cmd returns the "ctx hook notify test" subcommand. // // Returns: // - *cobra.Command: Configured test subcommand diff --git a/internal/cli/notify/cmd/test/doc.go b/internal/cli/notify/cmd/test/doc.go index ebb95c4fd..08d63059c 100644 --- a/internal/cli/notify/cmd/test/doc.go +++ b/internal/cli/notify/cmd/test/doc.go @@ -1,12 +1,44 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package test implements the ctx notify test subcommand. +// Package test implements the "ctx hook notify test" +// command. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. 
+// # Overview +// +// The test command sends a test notification to the +// configured webhook URL to verify that the notification +// pipeline is working end-to-end. It loads the saved +// webhook, sends a test payload, and reports the HTTP +// status code and success/failure result. +// +// # Flags +// +// This command accepts no flags. +// +// # Behavior +// +// [Cmd] builds a simple cobra.Command with no flags. +// [Run] delegates to coreTest.Send which loads the +// encrypted webhook URL, sends a test HTTP request, +// and returns the result. Then it dispatches to the +// appropriate output based on the result: +// +// - If no webhook is configured, prints a "no +// webhook" message. +// - If the notification was filtered (e.g. by rate +// limiting), prints a "filtered" notice. +// - In all cases where a request was made, prints +// the HTTP status code and whether it was +// successful. +// +// # Output +// +// Prints the HTTP status code and a pass/fail +// indicator, along with the encryption method used. +// If no webhook is configured, prints a message +// directing the user to run "ctx hook notify setup". 
package test diff --git a/internal/cli/notify/cmd/test/run.go b/internal/cli/notify/cmd/test/run.go index 447520e23..b0c678bb5 100644 --- a/internal/cli/notify/cmd/test/run.go +++ b/internal/cli/notify/cmd/test/run.go @@ -11,6 +11,7 @@ import ( coreTest "github.com/ActiveMemory/ctx/internal/cli/notify/core/test" "github.com/ActiveMemory/ctx/internal/config/crypto" + "github.com/ActiveMemory/ctx/internal/rc" writeNotify "github.com/ActiveMemory/ctx/internal/write/notify" ) @@ -22,6 +23,10 @@ import ( // Returns: // - error: Non-nil on webhook load or HTTP failure func Run(cmd *cobra.Command) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } r, sendErr := coreTest.Send() if sendErr != nil { return sendErr diff --git a/internal/cli/notify/core/test/doc.go b/internal/cli/notify/core/test/doc.go index 52bb35113..a19515e4a 100644 --- a/internal/cli/notify/core/test/doc.go +++ b/internal/cli/notify/core/test/doc.go @@ -1,12 +1,50 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package test provides webhook test notification logic: building the. +// Package test provides webhook test notification +// logic. // -// Key exports: [Send], [OK]. -// Shared helpers used by sibling cmd/ packages. -// Used by core cmd/ packages. +// The "ctx notify test" command sends a test payload +// to the configured webhook URL to verify that the +// notification pipeline works end-to-end. This package +// contains the business logic for building, sending, +// and evaluating the test notification. +// +// # Sending a Test Notification +// +// [Send] is the primary function. It performs the +// following steps: +// +// 1. Loads the webhook URL from the project +// configuration via notify.LoadWebhook. If no URL +// is configured, it returns a Result with +// NoWebhook set to true. +// 2. 
Determines the project name from the current +// working directory, falling back to a default +// name if the directory cannot be resolved. +// 3. Builds a NotifyPayload with event type "test", +// a test message, an RFC3339 timestamp, and the +// project name. +// 4. Marshals the payload to JSON. +// 5. Checks whether the "test" event type is allowed +// by the configured event filter, recording the +// filtered flag. +// 6. Posts the JSON body to the webhook URL via +// notify.PostJSON. +// 7. Returns a Result containing the HTTP status code +// and filtered flag. +// +// # Result Evaluation +// +// [OK] checks whether a Result indicates success by +// testing that the status code falls in the 2xx range. +// +// # Result Type +// +// The [Result] struct carries three fields: NoWebhook +// (no URL configured), Filtered (event excluded by +// filter), and StatusCode (the HTTP response code). package test diff --git a/internal/cli/notify/doc.go b/internal/cli/notify/doc.go index 419cf4bd8..9be78c878 100644 --- a/internal/cli/notify/doc.go +++ b/internal/cli/notify/doc.go @@ -4,9 +4,33 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package notify implements the ctx notify command for sending webhook. +// Package notify implements the **`ctx hook notify`** +// command surface (webhook send, setup, and test) +// that wraps the in-process [internal/notify] engine for +// CLI use. // -// Key exports: [Cmd]. -// See source files for implementation details. -// Part of the cli subsystem. +// The command lives under `ctx hook` rather than at root +// because notifications belong to the **hook subsystem** +// (delivered when hooks fire); see +// `internal/cli/hook/hook.go` for the parent registration. +// +// # Subcommands +// +// - **`ctx hook notify [message]`**: fire-and-forget +// send. Required: `--event `. Optional: +// `--session-id`, `--hook`, `--variant`. 
Honors +// the `notify.events` filter in `.ctxrc`; silent +// no-op when the event is not whitelisted. +// - **`ctx hook notify setup`**: interactive prompt +// to capture and encrypt the webhook URL. See +// [internal/cli/notify/cmd/setup]. +// - **`ctx hook notify test`**: sends a test event, +// **bypassing** the event filter so users can +// verify connectivity without subscribing the test +// event first. See [internal/cli/notify/cmd/test]. +// +// # Concurrency +// +// Stateless. The CLI command spawns one HTTP request +// and exits. package notify diff --git a/internal/cli/notify/notify.go b/internal/cli/notify/notify.go index ce82829af..53dc326f2 100644 --- a/internal/cli/notify/notify.go +++ b/internal/cli/notify/notify.go @@ -24,7 +24,7 @@ import ( iNotify "github.com/ActiveMemory/ctx/internal/notify" ) -// Cmd returns the "ctx notify" parent command. +// Cmd returns the "ctx hook notify" parent command. // // Returns: // - *cobra.Command: Configured notify command with subcommands diff --git a/internal/cli/notify/notify_test.go b/internal/cli/notify/notify_test.go index 0a78924ae..2b35ffa82 100644 --- a/internal/cli/notify/notify_test.go +++ b/internal/cli/notify/notify_test.go @@ -26,12 +26,15 @@ func setupCLITest(t *testing.T) (string, func()) { tempDir := t.TempDir() origDir, _ := os.Getwd() _ = os.Chdir(tempDir) - _ = os.MkdirAll(filepath.Join(tempDir, ".context"), 0o750) + ctxPath := filepath.Join(tempDir, ".context") + _ = os.MkdirAll(ctxPath, 0o750) // Create required files so isInitialized returns true for _, f := range ctx.FilesRequired { - p := filepath.Join(tempDir, ".context", f) + p := filepath.Join(ctxPath, f) _ = os.WriteFile(p, []byte("# "+f+"\n"), 0o600) } + // Declare context dir explicitly (explicit-context-dir model). 
+ t.Setenv("CTX_DIR", ctxPath) rc.Reset() return tempDir, func() { _ = os.Chdir(origDir) diff --git a/internal/cli/pad/cmd/add/doc.go b/internal/cli/pad/cmd/add/doc.go index 42a1b7d20..80b00176a 100644 --- a/internal/cli/pad/cmd/add/doc.go +++ b/internal/cli/pad/cmd/add/doc.go @@ -4,9 +4,38 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package add implements the ctx pad add subcommand. +// Package add implements the "ctx pad add" subcommand +// for appending entries to the encrypted scratchpad. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Behavior +// +// The command accepts exactly one positional argument: +// the text to store. When run without flags, it creates +// a plain-text entry. When the --file (-f) flag is +// provided, the text argument becomes a label and the +// file contents are stored as a binary blob. +// +// Each new entry receives a stable auto-incrementing +// ID that persists across additions and deletions. +// The command prints a confirmation with the assigned +// ID on success. +// +// # Flags +// +// --file, -f Import a file as a blob entry; +// the positional arg becomes the +// label for the blob. +// +// # Output +// +// On success, prints a one-line confirmation showing +// the newly assigned entry ID. On failure, returns an +// error for oversized content or read/write problems. +// +// # Delegation +// +// Entry creation is handled by [coreAdd.EntryWithID] +// and [coreAdd.BlobWithID] in the core/add package. +// Persistence goes through [store.WriteEntriesWithIDs]. +// User-facing output is routed through [writePad]. 
package add diff --git a/internal/cli/pad/cmd/add/run.go b/internal/cli/pad/cmd/add/run.go index 9eb83c806..bab2fc8cf 100644 --- a/internal/cli/pad/cmd/add/run.go +++ b/internal/cli/pad/cmd/add/run.go @@ -12,6 +12,7 @@ import ( coreAdd "github.com/ActiveMemory/ctx/internal/cli/pad/core/add" "github.com/ActiveMemory/ctx/internal/cli/pad/core/parse" "github.com/ActiveMemory/ctx/internal/cli/pad/core/store" + "github.com/ActiveMemory/ctx/internal/rc" writePad "github.com/ActiveMemory/ctx/internal/write/pad" ) @@ -28,6 +29,10 @@ import ( // Returns: // - error: Non-nil on read/write failure or too large func Run(cmd *cobra.Command, text, filePath string) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } var entries []parse.Entry var id int var addErr error diff --git a/internal/cli/pad/cmd/edit/doc.go b/internal/cli/pad/cmd/edit/doc.go index 2148b1a02..2a7cb170c 100644 --- a/internal/cli/pad/cmd/edit/doc.go +++ b/internal/cli/pad/cmd/edit/doc.go @@ -4,9 +4,26 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package edit implements the ctx pad edit subcommand. +// Package edit implements **`ctx pad edit`**, the +// subcommand that decrypts the scratchpad to a temp file, +// spawns the user's `$EDITOR` against it, and re-encrypts +// the result on save. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Behavior +// +// - **Cleartext temp file** lives in the secure +// temp directory and is `0o600`. +// - **Editor invocation** uses `$EDITOR` (or +// `vi` as fallback). Foreground; ctx blocks +// until the editor exits. +// - **Re-encrypt** on successful exit. Editor +// non-zero exit aborts the write and leaves the +// scratchpad untouched. +// - **Cleanup**: the temp file is removed in a +// deferred handler regardless of outcome so a +// crashed editor does not leak plaintext. 
+// +// # Concurrency +// +// Single-process, sequential. package edit diff --git a/internal/cli/pad/cmd/edit/run.go b/internal/cli/pad/cmd/edit/run.go index 81d00c0a1..b465bb3bb 100644 --- a/internal/cli/pad/cmd/edit/run.go +++ b/internal/cli/pad/cmd/edit/run.go @@ -11,6 +11,7 @@ import ( coreEdit "github.com/ActiveMemory/ctx/internal/cli/pad/core/edit" "github.com/ActiveMemory/ctx/internal/cli/pad/core/store" + "github.com/ActiveMemory/ctx/internal/rc" writePad "github.com/ActiveMemory/ctx/internal/write/pad" ) @@ -24,6 +25,10 @@ import ( // - error: Non-nil on invalid index, type mismatch, // or read/write failure func Run(cmd *cobra.Command, opts coreEdit.Opts) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } var entries []string var editErr error diff --git a/internal/cli/pad/cmd/export/doc.go b/internal/cli/pad/cmd/export/doc.go index d8ca55158..e95690133 100644 --- a/internal/cli/pad/cmd/export/doc.go +++ b/internal/cli/pad/cmd/export/doc.go @@ -4,9 +4,44 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package export implements the ctx pad export subcommand. +// Package export implements the "ctx pad export" +// subcommand for writing blob entries to disk. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Behavior +// +// The command scans the scratchpad for blob entries +// and writes each blob's binary data to a file in the +// target directory. The target defaults to the current +// directory (".") but accepts an optional positional +// argument for a different path. +// +// When a destination file already exists, the command +// appends a timestamp suffix to avoid overwriting, +// unless --force is set. The --dry-run flag previews +// the export plan without writing any files. +// +// Plain-text entries are silently skipped; only blob +// entries produce output files. 
+// +// # Flags +// +// --force, -f Overwrite existing files instead +// of generating timestamped names. +// --dry-run Print the export plan without +// writing files to disk. +// +// # Output +// +// Each exported blob prints a confirmation line with +// the blob label. At the end, a summary line reports +// the total count of exported (or planned) files. +// In dry-run mode, name collisions are noted with +// the alternative filename that would be used. +// +// # Delegation +// +// Export planning is handled by [coreExport.Plan]. +// File I/O uses [ctxIo.SafeWriteFile] with secret +// permissions. Output formatting goes through the +// [writePad] and [writeExport] packages. package export diff --git a/internal/cli/pad/cmd/export/run.go b/internal/cli/pad/cmd/export/run.go index 45d345b8b..c3a04aef6 100644 --- a/internal/cli/pad/cmd/export/run.go +++ b/internal/cli/pad/cmd/export/run.go @@ -13,6 +13,7 @@ import ( "github.com/ActiveMemory/ctx/internal/config/fs" errFs "github.com/ActiveMemory/ctx/internal/err/fs" ctxIo "github.com/ActiveMemory/ctx/internal/io" + "github.com/ActiveMemory/ctx/internal/rc" writeExport "github.com/ActiveMemory/ctx/internal/write/export" writePad "github.com/ActiveMemory/ctx/internal/write/pad" ) @@ -28,6 +29,10 @@ import ( // Returns: // - error: On directory creation or scratchpad read failure func Run(cmd *cobra.Command, dir string, force, dryRun bool) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } if !dryRun { if mkErr := ctxIo.SafeMkdirAll(dir, fs.PermExec); mkErr != nil { return errFs.Mkdir(dir, mkErr) diff --git a/internal/cli/pad/cmd/merge/doc.go b/internal/cli/pad/cmd/merge/doc.go index a2c198e82..d94d3315b 100644 --- a/internal/cli/pad/cmd/merge/doc.go +++ b/internal/cli/pad/cmd/merge/doc.go @@ -4,9 +4,45 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package merge implements the ctx pad merge subcommand. 
+// Package merge implements the "ctx pad merge" +// subcommand for importing entries from external +// scratchpad files into the current pad. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Behavior +// +// The command reads one or more input files, each +// containing scratchpad entries (optionally encrypted +// with a different key). It deduplicates incoming +// entries against the current pad and appends only +// new ones. Duplicate entries are reported but not +// added. +// +// When the input contains binary blob entries, the +// command warns the user. If a blob label conflicts +// with an existing blob (same label, different data), +// a conflict notice is printed. +// +// # Flags +// +// --key, -k Path to the encryption key +// for the input files. When +// omitted, uses the project key. +// --dry-run Print the merge summary +// without writing changes. +// +// # Output +// +// Each new entry prints a confirmation line with the +// source file. Duplicates are reported individually. +// A final summary line shows the count of entries +// added and duplicates skipped, plus whether it was +// a dry run. +// +// # Delegation +// +// Key loading and file parsing are handled by the +// core/merge package. Deduplication uses an in-memory +// set built from the current pad. Blob conflict +// detection is provided by [merge.HasBlobConflict]. +// Persistence goes through [store.WriteEntries]. 
package merge diff --git a/internal/cli/pad/cmd/merge/run.go b/internal/cli/pad/cmd/merge/run.go index c7aa1cf77..f16d4b2df 100644 --- a/internal/cli/pad/cmd/merge/run.go +++ b/internal/cli/pad/cmd/merge/run.go @@ -13,6 +13,7 @@ import ( "github.com/ActiveMemory/ctx/internal/cli/pad/core/merge" "github.com/ActiveMemory/ctx/internal/cli/pad/core/store" errFs "github.com/ActiveMemory/ctx/internal/err/fs" + "github.com/ActiveMemory/ctx/internal/rc" "github.com/ActiveMemory/ctx/internal/write/pad" ) @@ -33,12 +34,19 @@ func Run( keyFile string, dryRun bool, ) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } current, readErr := store.ReadEntries() if readErr != nil { return readErr } - key := merge.LoadKey(keyFile) + key, keyErr := merge.LoadKey(keyFile) + if keyErr != nil { + return keyErr + } seen := make(map[string]bool, len(current)) for _, e := range current { diff --git a/internal/cli/pad/cmd/mv/doc.go b/internal/cli/pad/cmd/mv/doc.go index f5f5f8460..7a7871217 100644 --- a/internal/cli/pad/cmd/mv/doc.go +++ b/internal/cli/pad/cmd/mv/doc.go @@ -4,9 +4,39 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package mv implements the ctx pad mv subcommand. +// Package mv implements the "ctx pad mv" subcommand +// for reordering scratchpad entries by position. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Behavior +// +// The command accepts exactly two positional arguments: +// a source position N and a destination position M, +// both 1-based. It extracts the entry at position N, +// removes it from its current slot, and inserts it at +// position M. All other entries shift accordingly. +// +// Both positions are validated against the current +// entry count before any mutation occurs. Invalid +// indices produce an error without modifying the pad. 
+// +// Note that positions are display-order indices, not +// stable IDs. The command operates on the physical +// ordering of entries in the pad file. +// +// # Flags +// +// None. This command takes no flags. +// +// # Output +// +// On success, prints a one-line confirmation showing +// the source and destination positions. On failure, +// returns an error for out-of-range positions or +// read/write problems. +// +// # Delegation +// +// Index validation is handled by [validate.Index]. +// Entry persistence goes through [store.WriteEntries]. +// User-facing output is routed through [pad.EntryMoved]. package mv diff --git a/internal/cli/pad/cmd/mv/run.go b/internal/cli/pad/cmd/mv/run.go index d82660620..cfc4acaba 100644 --- a/internal/cli/pad/cmd/mv/run.go +++ b/internal/cli/pad/cmd/mv/run.go @@ -11,6 +11,7 @@ import ( "github.com/ActiveMemory/ctx/internal/cli/pad/core/store" "github.com/ActiveMemory/ctx/internal/cli/pad/core/validate" + "github.com/ActiveMemory/ctx/internal/rc" "github.com/ActiveMemory/ctx/internal/write/pad" ) @@ -24,6 +25,10 @@ import ( // Returns: // - error: Non-nil on invalid index or read/write failure func Run(cmd *cobra.Command, n, m int) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } entries, err := store.ReadEntries() if err != nil { return err diff --git a/internal/cli/pad/cmd/normalize/doc.go b/internal/cli/pad/cmd/normalize/doc.go index 11adaf344..365af8845 100644 --- a/internal/cli/pad/cmd/normalize/doc.go +++ b/internal/cli/pad/cmd/normalize/doc.go @@ -7,6 +7,8 @@ // Package normalize implements the "ctx pad normalize" // subcommand for compacting stable entry IDs. // +// # Behavior +// // Pad entries carry stable IDs that persist across // additions and deletions. Over time, gaps accumulate // (e.g., 1, 3, 7) as entries are removed. 
Normalize @@ -16,10 +18,26 @@ // This is a deliberate user action, not automatic: it // invalidates any IDs the user may have noted or that // appear in prior session transcripts. The command -// prints a confirmation with the count of entries -// renumbered. +// should only be run when gap cosmetics matter, not +// as routine maintenance. +// +// When the pad is empty, the command prints an empty +// notice and exits without writing. +// +// # Flags +// +// None. This command takes no flags. +// +// # Output +// +// On success, prints a confirmation line showing the +// count of entries renumbered. When the pad has no +// entries, prints an empty-pad notice instead. +// +// # Delegation // // The underlying ID reassignment logic lives in // [parse.Normalize]; this package handles the CLI -// wiring, file I/O, and user output. +// wiring, file I/O via [store.WriteEntriesWithIDs], +// and user output via [writePad]. package normalize diff --git a/internal/cli/pad/cmd/normalize/run.go b/internal/cli/pad/cmd/normalize/run.go index f81d1cd15..897fe1339 100644 --- a/internal/cli/pad/cmd/normalize/run.go +++ b/internal/cli/pad/cmd/normalize/run.go @@ -11,6 +11,7 @@ import ( "github.com/ActiveMemory/ctx/internal/cli/pad/core/parse" "github.com/ActiveMemory/ctx/internal/cli/pad/core/store" + "github.com/ActiveMemory/ctx/internal/rc" writePad "github.com/ActiveMemory/ctx/internal/write/pad" ) @@ -22,6 +23,10 @@ import ( // Returns: // - error: Non-nil on read/write failure func Run(cmd *cobra.Command) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } entries, readErr := store.ReadEntriesWithIDs() if readErr != nil { return readErr diff --git a/internal/cli/pad/cmd/resolve/doc.go b/internal/cli/pad/cmd/resolve/doc.go index 4e0618889..e9c876649 100644 --- a/internal/cli/pad/cmd/resolve/doc.go +++ b/internal/cli/pad/cmd/resolve/doc.go @@ -4,9 +4,42 @@ // \ Copyright 2026-present Context contributors. 
// SPDX-License-Identifier: Apache-2.0 -// Package resolve implements the ctx pad resolve subcommand. +// Package resolve implements the "ctx pad resolve" +// subcommand for inspecting merge conflicts in the +// encrypted scratchpad. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Behavior +// +// When a git merge conflict occurs on the encrypted +// pad file, git cannot merge the binary ciphertext. +// Instead, ctx stores both sides as separate files +// (ours and theirs). This command decrypts and +// displays both sides so the user can decide which +// entries to keep. +// +// The command requires scratchpad encryption to be +// enabled. If encryption is off, it returns an error +// immediately. It loads the project encryption key +// and attempts to decrypt each conflict file. If +// both files are missing, it reports that no conflict +// exists. +// +// # Flags +// +// None. This command takes no flags. +// +// # Output +// +// For each conflict side that exists (ours, theirs), +// prints a labeled header followed by the decrypted +// entries in display format. Missing sides are +// silently skipped. If neither side exists, returns +// an error indicating no conflict files were found. +// +// # Delegation +// +// Decryption is performed by [padCrypto.DecryptFile]. +// Display formatting uses [coreResolve.DisplayAll]. +// Key loading goes through [crypto.LoadKey]. Output +// is routed through [writePad.ResolveSide]. 
package resolve diff --git a/internal/cli/pad/cmd/resolve/run.go b/internal/cli/pad/cmd/resolve/run.go index 8dfa3968e..17afb08ec 100644 --- a/internal/cli/pad/cmd/resolve/run.go +++ b/internal/cli/pad/cmd/resolve/run.go @@ -33,13 +33,21 @@ func Run(cmd *cobra.Command) error { return errPad.ResolveNotEncrypted() } - kp := store.KeyPath() + kp, kpErr := store.KeyPath() + if kpErr != nil { + cmd.SilenceUsage = true + return kpErr + } key, loadErr := crypto.LoadKey(kp) if loadErr != nil { return errCrypto.LoadKey(loadErr, kp) } - dir := rc.ContextDir() + dir, dirErr := rc.RequireContextDir() + if dirErr != nil { + cmd.SilenceUsage = true + return dirErr + } ours, errOurs := padCrypto.DecryptFile( key, dir, pad.EncOurs, diff --git a/internal/cli/pad/cmd/rm/doc.go b/internal/cli/pad/cmd/rm/doc.go index 239796f93..e0d8438bd 100644 --- a/internal/cli/pad/cmd/rm/doc.go +++ b/internal/cli/pad/cmd/rm/doc.go @@ -4,9 +4,40 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package rm implements the ctx pad rm subcommand. +// Package rm implements the "ctx pad rm" subcommand +// for removing entries from the scratchpad by stable +// ID. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Behavior +// +// The command accepts one or more positional arguments +// specifying entry IDs to remove. Arguments can be +// individual IDs or ranges (e.g., "3-5" expands to +// 3, 4, 5). All IDs are resolved against the current +// pad before any deletion occurs, preventing shift- +// induced mismatches when removing multiple entries +// in a single invocation. +// +// If any specified ID does not exist in the pad, the +// command returns an error without modifying anything. +// On success, the remaining entries keep their +// original stable IDs (no renumbering occurs). +// +// # Flags +// +// None. This command takes no flags. 
+// +// # Output +// +// Each successfully removed entry prints a one-line +// confirmation showing the removed ID. On failure, +// returns an error identifying the missing entry ID. +// +// # Delegation +// +// Argument parsing and range expansion are handled by +// [parse.IDs]. ID-to-index resolution uses +// [parse.FindByID]. Persistence goes through +// [store.WriteEntriesWithIDs]. User-facing output +// is routed through [pad.EntryRemoved]. package rm diff --git a/internal/cli/pad/cmd/rm/run.go b/internal/cli/pad/cmd/rm/run.go index 0fedc9d03..6baf04760 100644 --- a/internal/cli/pad/cmd/rm/run.go +++ b/internal/cli/pad/cmd/rm/run.go @@ -12,6 +12,7 @@ import ( "github.com/ActiveMemory/ctx/internal/cli/pad/core/parse" "github.com/ActiveMemory/ctx/internal/cli/pad/core/store" errPad "github.com/ActiveMemory/ctx/internal/err/pad" + "github.com/ActiveMemory/ctx/internal/rc" "github.com/ActiveMemory/ctx/internal/write/pad" ) @@ -26,6 +27,10 @@ import ( // Returns: // - error: Non-nil on invalid ID or read/write failure func Run(cmd *cobra.Command, ids []int) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } entries, readErr := store.ReadEntriesWithIDs() if readErr != nil { return readErr diff --git a/internal/cli/pad/cmd/root/doc.go b/internal/cli/pad/cmd/root/doc.go index 44c57ec91..beed8eb63 100644 --- a/internal/cli/pad/cmd/root/doc.go +++ b/internal/cli/pad/cmd/root/doc.go @@ -4,9 +4,40 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package root implements the ctx pad import subcommand. +// Package root implements the "ctx pad import" +// subcommand for bulk-loading entries into the +// scratchpad from files, stdin, or directories. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. 
+// # Behavior +// +// The command accepts exactly one positional argument: +// a file path, the literal "-" for stdin, or a +// directory path when used with the --blob flag. +// +// Without --blob, the command reads the source as a +// text file (or stdin stream) and imports each line +// as a separate plain-text entry. With --blob, it +// treats the argument as a directory and imports each +// file in that directory as a blob entry, using the +// filename as the blob label. +// +// # Flags +// +// --blob Import directory contents as blob +// entries instead of reading lines from +// a text file. +// +// # Output +// +// Delegates all output to the core/load package, +// which prints per-entry confirmations and a summary +// count. Errors from file I/O or oversized entries +// are returned to the caller. +// +// # Delegation +// +// Line-based imports are handled by [load.Lines]. +// Blob directory imports use [load.Blobs]. Both +// functions handle reading, entry creation, and +// persistence internally. package root diff --git a/internal/cli/pad/cmd/root/run.go b/internal/cli/pad/cmd/root/run.go index 1f89f84ce..9e51e3fe6 100644 --- a/internal/cli/pad/cmd/root/run.go +++ b/internal/cli/pad/cmd/root/run.go @@ -10,6 +10,7 @@ import ( "github.com/spf13/cobra" "github.com/ActiveMemory/ctx/internal/cli/pad/core/load" + "github.com/ActiveMemory/ctx/internal/rc" ) // Run imports entries into the scratchpad from a file, stdin, or directory. 
@@ -25,6 +26,10 @@ import ( // Returns: // - error: Non-nil on read/write failure func Run(cmd *cobra.Command, path string, blobs bool) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } if blobs { return load.Blobs(cmd, path) } diff --git a/internal/cli/pad/cmd/show/doc.go b/internal/cli/pad/cmd/show/doc.go index 4c6d522e6..d82b7d98a 100644 --- a/internal/cli/pad/cmd/show/doc.go +++ b/internal/cli/pad/cmd/show/doc.go @@ -4,9 +4,44 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package show implements the ctx pad show subcommand. +// Package show implements the "ctx pad show" subcommand +// for displaying a single scratchpad entry by stable +// ID. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Behavior +// +// The command accepts exactly one positional argument: +// the stable entry ID (1-based integer). It looks up +// the entry and prints its raw content to stdout +// without any numbering prefix, making the output +// suitable for piping into other commands: +// +// ctx pad edit 1 --append "$(ctx pad show 3)" +// +// For blob entries, the binary data is written to +// stdout by default. When the --out flag is set, the +// blob data is written to the specified file path +// instead. The --out flag is only valid for blob +// entries; using it with a plain-text entry returns +// an error. +// +// # Flags +// +// --out Write blob data to a file instead +// of stdout. Only valid for blob +// entries. +// +// # Output +// +// For plain-text entries, prints the entry content +// on stdout. For blobs without --out, writes raw +// binary data to stdout. For blobs with --out, prints +// a confirmation showing byte count and file path. +// +// # Delegation +// +// Entry lookup uses [parse.FindByID]. Blob detection +// and splitting are handled by [blob.Split]. 
File +// output uses [ctxIo.SafeWriteFile] with secret +// permissions. Display is routed through [pad]. package show diff --git a/internal/cli/pad/cmd/show/run.go b/internal/cli/pad/cmd/show/run.go index c28535c83..529bd1c42 100644 --- a/internal/cli/pad/cmd/show/run.go +++ b/internal/cli/pad/cmd/show/run.go @@ -16,6 +16,7 @@ import ( errFs "github.com/ActiveMemory/ctx/internal/err/fs" errPad "github.com/ActiveMemory/ctx/internal/err/pad" ctxIo "github.com/ActiveMemory/ctx/internal/io" + "github.com/ActiveMemory/ctx/internal/rc" "github.com/ActiveMemory/ctx/internal/write/pad" ) @@ -29,6 +30,10 @@ import ( // Returns: // - error: Non-nil on invalid ID, read or write failure func Run(cmd *cobra.Command, id int, outPath string) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } entries, readErr := store.ReadEntriesWithIDs() if readErr != nil { return readErr diff --git a/internal/cli/pad/cmd/tag/doc.go b/internal/cli/pad/cmd/tag/doc.go index f56a6d27c..a02f2f716 100644 --- a/internal/cli/pad/cmd/tag/doc.go +++ b/internal/cli/pad/cmd/tag/doc.go @@ -4,9 +4,41 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package tag implements the ctx pad tag subcommand. +// Package tag implements the "ctx pad tag" subcommand +// for listing tags found across scratchpad entries. // -// Key exports: [Cmd], [Run]. -// Follows the cmd/root + core taxonomy. -// Registered by the cmd parent command. +// # Behavior +// +// The command scans all scratchpad entries and +// extracts inline tags (e.g., #topic). It collects +// unique tags, counts how many entries each tag +// appears in, and displays them sorted alphabetically. +// +// When no tags are found, the command prints a notice +// and exits. When entries contain tags, each tag is +// printed with its occurrence count. 
+// +// # Flags +// +// --json Output the tag list as a JSON array +// of objects with "tag" and "count" +// fields instead of the default +// human-readable format. +// +// # Output +// +// Default mode prints one line per tag with the tag +// name and occurrence count. JSON mode prints a +// single JSON array to stdout, suitable for piping +// into jq or other tools. +// +// When no tags exist, prints a "no tags" notice +// regardless of output mode. +// +// # Delegation +// +// Tag extraction from individual entries is handled +// by [tag.Extract] in the core/tag package. Entry +// reading goes through [store.ReadEntries]. Output +// formatting is routed through [writePad]. package tag diff --git a/internal/cli/pad/cmd/tag/run.go b/internal/cli/pad/cmd/tag/run.go index da5fe866a..47f903476 100644 --- a/internal/cli/pad/cmd/tag/run.go +++ b/internal/cli/pad/cmd/tag/run.go @@ -14,6 +14,7 @@ import ( "github.com/ActiveMemory/ctx/internal/cli/pad/core/store" "github.com/ActiveMemory/ctx/internal/cli/pad/core/tag" + "github.com/ActiveMemory/ctx/internal/rc" writePad "github.com/ActiveMemory/ctx/internal/write/pad" ) @@ -26,6 +27,10 @@ import ( // Returns: // - error: Non-nil on read failure or JSON marshal error func Run(cmd *cobra.Command, jsonOut bool) error { + if _, ctxErr := rc.RequireContextDir(); ctxErr != nil { + cmd.SilenceUsage = true + return ctxErr + } entries, err := store.ReadEntries() if err != nil { return err diff --git a/internal/cli/pad/core/add/doc.go b/internal/cli/pad/core/add/doc.go index 23b0246ac..217ac6443 100644 --- a/internal/cli/pad/core/add/doc.go +++ b/internal/cli/pad/core/add/doc.go @@ -1,12 +1,40 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package add provides scratchpad entry append logic. +// Package add provides scratchpad entry creation logic +// with stable ID assignment. 
// -// Key exports: [Entry], [Blob]. -// Shared helpers used by sibling cmd/ packages. -// Used by core cmd/ packages. +// The "ctx pad add" command appends a new text or blob +// entry to the encrypted scratchpad. This package +// handles ID generation and entry construction without +// performing the final write. +// +// # Adding Text Entries +// +// [EntryWithID] loads the current scratchpad entries +// with their IDs via store.ReadEntriesWithIDs, computes +// the next available ID using parse.NextID, appends a +// new parse.Entry with the given text, and returns the +// updated slice together with the assigned ID. The +// caller (cmd layer) is responsible for encrypting and +// writing the result. +// +// # Adding Blob Entries +// +// [BlobWithID] reads a file from disk via SafeReadUserFile, +// validates that its size does not exceed MaxBlobSize, +// encodes it as a blob entry using blob.Make (base64 +// with a label prefix), and appends it with a stable +// ID. Like EntryWithID, it returns the updated entries +// and the new ID without writing. +// +// # ID Assignment +// +// Both functions use parse.NextID to find the smallest +// unused integer ID across existing entries. IDs are +// stable: deleting an entry does not reassign IDs to +// remaining entries. package add diff --git a/internal/cli/pad/core/blob/doc.go b/internal/cli/pad/core/blob/doc.go index 92178a9b0..7b9330264 100644 --- a/internal/cli/pad/core/blob/doc.go +++ b/internal/cli/pad/core/blob/doc.go @@ -1,12 +1,52 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package blob handles binary blob encoding and decoding within. +// Package blob handles binary blob encoding and +// decoding within scratchpad entries. // -// Key exports: [Contains], [Split], [Make], [DisplayEntry]. -// Shared helpers used by sibling cmd/ packages. -// Used by core cmd/ packages. 
+// Scratchpad entries can hold either plain text or +// binary file content. Binary content is stored as a +// base64-encoded string prefixed by a label and a blob +// separator token. This package provides the codec for +// that format. +// +// # Blob Format +// +// A blob entry is a single string in the form: +// +// resolves outside project root " -func OutsideRoot(dir, root string) error { - return fmt.Errorf( - desc.Text(text.DescKeyErrValidateContextOutsideRoot), dir, root, - ) +// - *NotFoundError: typed error for errors.As matching +func NotFound(path string) *NotFoundError { + return &NotFoundError{Dir: path} } // DirSymlink returns an error when .context/ is a symlink. // // Parameters: -// - dir: the context directory path +// - path: the context directory path // // Returns: -// - error: "context directory is a symlink" -func DirSymlink(dir string) error { +// - error: "context directory is a symlink" +func DirSymlink(path string) error { return fmt.Errorf( - desc.Text(text.DescKeyErrValidateContextDirSymlink), dir, + desc.Text(text.DescKeyErrValidateContextDirSymlink), path, ) } @@ -78,3 +215,50 @@ func FileSymlink(file string) error { desc.Text(text.DescKeyErrValidateContextFileSymlink), file, ) } + +// NotDeclared returns the standard "no context directory specified" +// error used by rc.RequireContextDir when CTX_DIR has not been +// declared. +// +// The returned message is tailored by how many .context/ candidates +// are visible from the caller's CWD, so users get a next-step hint +// specific to their situation: +// +// - zero candidates: suggest `ctx init`. +// - one candidate: name it as the likely target and suggest +// `eval "$(ctx activate)"`. +// - many candidates: list all of them and refer the user to +// `ctx activate` from a more specific cwd. +// +// The scan that produces candidates is read-only (rc.ScanCandidates) +// and never binds anything; resolution itself stays explicit. 
+// +// Parameters: +// - candidates: absolute paths of every visible .context/ +// directory, ordered innermost-first. Empty/nil when none. +// +// Returns: +// - error: a multi-line, actionable message ready to be returned +// from a Cobra Run function. +func NotDeclared(candidates []string) error { + switch len(candidates) { + case 0: + return errors.New(desc.Text(text.DescKeyErrContextNotDeclaredZero)) + case 1: + return fmt.Errorf( + desc.Text(text.DescKeyErrContextNotDeclaredOne), + candidates[0], + ) + default: + var b strings.Builder + for _, p := range candidates { + b.WriteString(token.Indent2) + b.WriteString(p) + b.WriteString(token.NewlineLF) + } + return fmt.Errorf( + desc.Text(text.DescKeyErrContextNotDeclaredMany), + strings.TrimRight(b.String(), token.NewlineLF), + ) + } +} diff --git a/internal/err/context/doc.go b/internal/err/context/doc.go index 3dea79470..1bf5413a0 100644 --- a/internal/err/context/doc.go +++ b/internal/err/context/doc.go @@ -4,9 +4,40 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package context provides error constructors for context directory. +// Package context defines the typed error constructors +// for .context/ directory validation. These errors +// fire during bootstrap when the CLI verifies that the +// context directory exists, is not a symlink, and +// resides within the project root. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [NotFound], [OutsideRoot], [DirSymlink], [FileSymlink]. +// # Domain +// +// Errors fall into two categories: +// +// - **Not found**: the .context/ directory does +// not exist. The [NotFoundError] struct +// implements the error interface and supports +// errors.As matching. Constructor: [NotFound]. +// - **Security validation**: the directory or a +// file inside it is a symlink. Constructors: +// [DirSymlink], [FileSymlink]. 
+// +// # Typed Error: NotFoundError +// +// [NotFoundError] is the only typed struct in the +// err/ tree that carries a Dir field. This lets +// callers distinguish "directory missing" from +// other errors with errors.As and inspect which +// path was checked. +// +// # Wrapping Strategy +// +// Security validators return plain errors (no cause +// wrapping) because the failure is a policy +// violation, not an IO failure. All user-facing +// text is resolved through [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package context diff --git a/internal/err/crypto/doc.go b/internal/err/crypto/doc.go index 8a34e438c..e0119b499 100644 --- a/internal/err/crypto/doc.go +++ b/internal/err/crypto/doc.go @@ -1,13 +1,43 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package crypto provides error constructors for encryption and key management. +// Package crypto defines the **typed error constructors** +// returned by [internal/crypto] and its consumers +// ([internal/pad], [internal/notify]). Every encryption, +// decryption, and key-management failure flows through +// one of these constructors. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [LoadKey], [EncryptFailed], [DecryptFailed], +// # Why Typed Errors +// +// - **Stability**: error categories are part of +// the public API. +// - **Routing**: write-side packages map error +// types to localized text via +// [internal/assets/read/desc]. +// - **Wrapping**: constructors wrap the +// underlying cause via `%w` so callers can +// `errors.Is` against system errors +// (`io.EOF`, `os.ErrNotExist`) when needed. 
+// +// # Public Surface +// +// Constructors (one per failure mode): +// [LoadKey], [EncryptFailed], [DecryptFailed], // [NoKeyAt], [SaveKey], [MkdirKeyDir]. +// +// # Why "NoKeyAt" Is Distinct from "LoadKey" +// +// "Key file does not exist yet" is the *normal* +// state on first use; consumers ([pad], [notify]) +// treat it as "generate one" rather than "fail". +// Other load failures (permission denied, wrong +// size) are real errors and surface through +// [LoadKey]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package crypto diff --git a/internal/err/date/doc.go b/internal/err/date/doc.go index b0e76ccb0..f0dbdcdac 100644 --- a/internal/err/date/doc.go +++ b/internal/err/date/doc.go @@ -4,9 +4,33 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package date provides error constructors for date parsing and validation. +// Package date defines the typed error constructors +// for date parsing and validation. These errors fire +// when the user supplies a malformed date string to +// flags like --since or --until, or when a date value +// in context metadata fails to parse. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [InvalidValue], [Invalid]. +// # Domain +// +// Two constructors cover the entire surface: +// +// - [InvalidValue]: a standalone date string +// does not match the expected YYYY-MM-DD format. +// Used during metadata validation. +// - [Invalid]: a date flag value fails to parse. +// Wraps the underlying time.Parse error and +// includes the flag name for context. +// +// # Wrapping Strategy +// +// [Invalid] wraps its cause with fmt.Errorf %w so +// callers can inspect the underlying parse error. +// [InvalidValue] returns a plain error because +// there is no system cause to chain. All user- +// facing text is resolved through +// [internal/assets/read/desc]. 
+// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package date diff --git a/internal/err/doc.go b/internal/err/doc.go index ec869bcb2..91fcd3312 100644 --- a/internal/err/doc.go +++ b/internal/err/doc.go @@ -4,9 +4,42 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package err provides shared error constructors for CLI commands. +// Package err is the root of the typed-error tree for +// the ctx CLI. Each child package under err/ defines +// domain-scoped error constructors for a single +// subsystem (e.g. [err/backup], [err/config], +// [err/journal]). // -// Errors that appear in multiple CLI packages belong here. This -// centralizes error message wording, makes duplicates visible, and -// lays the groundwork for sentinel errors. +// # Design +// +// The err/ tree separates error construction from +// error handling. Constructors live here; renderers +// live in internal/write/*. This split gives three +// benefits: +// +// - **Stable categories**: adding a new error +// constructor is an explicit, reviewable change. +// - **Localized text**: all user-facing wording +// is looked up through [internal/assets/read/desc] +// so that error messages are consistent and +// centrally maintained. +// - **Wrapping**: every constructor that accepts a +// cause wraps it with fmt.Errorf %w, so callers +// can use errors.Is / errors.As against system +// errors (os.ErrNotExist, io.EOF, etc.). +// +// # Package Layout +// +// Each child package exports: +// +// - Pure constructor functions (no state, no IO). +// - Occasionally a sentinel error variable (e.g. +// [err/schema.ErrDrift]). +// - Occasionally a typed error struct (e.g. +// [err/context.NotFoundError]). +// +// # Concurrency +// +// All constructors are pure functions. Concurrent +// callers never race. 
package err diff --git a/internal/err/drift/doc.go b/internal/err/drift/doc.go index 2036c2809..47fece335 100644 --- a/internal/err/drift/doc.go +++ b/internal/err/drift/doc.go @@ -4,9 +4,34 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package drift provides error constructors for drift detection. +// Package drift defines the typed error constructors +// for the drift detection subsystem. Drift detection +// scans context files for stale paths, broken +// cross-references, and constitution violations. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [Violations]. +// # Domain +// +// A single constructor covers the entire surface: +// +// - [Violations]: drift detection completed and +// found one or more violations. The CLI uses +// this as a non-zero exit signal after printing +// the violation report. +// +// This package is intentionally minimal. The drift +// scanner itself reports individual violations +// through the writer layer; this sentinel error +// only signals the aggregate outcome. +// +// # Wrapping Strategy +// +// [Violations] returns a plain errors.New value +// with no cause wrapping because the error +// represents a summary, not a single IO failure. +// All user-facing text is resolved through +// [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructor. Concurrent callers never race. package drift diff --git a/internal/err/fmt/doc.go b/internal/err/fmt/doc.go index dea375a44..ba9789847 100644 --- a/internal/err/fmt/doc.go +++ b/internal/err/fmt/doc.go @@ -4,10 +4,36 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package fmt provides error constructors for the fmt command. 
+// Package fmt defines the typed error constructors +// for the `ctx fmt` command, which normalizes +// whitespace, heading levels, and list markers +// across context files (TASKS.md, DECISIONS.md, +// LEARNINGS.md, CONVENTIONS.md). // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [NoContextDir], [FileRead], [FileWrite], [NoFiles], -// [NeedsFormatting]. +// # Domain +// +// Errors fall into three categories: +// +// - **Missing context**: the context directory +// does not exist or contains no files. +// Constructors: [NoContextDir], [NoFiles]. +// - **File IO**: a context file could not be +// read or written during formatting. +// Constructors: [FileRead], [FileWrite]. +// - **Check mode**: the formatter ran in +// --check mode and found files that need +// formatting. Constructor: [NeedsFormatting]. +// +// # Wrapping Strategy +// +// IO constructors ([FileRead], [FileWrite]) wrap +// their cause with fmt.Errorf %w so callers can +// errors.Is against system errors. Pure validation +// constructors return plain errors. All user-facing +// text is resolved through +// [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package fmt diff --git a/internal/err/fs/doc.go b/internal/err/fs/doc.go index 73958540e..8e35d0f86 100644 --- a/internal/err/fs/doc.go +++ b/internal/err/fs/doc.go @@ -1,13 +1,43 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package fs provides error constructors for filesystem operations. +// Package fs defines the **typed error constructors** +// for filesystem-level operations every other ctx +// package eventually performs: directory creation, +// reading, writing, amending. The package is the +// lowest level of the typed-error layer. 
// -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [Mkdir], [ReadDir], [DirNotFound], +// # Why Typed Errors +// +// - **Stability**: error categories are part of +// the public API. +// - **Routing**: write-side packages map error +// types to localized text via +// [internal/assets/read/desc]. +// - **Wrapping**: constructors wrap the +// underlying cause via `%w` so callers can +// `errors.Is(err, os.ErrNotExist)` against +// system errors when needed. +// +// # Public Surface +// +// Constructors: [Mkdir], [ReadDir], [DirNotFound], // [FileWrite], [FileRead], [FileAmend]. +// +// # When to Use [DirNotFound] vs [ReadDir] +// +// [DirNotFound] is for the actionable case "the +// directory the user expects to exist does not"; +// [ReadDir] wraps the underlying generic read +// failure (permission denied, IO error, etc.). +// The CLI surfaces them differently: the former +// suggests `ctx init`, the latter suggests +// checking permissions. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package fs diff --git a/internal/err/fs/fs.go b/internal/err/fs/fs.go index 6a718fb42..90752c320 100644 --- a/internal/err/fs/fs.go +++ b/internal/err/fs/fs.go @@ -206,20 +206,6 @@ func CreateDir(dir string, cause error) error { ) } -// BoundaryViolation wraps a boundary validation error with a hint -// to use --allow-outside-cwd. -// -// Parameters: -// - cause: the underlying validation error -// -// Returns: -// - error: "\nUse --allow-outside-cwd to override this check" -func BoundaryViolation(cause error) error { - return fmt.Errorf( - desc.Text(text.DescKeyErrFsBoundaryViolation), cause, - ) -} - // ReadFile wraps a file read failure. 
// // Parameters: diff --git a/internal/err/git/doc.go b/internal/err/git/doc.go index f1d6d8e4e..f4b407289 100644 --- a/internal/err/git/doc.go +++ b/internal/err/git/doc.go @@ -4,9 +4,32 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package git provides error constructors for git repository operations. +// Package git defines the typed error constructors +// for git repository detection and interaction. +// These errors fire when the CLI needs a git +// repository but either git is not installed or the +// working directory is not inside a repository. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [NotFound], [NotInRepo]. +// # Domain +// +// Two constructors cover the entire surface: +// +// - [NotFound]: the git binary is not on PATH. +// Returns a plain error with installation +// guidance loaded from the assets catalog. +// - [NotInRepo]: git rev-parse failed, meaning +// the current directory is not inside a git +// repository. Wraps the underlying exec error. +// +// # Wrapping Strategy +// +// [NotInRepo] wraps its cause with fmt.Errorf %w +// so callers can inspect the exec failure. +// [NotFound] returns a plain errors.New value. +// All user-facing text is resolved through +// [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package git diff --git a/internal/err/http/doc.go b/internal/err/http/doc.go index 279262d9e..3dd691a74 100644 --- a/internal/err/http/doc.go +++ b/internal/err/http/doc.go @@ -4,9 +4,34 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package http provides error constructors for HTTP client safety checks. +// Package http defines the typed error constructors +// for HTTP client safety checks. 
These errors fire +// when the CLI validates URLs before making outbound +// requests, enforcing scheme restrictions and +// redirect limits. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [UnsafeURLScheme], [ParseURL], [TooManyRedirects]. +// # Domain +// +// Three constructors cover the entire surface: +// +// - [UnsafeURLScheme]: a URL uses a scheme +// other than http or https. This is a security +// boundary that prevents file://, ftp://, or +// other protocol handlers from being invoked. +// - [ParseURL]: a URL string failed to parse. +// Wraps the underlying url.Parse error. +// - [TooManyRedirects]: an HTTP response chain +// exceeded the configured redirect limit. +// +// # Wrapping Strategy +// +// [ParseURL] wraps its cause with fmt.Errorf %w. +// The other two return plain errors because they +// represent policy violations, not IO failures. +// All user-facing text is resolved through +// [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package http diff --git a/internal/err/hub/doc.go b/internal/err/hub/doc.go index 1d9a6f906..cd8dc5d14 100644 --- a/internal/err/hub/doc.go +++ b/internal/err/hub/doc.go @@ -4,9 +4,37 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package hub provides error constructors for the hub subsystem. +// Package hub defines the typed error constructors +// for the hub subsystem, the background daemon +// that coordinates multi-project context sharing +// and peer synchronization. // -// Key exports: [GenerateToken], [InternalErr]. -// See source files for implementation details. -// Part of the internal subsystem. +// # Domain +// +// Errors fall into three categories: +// +// - **Token generation**: the hub failed to +// generate a cryptographic token for peer +// authentication. Constructor: [GenerateToken]. 
+// - **Internal errors**: a catch-all wrapper +// for unexpected failures inside the hub +// server. Constructor: [InternalErr]. +// - **Registration**: a project is already +// registered with the hub, or a peer action +// is unrecognized. Constructors: +// [DuplicateProject], [InvalidPeerAction]. +// +// # Wrapping Strategy +// +// [GenerateToken] and [InternalErr] wrap their +// cause with fmt.Errorf %w so callers can inspect +// the underlying crypto/rand or server error. +// [DuplicateProject] and [InvalidPeerAction] +// return plain formatted errors. All user-facing +// text is resolved through +// [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package hub diff --git a/internal/err/hub/hub.go b/internal/err/hub/hub.go index 2498acb5a..222e16033 100644 --- a/internal/err/hub/hub.go +++ b/internal/err/hub/hub.go @@ -4,7 +4,6 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package hub provides error constructors for the hub subsystem. package hub import ( diff --git a/internal/err/initialize/doc.go b/internal/err/initialize/doc.go index c51c0180b..3c1ee4004 100644 --- a/internal/err/initialize/doc.go +++ b/internal/err/initialize/doc.go @@ -4,11 +4,43 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package initialize provides error constructors for context initialization. +// Package initialize defines the typed error +// constructors for the `ctx init` command and +// bootstrap sequence. These errors fire when +// creating the .context/ directory, deploying +// templates, or verifying preconditions. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [NotInitialized], [ContextNotInitialized], -// [DetectReferenceTime], [DeployList], [DeployRead], -// [HomeDir], [ReadProjectReadme], [ReadTemplate]. 
+// # Domain +// +// Errors fall into four categories: +// +// - **Not initialized**: the project has no +// .context/ directory. Constructors: +// [NotInitialized], [ContextNotInitialized]. +// - **Environment**: the home directory cannot +// be resolved or ctx is not on PATH. +// Constructors: [HomeDir], [CtxNotInPath]. +// - **Template IO**: an embedded template or +// project README could not be read, or the +// Makefile could not be created. +// Constructors: [ReadTemplate], +// [ReadProjectReadme], [CreateMakefile]. +// - **Deployment**: listing or reading +// embedded files during template deployment +// failed. Constructors: [DeployList], +// [DeployRead], [DetectReferenceTime]. +// +// # Wrapping Strategy +// +// IO constructors wrap their cause with +// fmt.Errorf %w so callers can errors.Is against +// system errors. [NotInitialized] and +// [ContextNotInitialized] return plain errors +// because they signal a missing precondition, +// not an IO failure. All user-facing text is +// resolved through [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package initialize diff --git a/internal/err/journal/doc.go b/internal/err/journal/doc.go index db63ad535..cd9523a13 100644 --- a/internal/err/journal/doc.go +++ b/internal/err/journal/doc.go @@ -1,13 +1,34 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package journal provides error constructors for journal pipeline operations. +// Package journal defines the **typed error constructors** +// returned by the journal pipeline: state-file load +// and save failures, missing journal directories, and +// related operational errors that reach the user +// through `ctx journal *` subcommands. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. 
-// Exports: [LoadState], [SaveState], [LoadStateErr], -// [LoadStateFailed], [SaveStateFailed], [NoDir]. +// # Why Typed Errors +// +// - **Stability**: error categories are part of +// the public API. +// - **Routing**: write-side packages map error +// types to localized text via +// [internal/assets/read/desc]. +// - **Wrapping**: constructors wrap the +// underlying cause via `%w` so callers can +// `errors.Is` against system errors when +// needed. +// +// # Public Surface +// +// Constructors: [LoadState], [SaveState], +// [LoadStateErr], [LoadStateFailed], +// [SaveStateFailed], [NoDir]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package journal diff --git a/internal/err/mcp/doc.go b/internal/err/mcp/doc.go index 52091b647..437df9131 100644 --- a/internal/err/mcp/doc.go +++ b/internal/err/mcp/doc.go @@ -4,10 +4,34 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package mcp provides error constructors for MCP server operations. +// Package mcp defines the typed error constructors +// for the MCP (Model Context Protocol) server. These +// errors fire when MCP tool calls are missing +// required parameters, when the context directory +// cannot be read during search, or when an +// unrecognized session event type is received. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [QueryRequired], [SearchRead], [TypeContentRequired], -// [UnknownEventType]. +// # Domain +// +// Errors fall into two categories: +// +// - **Validation**: a required field is missing +// from a tool call payload. Constructors: +// [TypeContentRequired], [QueryRequired], +// [UnknownEventType]. +// - **Search IO**: the context directory could +// not be read during a search operation. +// Constructor: [SearchRead]. 
+// +// # Wrapping Strategy +// +// [SearchRead] wraps its cause with fmt.Errorf %w +// so callers can inspect the underlying read error. +// Validation constructors return plain errors. +// All user-facing text is resolved through +// [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package mcp diff --git a/internal/err/memory/doc.go b/internal/err/memory/doc.go index ddf034e7c..0c98b590b 100644 --- a/internal/err/memory/doc.go +++ b/internal/err/memory/doc.go @@ -1,13 +1,43 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package memory provides error constructors for memory bridge operations. +// Package memory defines the **typed error constructors** +// returned by [internal/memory] (the Claude Code auto- +// memory bridge) for discovery, diff, mirror, and +// publish failures. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [NotFound], [DiscoverFailed], [DiffFailed], -// [SelectContentFailed], [PublishFailed], [Read]. +// # Why Typed Errors +// +// - **Stability**: error categories are part of +// the public API. +// - **Routing**: write-side packages map error +// types to localized text via +// [internal/assets/read/desc]. +// - **Wrapping**: constructors wrap the +// underlying cause via `%w` so callers can +// `errors.Is` against system errors when +// needed. +// +// # Public Surface +// +// Constructors: [NotFound], [DiscoverFailed], +// [DiffFailed], [SelectContentFailed], +// [PublishFailed], [Read]. +// +// # Why "NotFound" Is Distinct from "DiscoverFailed" +// +// "Auto memory does not exist for this project" +// ([NotFound]) is a normal state Claude Code +// returns for projects with no recorded memory; +// the CLI surfaces it as "no memory yet, run a +// session first". 
Discover failures (path +// resolution errors, permission denied) are real +// errors that need user attention. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package memory diff --git a/internal/err/notify/doc.go b/internal/err/notify/doc.go index 28043039f..ab76bdaf6 100644 --- a/internal/err/notify/doc.go +++ b/internal/err/notify/doc.go @@ -4,10 +4,35 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package notify provides error constructors for webhook notifications. +// Package notify defines the typed error constructors +// for the webhook notification subsystem. These +// errors fire when configuring, persisting, or +// sending webhook notifications triggered by context +// changes. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [WebhookEmpty], [SaveWebhook], -// [LoadWebhook], [MarshalPayload], [SendNotification]. +// # Domain +// +// Errors fall into three categories: +// +// - **Validation**: the webhook URL is blank. +// Constructor: [WebhookEmpty]. +// - **Persistence**: saving or loading the +// encrypted webhook configuration failed. +// Constructors: [SaveWebhook], [LoadWebhook]. +// - **Delivery**: marshaling the JSON payload +// or sending the HTTP request failed. +// Constructors: [MarshalPayload], +// [SendNotification]. +// +// # Wrapping Strategy +// +// IO and delivery constructors wrap their cause +// with fmt.Errorf %w so callers can inspect the +// underlying error. [WebhookEmpty] returns a plain +// errors.New value. All user-facing text is +// resolved through [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. 
package notify diff --git a/internal/err/pad/doc.go b/internal/err/pad/doc.go index a8be47887..249f4f98f 100644 --- a/internal/err/pad/doc.go +++ b/internal/err/pad/doc.go @@ -4,10 +4,43 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package pad provides error constructors for encrypted scratchpad operations. +// Package pad defines the typed error constructors +// for the encrypted scratchpad (`ctx pad`). These +// errors fire during entry selection, editing mode +// validation, blob operations, merge conflict +// resolution, and scratchpad reads. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [EntryRange], [EditBlobTextConflict], -// [EditTextConflict], [EditNoMode]. +// # Domain +// +// Errors fall into four categories: +// +// - **Entry selection**: the requested entry +// index is out of range, not found by ID, or +// not a valid number. Constructors: +// [EntryRange], [EntryNotFound], +// [InvalidIndex]. +// - **Editing modes**: mutually exclusive edit +// flags were combined, or no mode was given. +// Constructors: [EditBlobTextConflict], +// [EditTextConflict], [EditNoMode]. +// - **Blob operations**: a blob-only flag was +// used on a text entry, or a file exceeds the +// size limit. Constructors: [NotBlobEntry], +// [OutFlagRequiresBlob], [FileTooLarge]. +// - **Conflict resolution**: the scratchpad is +// not encrypted, or no conflict files exist. +// Constructors: [ResolveNotEncrypted], +// [NoConflictFiles], [Read]. +// +// # Wrapping Strategy +// +// [Read] wraps its cause with fmt.Errorf %w. +// Validation constructors return plain errors +// because the failures are policy violations, not +// IO errors. All user-facing text is resolved +// through [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. 
package pad diff --git a/internal/err/parser/doc.go b/internal/err/parser/doc.go index ca45bb8c3..396e0d504 100644 --- a/internal/err/parser/doc.go +++ b/internal/err/parser/doc.go @@ -4,10 +4,38 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package parser provides error constructors for session transcript parsing. +// Package parser defines the typed error constructors +// for session transcript parsing. These errors fire +// when scanning, opening, or parsing AI tool session +// files (Claude Code JSONL, Aider markdown, etc.) +// and when validating frontmatter delimiters. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [ReadFile], [OpenFile], [NoMatch], -// [WalkDir], [FileError], [ScanFile]. +// # Domain +// +// Errors fall into three categories: +// +// - **Frontmatter**: a session file is missing +// its opening or closing --- delimiter. +// Constructors: [MissingOpenDelim], +// [MissingCloseDelim]. +// - **File IO**: a session file could not be +// read, opened, scanned, or walked. +// Constructors: [ReadFile], [OpenFile], +// [ScanFile], [WalkDir]. +// - **Parse**: no parser matches a file, JSON +// unmarshaling failed, or a per-file parse +// error occurred. Constructors: [NoMatch], +// [Unmarshal], [FileError], [ParseFile]. +// +// # Wrapping Strategy +// +// IO and parse constructors wrap their cause with +// fmt.Errorf %w so callers can inspect the +// underlying error. Frontmatter constructors +// return plain errors. All user-facing text is +// resolved through [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. 
package parser diff --git a/internal/err/prompt/doc.go b/internal/err/prompt/doc.go index 32b14f36e..fbc56cc5c 100644 --- a/internal/err/prompt/doc.go +++ b/internal/err/prompt/doc.go @@ -4,11 +4,35 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package prompt provides error constructors for prompt template operations. +// Package prompt defines the typed error constructors +// for prompt template operations. These errors fire +// when the CLI loads, lists, or validates embedded +// prompt templates used to generate CLAUDE.md and +// other tool-specific instruction files. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [NoTemplate], [ListTemplates], -// [ReadTemplate], [TemplateMissingMarkers], -// [MarkerNotFound]. +// # Domain +// +// Errors fall into two categories: +// +// - **Template IO**: an embedded template could +// not be found, listed, or read. +// Constructors: [NoTemplate], [ListTemplates], +// [ReadTemplate]. +// - **Template validation**: a template is +// missing required section markers (e.g. ctx +// or prompt markers). Constructors: +// [TemplateMissingMarkers], [MarkerNotFound]. +// +// # Wrapping Strategy +// +// IO constructors wrap their cause with +// fmt.Errorf %w so callers can inspect the +// underlying error. Validation constructors +// return plain formatted errors. All user-facing +// text is resolved through +// [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package prompt diff --git a/internal/err/reminder/doc.go b/internal/err/reminder/doc.go index d4a787857..998df22b4 100644 --- a/internal/err/reminder/doc.go +++ b/internal/err/reminder/doc.go @@ -4,9 +4,33 @@ // \ Copyright 2026-present Context contributors. 
// SPDX-License-Identifier: Apache-2.0 -// Package reminder provides error constructors for session reminder operations. +// Package reminder defines the typed error +// constructors for session reminder operations. +// These errors fire when reading, parsing, or +// looking up reminders stored in the context +// directory. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [Read], [Parse], [InvalidID], [NotFound], [IDRequired]. +// # Domain +// +// Errors fall into two categories: +// +// - **File IO**: the reminders file could not +// be read or parsed. Constructors: [Read], +// [Parse]. +// - **Lookup**: no reminder matches the given +// ID, or no ID was provided. Constructors: +// [NotFound], [IDRequired]. +// +// # Wrapping Strategy +// +// [Read] and [Parse] wrap their cause with +// fmt.Errorf %w so callers can inspect the +// underlying error. [NotFound] returns a plain +// formatted error. [IDRequired] returns a plain +// errors.New value. All user-facing text is +// resolved through [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package reminder diff --git a/internal/err/schema/doc.go b/internal/err/schema/doc.go index d330d647f..db78de370 100644 --- a/internal/err/schema/doc.go +++ b/internal/err/schema/doc.go @@ -4,11 +4,37 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package schema provides error constructors for schema -// validation. +// Package schema defines the typed error constructors +// and sentinel errors for schema validation. The +// primary export is the [ErrDrift] sentinel, which +// signals that JSONL schema drift was detected. // -// The sentinel ErrDrift is returned by the schema check command -// and the import integration when JSONL drift is detected. 
It -// signals a non-zero exit code without halting operation — drift -// warnings are informational, never blocking. +// # Domain +// +// A single sentinel and its constructor cover the +// entire surface: +// +// - [ErrDrift]: a package-level sentinel error +// variable. Callers can match it with +// errors.Is(err, schema.ErrDrift). +// - [Drift]: convenience constructor that +// returns the ErrDrift sentinel. +// +// The schema check command and the journal import +// pipeline both return ErrDrift when JSONL fields +// do not match the expected schema. Drift warnings +// are informational; they trigger a non-zero +// exit code but never block operations. +// +// # Wrapping Strategy +// +// ErrDrift is a plain errors.New sentinel with +// no cause wrapping. Its message text comes from +// [internal/config/schema.ErrMsgDrift]. +// +// # Concurrency +// +// The sentinel is a package-level variable +// initialized at import time. Safe for concurrent +// use. package schema diff --git a/internal/err/serve/doc.go b/internal/err/serve/doc.go index 2cc3806ec..a02a5c200 100644 --- a/internal/err/serve/doc.go +++ b/internal/err/serve/doc.go @@ -4,10 +4,33 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package serve provides error constructors for the serve -// subsystem, including hub daemon management. +// Package serve defines the typed error constructors +// for the serve subsystem, which manages the hub +// daemon lifecycle: PID file reading, process +// lookups, and graceful shutdown. // -// Key exports: [NoRunningHub], [InvalidPID], [Kill]. -// See source files for implementation details. -// Part of the internal subsystem. +// # Domain +// +// Three constructors cover the entire surface: +// +// - [NoRunningHub]: the PID file could not be +// read, meaning no hub daemon is running. +// Wraps the underlying read error. +// - [InvalidPID]: the PID file contents could +// not be parsed as a valid process ID. 
+// Wraps the underlying parse error. +// - [Kill]: sending a signal to the daemon +// process failed. Wraps the underlying +// os.Process.Kill error and includes the PID. +// +// # Wrapping Strategy +// +// All three constructors wrap their cause with +// fmt.Errorf %w so callers can inspect the +// underlying error. All user-facing text is +// resolved through [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package serve diff --git a/internal/err/session/doc.go b/internal/err/session/doc.go index bfcf68814..79da0e83f 100644 --- a/internal/err/session/doc.go +++ b/internal/err/session/doc.go @@ -4,10 +4,38 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package session provides error constructors for session lookup and selection. +// Package session defines the typed error constructors +// for session lookup and selection. These errors fire +// when scanning for AI tool sessions, resolving a +// session by ID or slug, or validating flag +// combinations on session subcommands. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [Find], [NotFound], [NoneFound], -// [AmbiguousQuery], [IDRequired], [AllWithID]. +// # Domain +// +// Errors fall into three categories: +// +// - **Scan failures**: the session scanner could +// not enumerate available sessions. +// Constructor: [Find]. +// - **Lookup**: no session matches the query, +// no sessions exist at all, or the query is +// ambiguous. Constructors: [NotFound], +// [NoneFound], [AmbiguousQuery], [IDRequired]. +// - **Flag validation**: mutually exclusive +// flags were combined (--all with a session ID +// or pattern, or an invalid --type value). +// Constructors: [AllWithID], [AllWithPattern], +// [EventInvalidType]. 
+// +// # Wrapping Strategy +// +// [Find] wraps its cause with fmt.Errorf %w so +// callers can inspect the underlying parser error. +// Lookup and validation constructors return plain +// errors. All user-facing text is resolved through +// [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package session diff --git a/internal/err/setup/doc.go b/internal/err/setup/doc.go index f6bd4147a..ecd8a7363 100644 --- a/internal/err/setup/doc.go +++ b/internal/err/setup/doc.go @@ -4,9 +4,33 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package setup provides error constructors for tool setup operations. +// Package setup defines the typed error constructors +// for the tool setup subsystem. These errors fire +// when `ctx setup` creates directories, writes +// configuration files, or syncs steering files for +// a specific AI tool (Claude Code, Aider, etc.). // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [CreateDir], [MarshalConfig], [WriteFile], [SyncSteering]. +// # Domain +// +// Four constructors cover the entire surface: +// +// - [CreateDir]: a setup directory could not be +// created. Wraps the underlying OS error. +// - [MarshalConfig]: the MCP configuration +// JSON could not be marshaled. +// - [WriteFile]: a setup file could not be +// written to disk. +// - [SyncSteering]: steering file sync failed +// during the setup sequence. +// +// # Wrapping Strategy +// +// All four constructors wrap their cause with +// fmt.Errorf %w so callers can inspect the +// underlying error. All user-facing text is +// resolved through [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. 
package setup diff --git a/internal/err/site/doc.go b/internal/err/site/doc.go index 99367c464..28b2ea8a6 100644 --- a/internal/err/site/doc.go +++ b/internal/err/site/doc.go @@ -4,9 +4,34 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package site provides error constructors for static site generation. +// Package site defines the typed error constructors +// for the static site generation subsystem. These +// errors fire when the `ctx site` command validates +// the zensical configuration, marshals Atom feeds, +// or checks for the zensical binary. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [NoConfig], [MarshalFeed], [ZensicalNotFound]. +// # Domain +// +// Three constructors cover the entire surface: +// +// - [NoConfig]: the zensical.toml configuration +// file is missing from the expected directory. +// - [MarshalFeed]: the Atom XML feed could not +// be marshaled from journal entries. Wraps the +// underlying encoding/xml error. +// - [ZensicalNotFound]: the zensical binary is +// not installed. Returns a plain error with +// installation instructions. +// +// # Wrapping Strategy +// +// [MarshalFeed] wraps its cause with fmt.Errorf %w. +// [NoConfig] returns a plain formatted error. +// [ZensicalNotFound] returns a plain errors.New +// value. All user-facing text is resolved through +// [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package site diff --git a/internal/err/skill/doc.go b/internal/err/skill/doc.go index ace31f991..968877b54 100644 --- a/internal/err/skill/doc.go +++ b/internal/err/skill/doc.go @@ -1,16 +1,39 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. 
// SPDX-License-Identifier: Apache-2.0 -// Package skill provides error constructors for skill operations. +// Package skill defines the **typed error constructors** +// returned by [internal/skill] (the install / list / +// load / remove engine) and its CLI consumers. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [CreateDest], [Install], [InvalidManifest], -// [InvalidYAML], [List], [Load], [MissingClosingDelimiter], -// [MissingName], [MissingOpeningDelimiter], [NotFound], -// [NotValidDir], [NotValidSource], [Read], [ReadDir], -// [Remove], [SkillLoad]. +// # Why Typed Errors +// +// - **Stability**: error categories are part of +// the public API. +// - **Routing**: write-side packages map error +// types to localized text via +// [internal/assets/read/desc]. +// - **Wrapping**: constructors wrap the +// underlying cause via `%w` so callers can +// `errors.Is` against system errors when +// needed. +// +// # Public Surface +// +// Constructors fall into three groups: +// +// - **Install / Remove**: [CreateDest], +// [Install], [NotFound], [Remove], [List], +// [ReadDir], [NotValidDir], [NotValidSource]. +// - **Load / Read**: [Load], [SkillLoad], +// [Read], [InvalidYAML]. +// - **Manifest validation**: [InvalidManifest], +// [MissingName], [MissingClosingDelimiter], +// [MissingOpeningDelimiter]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package skill diff --git a/internal/err/state/doc.go b/internal/err/state/doc.go index fc14fd963..a1c336f43 100644 --- a/internal/err/state/doc.go +++ b/internal/err/state/doc.go @@ -4,9 +4,34 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package state provides error constructors for runtime state persistence. +// Package state defines the typed error constructors +// for runtime state persistence. 
These errors fire +// when ctx reads, loads, or saves its internal state +// files (.context/.state/) which track things like +// last-run timestamps, import cursors, and feature +// flags. // -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [ReadingDir], [Load], [Save]. +// # Domain +// +// Three constructors cover the entire surface: +// +// - [ReadingDir]: the state directory could not +// be read. Wraps the underlying OS error. +// - [Load]: a state file could not be loaded +// (read + unmarshal). Wraps the underlying +// error. +// - [Save]: a state file could not be saved +// (marshal + write). Wraps the underlying +// error. +// +// # Wrapping Strategy +// +// All three constructors wrap their cause with +// fmt.Errorf %w so callers can inspect the +// underlying error. All user-facing text is +// resolved through [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package state diff --git a/internal/err/steering/doc.go b/internal/err/steering/doc.go index edf2ba343..d1ab3f769 100644 --- a/internal/err/steering/doc.go +++ b/internal/err/steering/doc.go @@ -1,17 +1,52 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package steering provides error constructors for steering operations. -// -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [ComputeRelPath], [ContextDirMissing], [CreateDir], -// [FileExists], [InvalidYAML], [MissingClosingDelimiter], -// [MissingOpeningDelimiter], [NoTool], [OutputEscapesRoot], -// [Parse], [ReadDir], [ReadFile], [ResolveOutput], -// [ResolveRoot], [SyncAll], [SyncName], [UnsupportedTool], -// [WriteFile], [WriteSteeringFile], [WriteInitFile]. 
+// Package steering defines the **typed error +// constructors** returned by [internal/steering]: +// frontmatter parse failures, sync target validation, +// path-boundary violations, and missing-tool errors. +// +// # Why Typed Errors +// +// - **Stability**: error categories are part of +// the public API. +// - **Routing**: write-side packages map error +// types to localized text via +// [internal/assets/read/desc]. +// - **Wrapping**: constructors wrap the +// underlying cause via `%w` so callers can +// `errors.Is` against system errors when +// needed. +// +// # Public Surface +// +// Constructors fall into three groups: +// +// - **Parse / IO**: [Parse], [InvalidYAML], +// [MissingClosingDelimiter], +// [MissingOpeningDelimiter], [ReadFile], +// [ReadDir], [WriteFile], [WriteSteeringFile], +// [WriteInitFile]. +// - **Sync**: [SyncAll], [SyncName], +// [UnsupportedTool], [NoTool], +// [ResolveOutput], [ResolveRoot], +// [OutputEscapesRoot], [ComputeRelPath], +// [CreateDir], [FileExists]. +// - **Context**: [ContextDirMissing]. +// +// # The Boundary Check +// +// [OutputEscapesRoot] is fired when a sync +// target's resolved absolute path would land +// outside the project root, a defensive check +// that prevents a malicious or buggy steering +// file from writing to arbitrary filesystem +// locations. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package steering diff --git a/internal/err/task/doc.go b/internal/err/task/doc.go index d5a0e5ef3..c4f8a3d74 100644 --- a/internal/err/task/doc.go +++ b/internal/err/task/doc.go @@ -4,10 +4,36 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package task provides error constructors for task file operations. +// Package task defines the typed error constructors +// for TASKS.md file operations. These errors fire +// when reading, writing, querying, or archiving +// tasks in the context directory. 
// -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [FileNotFound], [FileRead], [FileWrite], -// [MultipleMatches], [NotFound], [NoneCompleted]. +// # Domain +// +// Errors fall into three categories: +// +// - **File IO**: TASKS.md does not exist, or +// reading/writing it failed. Constructors: +// [FileNotFound], [FileRead], [FileWrite], +// [SnapshotWrite]. +// - **Query**: no task matches the search +// query, multiple tasks match, or no task was +// specified. Constructors: [NotFound], +// [MultipleMatches], [NoneSpecified], +// [NoMatch]. +// - **Archive**: there are no completed tasks +// to archive. Constructor: [NoneCompleted]. +// +// # Wrapping Strategy +// +// IO constructors ([FileRead], [FileWrite], +// [SnapshotWrite]) wrap their cause with +// fmt.Errorf %w. Query and validation constructors +// return plain errors. All user-facing text is +// resolved through [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package task diff --git a/internal/err/trace/doc.go b/internal/err/trace/doc.go index d80a45314..dd0f1d2f4 100644 --- a/internal/err/trace/doc.go +++ b/internal/err/trace/doc.go @@ -4,12 +4,40 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package trace provides error constructors for the -// trace command. -// -// Covers git log failures, commit resolution errors, -// and history/override write failures. -// Key exports: [GitLog], [NoteRequired], -// [ResolveCommit], [UnknownAction], [WriteHistory], -// [WriteOverride]. +// Package trace defines the typed error constructors +// for the `ctx trace` command, which manages git +// commit annotations, hook installation, and history +// recording for context-aware commit messages. 
+// +// # Domain +// +// Errors fall into four categories: +// +// - **Git operations**: git rev-parse or git +// log failed, or a commit ref could not be +// resolved. Constructors: [GitDir], [GitLog], +// [ResolveCommit]. +// - **Hook management**: a non-ctx hook already +// exists, or writing the hook script failed. +// Constructors: [HookExists], [HookWrite]. +// - **History / override IO**: writing the +// trace history or override file failed. +// Constructors: [WriteHistory], +// [WriteOverride]. +// - **Validation**: the --note flag is missing, +// or an unknown action was provided. +// Constructors: [NoteRequired], +// [UnknownAction]. +// +// # Wrapping Strategy +// +// IO constructors wrap their cause with +// fmt.Errorf %w so callers can inspect the +// underlying error. [NoteRequired] returns a +// plain errors.New value. All user-facing text +// is resolved through [internal/assets/read/desc]. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package trace diff --git a/internal/err/trigger/doc.go b/internal/err/trigger/doc.go index 30cc273aa..a9344dcb3 100644 --- a/internal/err/trigger/doc.go +++ b/internal/err/trigger/doc.go @@ -1,17 +1,56 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package trigger provides error constructors for trigger operations. -// -// Error constructors return structured errors with context for -// user-facing messages routed through internal/assets text lookups. -// Exports: [Chmod], [CreateDir], [DiscoverFailed], -// [EmbeddedTemplateNotFound], [Exit], [InvalidJSONOutput], -// [InvalidType], [MarshalInput], [NotFound], [OverrideExists], -// [RemoveOverride], [ResolveHooksDir], [ResolvePath], -// [ScriptExists], [Stat], [StatPath], [Timeout], [Unknown], -// [UnknownVariant], [Validate], [WriteScript], [WriteOverride]. 
+// Package trigger defines the **typed error +// constructors** returned by [internal/trigger]: every +// validation, discovery, and execution failure the +// trigger lifecycle can produce. +// +// # Why Typed Errors +// +// - **Stability**: error categories are part of +// the public API. +// - **Routing**: write-side packages map error +// types to localized text via +// [internal/assets/read/desc]. +// - **Wrapping**: constructors wrap the +// underlying cause via `%w` so callers can +// `errors.Is` against system errors when +// needed. +// +// # Public Surface +// +// Constructors fall into four groups: +// +// - **Validation**: [Validate], [InvalidType], +// [Symlink] (boundary check), +// [ResolveHooksDir], [ResolvePath], [Boundary], +// [Stat], [StatPath], [NotFound], +// [ScriptExists]. +// - **Discovery / Lifecycle**: [DiscoverFailed], +// [Chmod], [CreateDir], [Unknown], +// [UnknownVariant]. +// - **Override Management**: [OverrideExists], +// [WriteOverride], [RemoveOverride], +// [EmbeddedTemplateNotFound], [WriteScript]. +// - **Execution**: [Exit] (non-zero hook +// exit), [Timeout] (hook ran past the +// configured timeout), [InvalidJSONOutput] +// (hook stdout failed to parse), +// [MarshalInput] (input encoding failed). +// +// # Why So Many Constructors +// +// Triggers run **untrusted code** at a security- +// sensitive boundary. Every distinct failure mode +// gets its own typed error so the user-facing +// message is precise about *which* invariant the +// script violated and *what* to do about it. +// +// # Concurrency +// +// Pure constructors. Concurrent callers never race. package trigger diff --git a/internal/exec/daemon/doc.go b/internal/exec/daemon/doc.go index 9d5157eaa..de99620a5 100644 --- a/internal/exec/daemon/doc.go +++ b/internal/exec/daemon/doc.go @@ -4,10 +4,36 @@ // \ Copyright 2026-present Context contributors. 
// SPDX-License-Identifier: Apache-2.0 -// Package daemon provides process management for background -// hub server operation. +// Package daemon provides process management for +// background hub server operation. // -// Key exports: [Start]. -// See source files for implementation details. -// Part of the internal subsystem. +// # Starting a Daemon +// +// Start launches a detached background process with +// the given binary path and arguments. It returns the +// PID of the started process. The child process is +// fully detached from the parent session so it +// survives when the parent shell exits. +// +// pid, err := daemon.Start("/usr/bin/ctx", args) +// +// # Platform Detachment +// +// The detachAttrs function returns platform-specific +// SysProcAttr values: +// +// - Unix: sets Setsid to create a new session, +// making the child a session leader that is +// independent of the parent terminal. +// - Windows: sets CREATE_NEW_PROCESS_GROUP to +// disable CTRL+C propagation and HideWindow +// to suppress the console window flash. +// +// # Process Isolation +// +// The started process has nil stdout and stderr, +// ensuring it does not hold open the parent's file +// descriptors. This prevents the parent from hanging +// on exit while waiting for the child to close its +// output streams. package daemon diff --git a/internal/exec/gio/doc.go b/internal/exec/gio/doc.go deleted file mode 100644 index df7851132..000000000 --- a/internal/exec/gio/doc.go +++ /dev/null @@ -1,14 +0,0 @@ -// / ctx: https://ctx.ist -// ,'`./ do you remember? -// `.,'\ -// \ Copyright 2026-present Context contributors. -// SPDX-License-Identifier: Apache-2.0 - -// Package gio wraps GNOME GIO command execution. -// -// Used for mounting SMB shares via gio mount during backup -// operations. The mount target URL comes from user configuration. -// -// Key exports: [Mount]. -// Part of the exec subsystem. 
-package gio diff --git a/internal/exec/gio/mount.go b/internal/exec/gio/mount.go deleted file mode 100644 index 12ca22790..000000000 --- a/internal/exec/gio/mount.go +++ /dev/null @@ -1,27 +0,0 @@ -// / ctx: https://ctx.ist -// ,'`./ do you remember? -// `.,'\ -// \ Copyright 2026-present Context contributors. -// SPDX-License-Identifier: Apache-2.0 - -package gio - -import ( - "os/exec" - - "github.com/ActiveMemory/ctx/internal/config/archive" -) - -// Mount runs `gio mount` with the given URL. -// -// Parameters: -// - url: mount target (e.g. smb://host/share) -// -// Returns: -// - error: non-nil if gio is not found or the mount fails -func Mount(url string) error { - //nolint:gosec // G204: url is from user config - return exec.Command( - archive.GioBinary, archive.GioMount, url, - ).Run() -} diff --git a/internal/exec/git/doc.go b/internal/exec/git/doc.go index f5fbb3983..5ac26adc5 100644 --- a/internal/exec/git/doc.go +++ b/internal/exec/git/doc.go @@ -4,12 +4,47 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package git wraps git command execution behind typed functions. +// Package git wraps git command execution behind +// typed functions. // -// All exec.Command calls for git are centralized here. LookPath -// is checked once per call. Callers never import os/exec directly. +// All exec.Command calls for git are centralized +// here. LookPath is checked on every call. Callers +// never import os/exec directly for git operations. // -// Key exports: [Run], [Root], [RemoteURL], [LogSince], -// [LastCommitMessage], [DiffTreeHead]. -// Part of the exec subsystem. +// # Running Commands +// +// Run executes a git command with the given arguments +// and returns raw stdout output. +// +// out, err := git.Run("log", "--oneline") +// +// # Repository Queries +// +// Root returns the repository root directory for the +// current working directory. 
RemoteURL returns the +// origin remote URL for a given directory path (best +// effort, returns empty on error). +// +// root, err := git.Root() +// url := git.RemoteURL("/path/to/repo") +// +// # Log and Diff +// +// LogSince runs git log with a --since time filter. +// LastCommitMessage returns the full message of the +// most recent commit. DiffTreeHead lists files +// changed in HEAD. +// +// out, err := git.LogSince(since, "--oneline") +// msg, err := git.LastCommitMessage() +// files, err := git.DiffTreeHead() +// +// # HEAD Queries +// +// ShortHead returns the abbreviated commit hash for +// HEAD. CurrentBranch returns the current branch +// name (empty if detached or on error). +// +// hash := git.ShortHead() +// branch := git.CurrentBranch() package git diff --git a/internal/exec/sysinfo/doc.go b/internal/exec/sysinfo/doc.go index 967a693f3..7b6edadb1 100644 --- a/internal/exec/sysinfo/doc.go +++ b/internal/exec/sysinfo/doc.go @@ -4,14 +4,38 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package sysinfo provides helpers for executing system information -// commands (sysctl, vm_stat) used by the sysinfo collector. +// Package sysinfo provides helpers for executing +// system information commands used by the sysinfo +// collector. // -// This package centralizes os/exec calls for platform-specific -// system queries, keeping nolint:gosec annotations in one place. -// The commands executed are fixed strings with no user input, -// but are routed through internal/exec/ to satisfy the project -// convention of no exec.Command calls outside this tree. +// # macOS Commands // -// Key exports: [Sysctl], [VMStat]. +// Sysctl runs the sysctl command with the given +// arguments and returns raw stdout output. This is +// used to query hardware parameters like memory +// size and CPU core count. 
+// +// out, err := sysinfo.Sysctl("-n", "hw.memsize") +// +// VMStat runs the vm_stat command and returns raw +// stdout output. This is used to query virtual +// memory statistics like page counts and swap usage. +// +// out, err := sysinfo.VMStat() +// +// # Build Constraints +// +// The implementation is gated behind a darwin build +// tag. Other platforms would need their own files +// with equivalent queries (e.g., reading /proc on +// Linux). +// +// # Centralization +// +// This package centralizes os/exec calls for +// platform-specific system queries, keeping nolint +// annotations in one place. The commands are fixed +// strings with no user input, but are routed through +// internal/exec to satisfy the project convention +// of no exec.Command calls outside this tree. package sysinfo diff --git a/internal/exec/trigger/doc.go b/internal/exec/trigger/doc.go index 5076f45a1..a63fe8c7d 100644 --- a/internal/exec/trigger/doc.go +++ b/internal/exec/trigger/doc.go @@ -4,10 +4,32 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package trigger centralizes process execution for lifecycle trigger -// scripts. All exec.Command calls for trigger runners live here. +// Package trigger centralizes process execution for +// lifecycle trigger scripts. // -// [CommandContext] wraps exec.CommandContext to create a hook -// process with the given context and script path, providing -// a single point for testing and security auditing. +// # Command Creation +// +// CommandContext wraps exec.CommandContext to create +// a hook process with the given context and script +// path. The context enables timeout enforcement so +// that runaway hook scripts can be cancelled. +// +// cmd := trigger.CommandContext(ctx, "/path/hook.sh") +// cmd.Stdin = input +// cmd.Stdout = output +// err := cmd.Run() +// +// # Security +// +// The script path is validated by hook.ValidatePath +// before reaching this package. 
The exec.Command +// call carries a gosec nolint annotation since the +// path is caller-controlled and pre-validated. +// +// # Centralization +// +// All exec.Command calls for trigger runners live +// here, providing a single point for testing and +// security auditing. Callers wire stdin, stdout, +// and stderr on the returned exec.Cmd as needed. package trigger diff --git a/internal/exec/zensical/doc.go b/internal/exec/zensical/doc.go index 5d03183ea..41ed6f30c 100644 --- a/internal/exec/zensical/doc.go +++ b/internal/exec/zensical/doc.go @@ -4,9 +4,28 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package zensical wraps the zensical static site generator binary,. +// Package zensical wraps the zensical static site +// generator binary. // -// Key exports: [Run]. -// See source files for implementation details. -// Part of the exec subsystem. +// # Running Zensical +// +// Run launches zensical with a subcommand in the +// given working directory. It checks LookPath first +// and returns an error if the binary is not found. +// +// err := zensical.Run("/path/to/site", "build") +// err := zensical.Run("/path/to/site", "serve") +// +// # I/O Wiring +// +// The zensical process inherits the parent's stdout, +// stderr, and stdin so that build output and +// interactive prompts flow through to the user's +// terminal. +// +// # Error Handling +// +// If the zensical binary is not in PATH, Run returns +// a ZensicalNotFound error from the site error +// package. Other errors propagate from exec.Cmd.Run. package zensical diff --git a/internal/flagbind/doc.go b/internal/flagbind/doc.go index f1f024dd8..8550decb2 100644 --- a/internal/flagbind/doc.go +++ b/internal/flagbind/doc.go @@ -4,24 +4,54 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package flagbind provides helpers for cobra flag registration. -// -// All cobra flag registration must go through this package. 
Direct calls -// to cobra's Flags().StringVar, Flags().BoolVar, and similar methods are -// prohibited outside flagbind. This ensures every flag description is -// routed through the YAML-backed assets pipeline ([desc.Flag]) rather -// than hardcoded inline, keeping flag text localizable and consistent. -// -// Each helper accepts a descKey that maps to a YAML entry in -// internal/assets/commands/flags.yaml. Flag name constants come from -// internal/config/flag. -// -// Key exports: [BoolFlag], [BoolFlagP], [IntFlagP], -// [StringFlag], [StringFlagP], [StringFlagPDefault], [LastJSON]. -// -// Batch helpers ([BindStringFlagsP], [BindStringFlags], -// [BindBoolFlags], [BindBoolFlagsP], [BindStringFlagShorts], -// [BindStringFlagsPDefault]) register multiple flags of the -// same kind in a single call via parallel slices, replacing -// repetitive one-at-a-time registrations. +// Package flagbind provides helpers for cobra flag +// registration that enforce the YAML-backed description +// pipeline. +// +// All cobra flag registration must go through this +// package. Direct calls to cobra's Flags().StringVar, +// Flags().BoolVar, and similar methods are prohibited +// outside flagbind. This ensures every flag description +// is routed through [desc.Flag] rather than hardcoded +// inline, keeping flag text localizable and consistent. +// +// # Single-Flag Helpers +// +// Each helper accepts a descKey that maps to a YAML +// entry in internal/assets/commands/flags.yaml. Flag +// name constants come from internal/config/flag. +// +// - [BoolFlag], [BoolFlagP] register boolean flags +// with optional shorthand, defaulting to false. +// - [BoolFlagDefault] registers a boolean flag with +// a non-false default value. +// - [BoolFlagNoPtr], [BoolFlagShort] register flags +// retrieved later via cmd.Flags().GetBool(). +// - [IntFlag], [IntFlagP] register integer flags. +// - [DurationFlag] registers a time.Duration flag. 
+// - [StringFlag], [StringFlagP] register string +// flags with optional shorthand. +// - [StringFlagDefault], [StringFlagPDefault] +// register string flags with non-empty defaults. +// - [StringFlagShort] registers a no-pointer string +// flag with shorthand. +// - [StringArrayFlagP] registers repeatable string +// flags (--tag x --tag y). +// - [PersistentBoolFlag] registers a persistent bool +// flag inherited by children. +// - [LastJSON] registers the --last/--json pair for +// list-style commands. +// +// # Batch Helpers +// +// Batch functions register multiple flags of the same +// kind in a single call via parallel slices: +// +// - [BindStringFlagsP], [BindStringFlags] +// - [BindBoolFlags], [BindBoolFlagsP] +// - [BindStringFlagShorts] +// - [BindStringFlagsPDefault] +// +// All slice arguments must have matching lengths; +// each index produces one single-flag call. package flagbind diff --git a/internal/flagbind/flag.go b/internal/flagbind/flag.go index ee0ed96e5..f5aa6067a 100644 --- a/internal/flagbind/flag.go +++ b/internal/flagbind/flag.go @@ -164,38 +164,6 @@ func DurationFlag( ) } -// PersistentBoolFlag registers a persistent boolean -// flag with no shorthand, defaulting to false. -// -// Parameters: -// - c: Cobra command to register on -// - p: Pointer to the bool variable -// - name: Flag name constant -// - descKey: YAML DescKey for the flag description -func PersistentBoolFlag( - c *cobra.Command, p *bool, name, descKey string, -) { - c.PersistentFlags().BoolVar( - p, name, false, desc.Flag(descKey), - ) -} - -// PersistentStringFlag registers a persistent string -// flag with no shorthand, defaulting to empty string. 
-// -// Parameters: -// - c: Cobra command to register on -// - p: Pointer to the string variable -// - name: Flag name constant -// - descKey: YAML DescKey for the flag description -func PersistentStringFlag( - c *cobra.Command, p *string, name, descKey string, -) { - c.PersistentFlags().StringVar( - p, name, "", desc.Flag(descKey), - ) -} - // StringArrayFlagP registers a string array flag with a shorthand // letter. The flag can be repeated: --tag x --tag y. // diff --git a/internal/format/doc.go b/internal/format/doc.go index b934d132e..3110fb5aa 100644 --- a/internal/format/doc.go +++ b/internal/format/doc.go @@ -4,10 +4,55 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package format converts typed values into human-readable display. +// Package format converts typed Go values into the +// **human-readable display strings** ctx prints in CLI +// output, hook nudges, and journal headers: relative time +// ("3 hours ago"), durations ("23m 14s"), truncated previews, +// and grouped numbers ("1,234,567"). // -// Key exports: [TimeAgo], [Duration], [DurationAgo], [TruncateFirstLine], -// [Number]. -// See source files for implementation details. -// Part of the internal subsystem. +// The package is the small, well-tested layer below every +// renderer; centralizing the formatters keeps presentation +// consistent across the CLI and prevents subtle drift like +// "3h ago" in one place vs "3 hours ago" in another. +// +// # Public Surface +// +// - **[TimeAgo](t)**: relative time vs `now`: +// "just now", "5 minutes ago", "3 hours ago", +// "yesterday", "3 days ago", "Mar 12". The +// break-points and phrasing match what most CLIs +// have converged on. +// - **[Duration](d)**: formats a `time.Duration` as +// "23m 14s" / "2h 5m" / "3d 4h" depending on +// magnitude. Drops the smaller unit when the +// larger is ≥ 10 (so "12h 0m" → "12h"). 
+// - **[DurationAgo](d)**: convenience; takes a +// duration and renders the [TimeAgo] form for "now +// minus d". +// - **[TruncateFirstLine](text, n)**: returns the +// first line of `text`, truncated to `n` runes +// (rune-aware, not byte-aware) with an ellipsis +// when truncation occurs. +// - **[Number](n)**: thousands-grouped integer +// formatting ("1,234,567"). Uses comma regardless +// of locale (ctx is English-only at present). +// +// # Design Choices +// +// - **Rune-aware truncation**: byte truncation +// would split multi-byte characters and produce +// mojibake. [TruncateFirstLine] counts runes. +// - **Stable break-points**: relative-time +// phrasing is deterministic per input, so a +// re-render after a small clock advance does not +// produce noisy diffs in journal output. +// - **No localization**: single-locale today; +// when localization arrives, the per-locale +// phrase tables will plug in here without +// changing call sites. +// +// # Concurrency +// +// All functions are pure. Concurrent callers never +// race. package format diff --git a/internal/hub/cluster.go b/internal/hub/cluster.go index 8429c7425..050298b53 100644 --- a/internal/hub/cluster.go +++ b/internal/hub/cluster.go @@ -23,7 +23,7 @@ import ( // NewCluster creates a Raft cluster node for leader // election only. // -// Raft is NOT used for data consensus — entries are +// Raft is NOT used for data consensus; entries are // replicated via sequence-based gRPC sync. Raft only // determines which node is the current master. // diff --git a/internal/hub/doc.go b/internal/hub/doc.go index 8bd6ef166..a3474c8f7 100644 --- a/internal/hub/doc.go +++ b/internal/hub/doc.go @@ -4,17 +4,70 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package hub implements the ctx Hub server and client. 
+// Package hub implements the ctx Hub: a gRPC server +// that fans structured entries (decisions, learnings, +// conventions, tasks) across multiple ctx projects, +// plus the client primitives those projects use to +// talk to it. // -// The hub is a gRPC service that aggregates published entries -// (decisions, learnings, conventions) from multiple ctx instances -// and streams them to subscribers in real-time. +// # Architecture // -// Storage is append-only JSONL. Auth is token-based (admin token -// for registration, per-client tokens for RPCs). Connection config -// is encrypted locally using AES-256-GCM via [internal/crypto]. +// The package layers four concerns: // -// Key exports: [Store], [Entry], [Auth], [Server], [Client]. -// See source files for implementation details. -// Part of the internal subsystem. +// - Storage ([Store]): append-only JSONL with +// sequence numbers and per-client tokens. +// - Transport ([Server]): gRPC Register / Publish +// / Sync / Listen / Status RPCs. +// - Cluster ([Cluster]): HashiCorp Raft for leader +// election only (see Raft-Lite below). +// - Client ([Client]): connection registration, +// sync catch-up, push streaming, and ordered-peer +// failover. +// +// Supporting pillars: +// +// - Auth ([GenerateAdminToken], +// [GenerateClientToken]): bearer-token +// authentication on every RPC. +// - Validate ([ValidateEntry]): entry schema +// enforcement and provenance normalization. +// - Fan-out: internal broadcaster delivers each +// new entry to all live Listen subscribers. +// +// # Storage Model +// +// The store is append-only JSONL under a hub data +// directory: entries.jsonl (one [Entry] per line), +// clients.json (registered tokens and filters), and +// meta.json (schema version and admin token hash). +// Sequence numbers make replication and resume +// strictly idempotent. +// +// # Raft-Lite +// +// The package embeds HashiCorp Raft for leader +// election only, never for data consensus. 
Entry +// replication uses the sequence-based gRPC sync. +// Writes are durable on the leader at acceptance; +// followers catch up asynchronously. +// +// # Trust Model +// +// Every holder of a client token is trusted. Origin +// is self-asserted; there is no per-user attribution. +// The hub serves single-developer and small-team +// shapes, not public multi-tenant deployments. +// +// # Concurrency +// +// [Store] guards its indexes and appender with a +// single mutex. Listen streams subscribe to a +// fan-out channel; slow subscribers are dropped +// rather than blocking publishers. +// +// # Encryption +// +// Client-side connection state is encrypted at rest +// via AES-256-GCM using the same per-machine key +// that protects [internal/pad]. package hub diff --git a/internal/hub/failover.go b/internal/hub/failover.go index 812166be4..87129ceca 100644 --- a/internal/hub/failover.go +++ b/internal/hub/failover.go @@ -56,7 +56,7 @@ func newFailoverClient( if callErr != nil { _ = conn.Close() - // Fail fast on auth errors — same token + // Fail fast on auth errors; same token // won't work on other peers either. if authErr(callErr) { return nil, callErr diff --git a/internal/hub/fanout.go b/internal/hub/fanout.go index f67a7fb02..09b510269 100644 --- a/internal/hub/fanout.go +++ b/internal/hub/fanout.go @@ -59,7 +59,7 @@ func (f *fanOut) broadcast(entries []Entry) { select { case ch <- entries: default: - // Slow listener — disconnect to prevent loss. + // Slow listener: disconnect to prevent loss. delete(f.subs, ch) close(ch) f.dropped++ diff --git a/internal/hub/fsm.go b/internal/hub/fsm.go index f81eccbab..e9b60b98a 100644 --- a/internal/hub/fsm.go +++ b/internal/hub/fsm.go @@ -15,7 +15,7 @@ import ( // leaderFSM is a no-op finite state machine for Raft. // // Raft requires an FSM, but we only use Raft for leader -// election — data replication is handled separately via +// election; data replication is handled separately via // sequence-based gRPC sync. 
All FSM methods are no-ops. type leaderFSM struct{} diff --git a/internal/hub/types.go b/internal/hub/types.go index 3cfb8a80f..d18736cf3 100644 --- a/internal/hub/types.go +++ b/internal/hub/types.go @@ -19,7 +19,7 @@ import ( // Entry is the unit of sharing in the hub. // // Every published piece of context is an Entry. Entries are -// append-only — once published, never modified or deleted. +// append-only: once published, never modified or deleted. // Each entry gets a monotonically increasing sequence number // assigned by the hub. // diff --git a/internal/index/doc.go b/internal/index/doc.go index 9aab5f124..44ef09ba9 100644 --- a/internal/index/doc.go +++ b/internal/index/doc.go @@ -4,10 +4,60 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package index generates and parses indexes for context file entries. +// Package index generates and maintains the **quick-reference +// index tables** at the top of `DECISIONS.md` and +// `LEARNINGS.md`: the markdown tables wrapped in +// `` / `` markers that +// list every entry by ID, date, and title in chronological +// order. // -// Key exports: [ParseEntryBlocks], [ParseHeaders], -// [GenerateTable], [Update], [UpdateDecisions]. -// See source files for implementation details. -// Part of the internal subsystem. +// The index is the affordance that lets `ctx agent` send a +// **token-cheap** version of `DECISIONS.md` / `LEARNINGS.md` +// to the AI: instead of injecting the full prose for hundreds +// of entries, it injects only the index table. The agent +// scans the table, decides which entries it needs, and asks +// for those by ID. 
+// +// # The Index Format +// +// Each index row mirrors one entry block in the source file: +// +// | ID | Date | Title | +// |----|------|-------| +// | L-43 | 2026-04-12 | Lock acquisition order in fanout | +// +// Entry blocks in the source follow a strict shape: +// +// ## [YYYY-MM-DD-HHMMSS] Title text here +// +// [ParseHeaders] extracts the date + title pair from each +// `## [...]` header. [ParseEntryBlocks] returns full block +// metadata (start/end line, ID, date, title, body) so +// callers can grep, render, or rewrite individual entries. +// +// # Updating in Place +// +// [GenerateTable] turns a parsed entry list into the full +// markdown index (table header + rows). +// [Update](path, newTable) finds the marker pair in the +// existing file and replaces only the content between them, +// leaving the rest of the file untouched. If the markers are +// missing, [Update] inserts them under the H1 heading so the +// next run becomes idempotent. [UpdateDecisions] and the +// matching [UpdateLearnings] are convenience wrappers that +// know the canonical file paths. +// +// # Supersession +// +// An entry can be marked **superseded** by a later one +// (a body line starting with `**Status**: Superseded by +// L-99`). The parser tags such entries so renderers can +// gray-out / sort the index accordingly. +// +// # Concurrency +// +// The package is filesystem-IO at the boundary, pure data +// in the middle. Callers serialize updates externally +// (typically by holding the `.context/` directory +// implicitly through process-level execution). package index diff --git a/internal/index/index.go b/internal/index/index.go index 4e24b894a..55af9f079 100644 --- a/internal/index/index.go +++ b/internal/index/index.go @@ -4,7 +4,6 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package index provides index generation and parsing for context files. 
package index import ( diff --git a/internal/inspect/doc.go b/internal/inspect/doc.go index 9588c1e1c..fd6e656ae 100644 --- a/internal/inspect/doc.go +++ b/internal/inspect/doc.go @@ -4,10 +4,43 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package inspect provides general-purpose string predicates and position. +// Package inspect provides general-purpose string +// predicates and position-tracking utilities used +// throughout the ctx codebase for text scanning. // -// Key exports: [SkipNewline], [SkipWhitespace], [FindNewline], -// [EndsWithNewline], [Contains]. -// See source files for implementation details. -// Part of the internal subsystem. +// # Position Helpers (pos.go) +// +// Functions that advance a cursor through a string, +// handling both LF and CRLF line endings: +// +// - [SkipNewline] advances past a newline character +// (CRLF or LF) at the current position. Returns +// the position unchanged if no newline is present. +// - [SkipWhitespace] advances past any sequence of +// spaces, tabs, and newlines. +// - [FindNewline] returns the index of the first +// newline in a string, or -1 if none exists. +// +// # String Predicates (predicate.go) +// +// Boolean checks and index lookups for common text +// patterns: +// +// - [EndsWithNewline] reports whether a string ends +// with a newline (CRLF or LF). +// - [Contains] reports whether a substring exists +// and returns its index. +// - [ContainsNewLine] checks for any newline and +// returns its index. +// - [ContainsEndComment] checks for a comment close +// marker and returns its index. +// - [StartsWithCtxMarker] reports whether a string +// starts with a ctx start or end marker comment. +// +// # Design +// +// All functions are pure and safe for concurrent use. +// They use token and marker constants from the config +// packages rather than hardcoded literals, ensuring +// consistency with the rest of the codebase. 
package inspect diff --git a/internal/io/append.go b/internal/io/append.go index 9e8364745..a86fe48d1 100644 --- a/internal/io/append.go +++ b/internal/io/append.go @@ -6,33 +6,38 @@ package io -import ( - "os" - - cfgWarn "github.com/ActiveMemory/ctx/internal/config/warn" - logWarn "github.com/ActiveMemory/ctx/internal/log/warn" -) +import "os" // AppendBytes opens path in append mode, writes data, and closes. -// Errors are logged to stderr via log/warn — this is a best-effort -// operation for JSONL event logs and session stats where failures -// should not interrupt the caller. +// Returns the first non-nil error encountered among open, write, and +// close. Callers decide whether to propagate, log, or absorb. +// +// Previously this helper logged errors to stderr and returned void +// (best-effort), which conflated "the write succeeded" with "the +// write failed but you'll only know if you scroll stderr". Audit +// trails that depend on the append landing (event.Append, stat +// rollups) need the error to propagate so callers can honour a +// log-first ordering: if the record can't be written, downstream +// side effects should not pretend the event happened. // // Parameters: // - path: file path to append to (created if missing) // - data: bytes to append // - perm: file permission bits for creation -func AppendBytes(path string, data []byte, perm os.FileMode) { +// +// Returns: +// - error: non-nil on open, write, or close failure. When write +// succeeds but close fails, the close error is returned so +// disk-flush / fsync problems surface. 
+func AppendBytes(path string, data []byte, perm os.FileMode) error { f, openErr := SafeAppendFile(path, perm) if openErr != nil { - return + return openErr } - defer func() { - if closeErr := f.Close(); closeErr != nil { - logWarn.Warn(cfgWarn.Close, path, closeErr) - } - }() - if _, writeErr := f.Write(data); writeErr != nil { - logWarn.Warn(cfgWarn.Write, path, writeErr) + _, writeErr := f.Write(data) + closeErr := f.Close() + if writeErr != nil { + return writeErr } + return closeErr } diff --git a/internal/io/doc.go b/internal/io/doc.go index cb273e06c..a3670b713 100644 --- a/internal/io/doc.go +++ b/internal/io/doc.go @@ -4,51 +4,59 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package line provides guarded file I/O wrappers for ctx. -// -// # What these functions guard against -// -// All Safe* functions apply two checks before touching the filesystem: -// -// - Path cleaning: filepath.Clean removes redundant separators, -// dot segments, and trailing slashes. -// - System prefix rejection: the resolved absolute path is checked -// against a deny list of system directories (/bin, /etc, /proc, -// /sys, /dev, /boot, /lib, /sbin, /usr/bin, /usr/lib, /usr/sbin, -// and the filesystem root itself). Any match returns an error -// before the underlying syscall executes. -// -// SafeReadFile additionally enforces containment: the resolved path -// must stay within the provided base directory. -// -// # What these functions do NOT guard against -// -// - Symlink attacks: a cleaned path that passes the prefix check -// could still resolve to a different location through a symlink -// in a parent directory. Use [validation.CheckSymlinks] separately -// when the directory tree is untrusted. -// - Race conditions (TOCTOU): the check and the I/O are not atomic. -// A malicious actor with write access to the parent directory -// could swap a path between validation and use. 
-// - Permission escalation: these wrappers run with the calling -// process's permissions. They do not drop privileges. -// - Content validation: the wrappers check where data is read from -// or written to, not what the data contains. -// - Windows paths: the deny list uses Unix prefixes. On Windows, -// the prefix check is effectively a no-op. -// -// # Assumptions -// -// Callers are expected to provide paths that are already logically -// correct (e.g., constructed from known config constants or user -// input that has been validated for format). These wrappers are a -// safety net against accidental system directory access, not a -// substitute for input validation at the application boundary. -// -// # When to use which function -// -// - SafeReadFile: path is base + filename (boundary-checked read) -// - SafeReadUserFile: single path from any source (deny-list read) -// - SafeOpenUserFile: single path, need a file handle (deny-list open) -// - SafeWriteFile: single path (deny-list write) +// Package io provides guarded file I/O and HTTP +// wrappers for ctx. +// +// # Filesystem Guards +// +// All Safe* functions apply two checks before +// touching the filesystem: +// +// - Path cleaning: filepath.Clean removes redundant +// separators, dot segments, and trailing slashes. +// - System prefix rejection: the resolved absolute +// path is checked against a deny list of system +// directories (/bin, /etc, /proc, /sys, /dev, +// /boot, /lib, /sbin, /usr/bin, /usr/lib, +// /usr/sbin, and root itself). Any match returns +// an error before the syscall executes. +// +// # File Operations +// +// - [SafeReadFile] reads a file with containment: +// the resolved path must stay within a base dir. +// - [SafeReadUserFile] reads after deny-list check. +// - [SafeOpenUserFile] opens for reading after +// deny-list check (caller must close). +// - [SafeAppendFile] opens in append mode, creating +// the file if missing. +// - [SafeCreateFile] creates or truncates a file. 
+// - [SafeWriteFile] writes data after deny-list +// check. +// - [SafeMkdirAll] creates a directory tree after +// deny-list check. +// - [SafeStat] returns file info after deny-list +// check. +// - [TouchFile] creates or updates an empty marker +// file (best-effort, errors logged). +// - [AppendBytes] appends data and returns the +// first open/write/close error (for JSONL logs). +// +// # Formatted Output +// +// - [SafeFprintf] writes formatted output to a +// writer, logging errors to the warning sink. +// +// # HTTP +// +// - [SafePost] sends an HTTP POST with scheme +// validation (http/https only), redirect cap +// (max 3), and caller-specified timeout. +// +// # Limitations +// +// These wrappers do not guard against symlink attacks, +// TOCTOU race conditions, permission escalation, +// content validation, or Windows paths. See the +// function-level documentation for details. package io diff --git a/internal/journal/parser/copilot.go b/internal/journal/parser/copilot.go index 66ba9998a..1a551ce50 100644 --- a/internal/journal/parser/copilot.go +++ b/internal/journal/parser/copilot.go @@ -141,13 +141,13 @@ func (p *Copilot) ParseFile(path string) ([]*entity.Session, error) { sess = &s case copilotKindScalarPatch: - // Scalar property patch — apply to session + // Scalar property patch: apply to session if sess != nil { p.applyScalarPatch(sess, line.K, line.V) } case copilotKindObjectPatch: - // Array/object patch — apply to session + // Array/object patch: apply to session if sess != nil { p.applyPatch(sess, line.K, line.V) } diff --git a/internal/journal/parser/copilot_patch.go b/internal/journal/parser/copilot_patch.go index 2122d98be..3ec02f0e5 100644 --- a/internal/journal/parser/copilot_patch.go +++ b/internal/journal/parser/copilot_patch.go @@ -28,7 +28,7 @@ func (p *Copilot) applyScalarPatch( return } - // Handle requests..result patches — these contain token counts + // Handle requests..result patches: these contain token counts if path[0] ==
cfgCopilot.KeyRequests && len(path) == 3 && path[2] == cfgCopilot.KeyResult { idx, parseErr := strconv.Atoi(path[1]) diff --git a/internal/journal/parser/copilot_path.go b/internal/journal/parser/copilot_path.go index 237058ed5..0b06ac3cb 100644 --- a/internal/journal/parser/copilot_path.go +++ b/internal/journal/parser/copilot_path.go @@ -78,7 +78,7 @@ func fileURIToPath(uri string) string { decoded = path } - // On Windows, file URIs have /G:/... — strip the leading slash + // On Windows, file URIs have /G:/...; strip the leading slash if runtime.GOOS == env.OSWindows && len(decoded) > 2 && decoded[0] == '/' { decoded = decoded[1:] } diff --git a/internal/journal/parser/doc.go b/internal/journal/parser/doc.go index c67213c89..8910b5331 100644 --- a/internal/journal/parser/doc.go +++ b/internal/journal/parser/doc.go @@ -4,10 +4,101 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package parser auto-detects and parses session files from multiple tools. +// Package parser auto-detects and parses AI-coding-assistant session +// transcripts from multiple tool formats into a single normalized +// [github.com/ActiveMemory/ctx/internal/entity.Session] type the rest +// of the journal pipeline can consume uniformly. // -// Key exports: [NewClaudeCode], [NewMarkdownSession], [ParseFile], -// [ScanDirectory], [ScanDirectoryWithErrors]. -// See source files for implementation details. -// Part of the journal subsystem. +// # Why a Parser Layer +// +// Every AI tool ctx integrates with stores its session history in a +// different on-disk format: +// +// - **Claude Code** writes one JSONL file per project under +// `~/.claude/projects//*.jsonl`, with multiple sessions +// interleaved by `sessionId` field. +// - **Copilot** (VS Code) keeps a binary-ish chunked store in +// the workspace state directory. +// - **Copilot CLI** writes a different, JSON-with-metadata layout +// under its own home tree. 
+// - **MarkdownSession** is the round-trip format ctx itself +// produces when an enriched journal entry is *re-imported*; it +// parses the YAML frontmatter + body that +// `ctx journal import` produced earlier. +// +// Downstream consumers (`ctx journal source`, `ctx journal import`, +// the journal site builder, the obsidian exporter) should never +// have to know which tool wrote a file. They get back +// `[]*entity.Session` and work with that. +// +// # Public Surface +// +// Three entry points cover the common use cases: +// +// - [ParseFile](path): parse one file; returns all +// sessions it contains (a JSONL file may interleave many). +// - [ScanDirectory](dir): recursively walk a tree, +// parse every parseable file, return sessions sorted +// newest-first; per-file errors are swallowed so one bad file +// does not abort the scan. +// - [ScanDirectoryWithErrors](dir): same walk, but also +// returns a slice of (path, err) pairs for every parse failure +// so callers can surface them to the user. +// +// Tool-specific constructors ([NewClaudeCode], [NewCopilot], +// [NewCopilotCLI], [NewMarkdownSession]) are exported for callers +// that need to operate on a known format directly (tests, format +// converters, the schema validator). +// +// # Dispatch Mechanism +// +// All tool implementations satisfy the unexported `Session` +// interface (Tool, Matches, ParseFile, ParseLine). The package-level +// `registeredParsers` slice holds one instance of each. Dispatch is +// first-match-wins: [ParseFile] iterates the slice and asks each +// parser whether it `Matches(path)`. Implementations may check +// extension, directory shape, or peek at the first line; order in +// the slice matters when a file could plausibly match more than one +// (in practice, the four formats are disjoint). +// +// **Adding a new tool**: implement the four interface methods on a +// new type, then append a constructor call to `registeredParsers` +// in `parser.go`. 
No other changes are required. +// +// # Output Shape +// +// Every parser yields `*entity.Session` values populated with: +// +// - identity: ID, Slug, Tool, SourceFile +// - context: CWD, Project (basename of CWD), GitBranch +// - timing: StartTime, EndTime, Duration +// - content: a flat []Message in chronological order +// - rollups: TurnCount, FirstUserMsg (preview, truncated at +// [config/session.PreviewMaxLen]) +// +// [ScanDirectory] sorts the aggregated slice by `StartTime` +// descending so the most recent session lands at index 0, the +// invariant the journal CLI and site generator both rely on. +// +// # Error Handling +// +// Errors fall into three buckets: +// +// - **No matching parser**: [ParseFile] returns +// [internal/err/parser.NoMatch] when no registered parser claims +// the file. Callers should treat this as "skip", not "fail"; +// the directory may legitimately contain unrelated files. +// - **Per-file parse errors**: malformed JSON, truncated stream, +// unexpected schema. [ScanDirectory] swallows these silently; +// [ScanDirectoryWithErrors] surfaces them paired with the path +// for the caller to log. +// - **Filesystem errors**: walk-time IO errors (permission, +// device) are returned directly from the Scan functions and +// terminate the walk. +// +// # Concurrency +// +// Parsers are stateless; the `registeredParsers` slice is read-only +// after package init. A single parser instance is reused across all +// calls. Concurrent [ParseFile] / [ScanDirectory] calls are safe. 
package parser diff --git a/internal/journal/parser/markdown_test.go b/internal/journal/parser/markdown_test.go index ed272a70a..7a6438c3f 100644 --- a/internal/journal/parser/markdown_test.go +++ b/internal/journal/parser/markdown_test.go @@ -12,7 +12,7 @@ import ( "testing" "github.com/ActiveMemory/ctx/internal/config/session" - "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/testutil/testctx" ) func TestMarkdownSessionParser_Tool(t *testing.T) { @@ -236,8 +236,7 @@ func TestIsSessionHeader_CustomPrefix(t *testing.T) { } t.Cleanup(func() { _ = os.Chdir(origDir) }) - rc.Reset() - t.Cleanup(rc.Reset) + testctx.Declare(t, ctxrcDir) tests := []struct { name string diff --git a/internal/journal/parser/types.go b/internal/journal/parser/types.go index 45b7db557..6498e0692 100644 --- a/internal/journal/parser/types.go +++ b/internal/journal/parser/types.go @@ -4,12 +4,6 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package parser provides JSONL session file parsing for the recall system. -// -// It parses AI coding assistant session transcripts into structured Go types -// that can be rendered, searched, and analyzed. The package -// uses a tool-agnostic -// Session output type with tool-specific parsers (e.g., ClaudeCode). package parser import ( diff --git a/internal/journal/schema/doc.go b/internal/journal/schema/doc.go index ee4b87152..ebadf3da7 100644 --- a/internal/journal/schema/doc.go +++ b/internal/journal/schema/doc.go @@ -1,19 +1,69 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package schema validates Claude Code JSONL session files. -// -// Claude Code stores sessions as JSONL files with an undocumented, -// unversioned format that changes across releases. 
This package -// defines the expected record shape (known fields, record types, -// content block types) derived from empirical analysis, and -// validates raw lines against it to detect drift. -// -// Validation is strictly informational: it accumulates findings -// into a Collector but never blocks imports or other operations. -// Findings include unknown fields, missing required fields, -// unknown record types, and unknown content block types. +// Package schema is ctx's defense against the **silent +// drift** of Claude Code's session-file format. +// +// Claude Code stores sessions as JSONL files under +// `~/.claude/projects//` with an **undocumented, +// unversioned** record format that changes between +// releases. There is no schema URL, no version tag, no +// compatibility commitment. The only way to know whether +// a new Claude Code release added a field, removed a type, +// or quietly renamed a property is to compare empirical +// reality to a frozen reference shape, which is what this +// package does. +// +// # The Reference Shape +// +// [schema.go] declares the **expected** record shape +// derived from analysis of real session files: the set of +// known top-level fields, the set of known record `type` +// values, the set of known content-block types within +// `assistant` records, and the per-type required-field +// list. +// +// [build.go] / [check.go] / [validate.go] walk an actual +// JSONL file and accumulate findings into a [Collector]: +// +// - **Unknown fields**: a key the reference shape does +// not list (Claude added a property). +// - **Missing required fields**: a key the reference +// shape requires but the record omits (Claude +// removed a property; we may now silently drop data +// downstream). +// - **Unknown record types**: a `type` value not in +// the reference set (a new record kind appeared). +// - **Unknown content block types**: same, but for +// content blocks inside `assistant` records. 
+// +// # Strictly Informational +// +// Validation **never blocks** an import or other +// operation. Findings flow into [Report] which formats a +// markdown drift report consumed by `ctx doctor` and the +// release-prep runbook. The intent is "tell me when the +// upstream shape moved so I can update the parser", not +// "refuse to ingest anything we have not pre-blessed". +// +// # Updating the Reference +// +// When a new Claude Code release introduces fields the +// drift report flags: +// +// 1. Inspect the new records to confirm semantics. +// 2. Update the reference declarations in [schema.go]. +// 3. Update [internal/journal/parser] if the new +// fields carry session-relevant data. +// 4. Add a learning to LEARNINGS.md so the change is +// not repeated when reviewing the next release. +// +// # Concurrency +// +// All exported functions are pure data transformations +// over byte slices and `[]Finding`. Concurrent callers +// never race. package schema diff --git a/internal/journal/state/doc.go b/internal/journal/state/doc.go index 5d9248d4c..2318012db 100644 --- a/internal/journal/state/doc.go +++ b/internal/journal/state/doc.go @@ -4,9 +4,61 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package state manages journal processing state via an external JSON file. +// Package state manages the **journal processing state** +// stored in `.context/journal/.state.json`, a denormalized +// JSON index that tracks, for each raw session source, where +// it is in the import → normalize → enrich → wrap pipeline +// and whether it is locked against re-import. // -// Key exports: [Load], [ValidStages]. -// See source files for implementation details. -// Part of the journal subsystem. +// # Why an External State File +// +// The original design embedded markers in the journal files +// themselves (``, etc.). 
That broke +// when a journal entry's body legitimately contained one of +// those marker strings; the parser saw a false positive and +// concluded the entry had been processed when it had not. +// +// Moving state out of the file body fixes the false-positive +// problem and gives the importer a fast index it can scan +// without parsing every entry. +// +// # The State File Shape +// +// `.state.json` is a `map[sourceID]Record` where each +// [Record] tracks: +// +// - **stage**: current pipeline stage (one of +// [ValidStages]: imported, normalized, enriched, +// wrapped, indexed). +// - **locked**: true when the entry is protected +// from re-import regeneration. +// - **part**: for multipart entries, which part this +// record refers to. +// - **filename**: the on-disk filename the importer +// produced for this source. +// +// # Public Surface +// +// - **[Load](journalDir)**: reads `.state.json` and +// returns the deserialized map. Returns an empty +// map (not an error) when the file is missing; +// fresh projects have no state yet. +// - **[ValidStages]**: canonical stage names in +// pipeline order. Stage advancement is forward-only +// in normal flow; re-import (`--regenerate`) resets +// to "imported". +// +// # Sync With Frontmatter +// +// Frontmatter is the source of truth for `locked:`; the +// state file is a denormalized cache. `ctx journal sync` +// reconciles drift between the two so users who edit +// frontmatter directly see the importer respect the +// change on next run. +// +// # Concurrency +// +// File reads are scoped per call. Writes from the +// importer use the atomic-rename pattern so a partial +// write never produces a malformed JSON file. package state diff --git a/internal/journal/state/state.go b/internal/journal/state/state.go index e853fed7d..e9610c4fe 100644 --- a/internal/journal/state/state.go +++ b/internal/journal/state/state.go @@ -4,11 +4,6 @@ // \ Copyright 2026-present Context contributors. 
// SPDX-License-Identifier: Apache-2.0 -// Package state manages journal processing state via an external JSON file. -// -// Instead of embedding markers () inside journal -// files, which causes false positives when journal content includes those -// exact strings, state is tracked in .context/journal/.state.json. package state import ( diff --git a/internal/log/doc.go b/internal/log/doc.go index dddcf1729..2da3b8010 100644 --- a/internal/log/doc.go +++ b/internal/log/doc.go @@ -4,13 +4,32 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package log provides event logging and stderr warning subpackages. +// Package log provides event logging and stderr +// warning subpackages for ctx. // -// Subpackage [event] writes and queries timestamped JSONL event logs -// for hook lifecycle tracking with automatic rotation. Subpackage -// [warn] provides a centralized stderr sink for best-effort operations -// whose errors would otherwise be silently discarded. +// This package itself contains no exported symbols; +// all functionality lives in its subpackages. // -// This package itself contains no exported symbols; all functionality -// lives in the subpackages. +// # Subpackages +// +// [warn] provides a centralized stderr sink for +// best-effort operations whose errors would otherwise +// be silently discarded. Every non-fatal error in ctx +// flows through [warn.Warn] to keep warning output +// consistent across the codebase. +// +// The event subpackage writes and queries timestamped +// JSONL event logs for hook lifecycle tracking with +// automatic rotation. Events are written to +// .context/state/events.jsonl when event logging is +// enabled in .ctxrc. +// +// # Design Rationale +// +// ctx avoids the standard library's log package and +// third-party loggers. Instead, it uses structured +// JSONL for machine-readable events and a simple +// fprintf-to-stderr for human-readable warnings. 
+// This keeps the dependency surface small and gives +// each consumer explicit control over output format. package log diff --git a/internal/log/event/doc.go b/internal/log/event/doc.go index 9ab7ba4c9..300a197a3 100644 --- a/internal/log/event/doc.go +++ b/internal/log/event/doc.go @@ -1,17 +1,60 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package event provides JSONL event logging for hook lifecycle tracking. +// Package event implements the **JSONL hook event log**: +// the append-only on-disk record of every hook lifecycle +// event ctx generates so users can inspect, audit, or +// timeline what happened in a session. // -// [Append] writes timestamped entries to a rotating JSONL log file in -// the context state directory. [Query] reads entries back with optional -// filters for hook name, session ID, and count limits. Log rotation -// happens automatically when the file exceeds a size threshold. +// Two pieces of the system depend on it: // -// Key exports: [Append], [Query]. -// Used by hook handlers and the event query CLI to persist and retrieve -// lifecycle events. +// - **`ctx hook event`**: user-facing query: "what did +// the hooks do during the last session?". +// - **`ctx system check_persistence`** and friends: read +// the log to detect "you committed but never wrote a +// decision" patterns and nudge accordingly. +// +// # On-Disk Format +// +// The log lives at `.context/state/events.jsonl` and is +// **append-only JSONL**: one [Event] per line, written via +// [Append], rotated to `events.1.jsonl` when the file +// exceeds [config/event.LogMaxBytes] (1 MiB). At most one +// rotation generation is kept; older history is discarded. +// +// # Opt-In +// +// Logging is **disabled by default**; many users do not +// want hook activity persisted. 
[Append] is a noop when +// `event_log: false` in `.ctxrc`; setting it to `true` +// activates collection. The `ctx hook event` query +// gracefully reports "no events recorded" when the file is +// missing. +// +// # The Query Surface +// +// [Query](opts) reads both `events.jsonl` and the rotated +// `events.1.jsonl` (in chronological order), then applies +// the filters from [entity.EventQueryOpts]: +// +// - **Hook**: match a specific hook name +// (e.g. `check-persistence`). +// - **Session**: match a session ID prefix. +// - **Event**: match an event-type tag (`fired`, +// `relayed`, `blocked`, …). +// - **Last N**: keep only the most recent N matches +// (default [config/event.DefaultLast] = 50). +// +// # Concurrency +// +// [Append] uses an O_APPEND open which is atomic for +// small (sub-PIPE_BUF) writes on POSIX systems; the +// log line size we emit is well under that bound, so +// concurrent appenders interleave but never tear a line. +// [Query] reads a snapshot of the file; concurrent +// appends mid-read are tolerated (the worst case is a +// half-written final line that the JSONL decoder skips). package event diff --git a/internal/log/event/event.go b/internal/log/event/event.go index 0be6255f5..ae2798e1b 100644 --- a/internal/log/event/event.go +++ b/internal/log/event/event.go @@ -10,7 +10,6 @@ import ( "encoding/json" "os" "path/filepath" - "time" "github.com/ActiveMemory/ctx/internal/config/fs" "github.com/ActiveMemory/ctx/internal/config/project" @@ -24,56 +23,82 @@ import ( // Append writes a single event to the log file. // -// Noop when event logging is disabled in .ctxrc. Creates the state -// directory if it does not exist. Rotates the log when it exceeds -// EventLogMaxBytes. All errors are silently ignored: event logging -// must never break hook execution. +// # Log-First Principle +// +// The event log is the authoritative record of "what this hook did". 
+// Any hook path that emits an observable side effect (webhook, stdout +// marker, state mutation) must call Append FIRST and gate the side +// effect on the log landing. If the log write fails, the side effect +// must not fire: claiming success for an event we never recorded is +// the kind of silent drift this function used to produce before it +// returned an error. See docs/security/reporting.md → +// "Log-First Audit Trail" for the rationale and call-site pattern. +// +// Noop (nil) when event logging is disabled in .ctxrc. Creates the +// state directory if it does not exist. Rotates the log when it +// exceeds EventLogMaxBytes. // // Parameters: // - event: Event type (e.g., "relay", "nudge") // - message: Human-readable description // - sessionID: Claude session ID (may be empty) // - detail: Optional template reference (may be nil) -func Append(event, message, sessionID string, detail *entity.TemplateRef) { +// +// Returns: +// - error: non-nil on path resolution, state-dir creation, rotation, +// marshal, or append failure. Callers are expected to propagate +// this error and skip any downstream webhook / state / stdout +// side effects that would pretend the event happened. The Getwd +// failure path is the one intentional exception: it falls back +// to [project.FallbackName] and only warns to stderr, because +// the event itself is still recorded, just with a less specific +// project field. A missing CWD is never a reason to drop an +// event entry. +func Append( + event, message, sessionID string, + detail *entity.TemplateRef, +) error { if !rc.EventLog() { - return + return nil } - logPath := logFilePath() + logPath, pathErr := logFilePath() + if pathErr != nil { + return pathErr + } // Ensure state directory exists. stateDir := filepath.Dir(logPath) if mkErr := io.SafeMkdirAll(stateDir, fs.PermExec); mkErr != nil { - return + return mkErr } // Check rotation before appending. 
- rotate(logPath) + if rotateErr := rotate(logPath); rotateErr != nil { + return rotateErr + } projectName := project.FallbackName if cwd, cwdErr := os.Getwd(); cwdErr == nil { projectName = filepath.Base(cwd) } else { + // Documented fallback: record the event with a generic + // project name rather than dropping the entry entirely. logWarn.Warn(warn.Getwd, cwdErr) } - payload := entity.NotifyPayload{ - Event: event, - Message: message, - Detail: detail, - SessionID: sessionID, - Timestamp: time.Now().UTC().Format(time.RFC3339), - Project: projectName, - } + payload := entity.NewNotifyPayload( + event, message, sessionID, projectName, detail, + ) line, marshalErr := json.Marshal(payload) if marshalErr != nil { - return + return marshalErr } newline := token.NewlineLF[0] line = append(line, newline) - io.AppendBytes(logPath, line, fs.PermFile) + return io.AppendBytes(logPath, line, fs.PermFile) } // Query reads events from the log, applying filters. @@ -92,7 +117,10 @@ func Query(opts entity.EventQueryOpts) ([]entity.NotifyPayload, error) { // Read the rotated file first (older events) if requested. if opts.IncludeRotated { - prev := prevLogFilePath() + prev, prevErr := prevLogFilePath() + if prevErr != nil { + return []entity.NotifyPayload{}, nil + } events, readErr := readLogFile(prev) if readErr != nil { return nil, readErr @@ -101,7 +129,10 @@ func Query(opts entity.EventQueryOpts) ([]entity.NotifyPayload, error) { } // Read current log file. 
- current := logFilePath() + current, currentErr := logFilePath() + if currentErr != nil { + return []entity.NotifyPayload{}, nil + } events, readErr := readLogFile(current) if readErr != nil { return nil, readErr diff --git a/internal/log/event/event_test.go b/internal/log/event/event_test.go index 36d16ea99..5c65cf476 100644 --- a/internal/log/event/event_test.go +++ b/internal/log/event/event_test.go @@ -19,6 +19,7 @@ import ( "github.com/ActiveMemory/ctx/internal/config/fs" "github.com/ActiveMemory/ctx/internal/entity" "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/testutil/testctx" ) // setupTestDir creates a temporary directory, configures rc to use it, @@ -27,10 +28,8 @@ func setupTestDir(t *testing.T, enableLog bool) string { t.Helper() tmpDir := t.TempDir() - rc.Reset() - rc.OverrideContextDir(filepath.Join(tmpDir, dir.Context)) - - // Write .ctxrc to control event_log. + // Write .ctxrc at the project root (the parent of the .context/ + // that testctx will declare). rcContent := "event_log: false\n" if enableLog { rcContent = "event_log: true\n" @@ -41,12 +40,11 @@ func setupTestDir(t *testing.T, enableLog bool) string { t.Fatalf("failed to write .ctxrc: %v", writeErr) } - // Change to temp dir so rc loads the .ctxrc. 
origDir, _ := os.Getwd() if chErr := os.Chdir(tmpDir); chErr != nil { t.Fatalf("failed to chdir: %v", chErr) } - rc.Reset() // force reload with new cwd + testctx.Declare(t, tmpDir) t.Cleanup(func() { _ = os.Chdir(origDir) @@ -60,7 +58,9 @@ func TestAppend_Disabled(t *testing.T) { tmpDir := setupTestDir(t, false) logPath := filepath.Join(tmpDir, dir.Context, dir.State, event.FileLog) - Append("relay", "test message", "session-1", nil) + if err := Append("relay", "test message", "session-1", nil); err != nil { + t.Fatalf("Append: %v", err) + } if _, statErr := os.Stat(logPath); !os.IsNotExist(statErr) { t.Error("Append() created log file when event_log is disabled") @@ -72,7 +72,9 @@ func TestAppend_Basic(t *testing.T) { logPath := filepath.Join(tmpDir, dir.Context, dir.State, event.FileLog) detail := entity.NewTemplateRef("qa-reminder", "gate", nil) - Append("relay", "QA gate reminder", "session-1", detail) + if err := Append("relay", "QA gate reminder", "session-1", detail); err != nil { + t.Fatalf("Append: %v", err) + } data, readErr := os.ReadFile(logPath) //nolint:gosec // test file if readErr != nil { @@ -110,7 +112,9 @@ func TestAppend_CreatesStateDir(t *testing.T) { t.Fatal("state dir should not exist before AppendEvent") } - Append("nudge", "test", "", nil) + if err := Append("nudge", "test", "", nil); err != nil { + t.Fatalf("Append: %v", err) + } if _, statErr := os.Stat(stateDir); os.IsNotExist(statErr) { t.Error("Append() did not create state directory") @@ -138,7 +142,9 @@ func TestAppend_Rotation(t *testing.T) { } // AppendEvent should trigger rotation. - Append("relay", "after rotation", "", nil) + if err := Append("relay", "after rotation", "", nil); err != nil { + t.Fatalf("Append: %v", err) + } // Previous file should exist with the big content. 
if _, statErr := os.Stat(prevPath); os.IsNotExist(statErr) { @@ -185,7 +191,9 @@ func TestAppend_RotationOverwrite(t *testing.T) { t.Fatalf("failed to write big log: %v", writeErr) } - Append("relay", "new event", "", nil) + if err := Append("relay", "new event", "", nil); err != nil { + t.Fatalf("Append: %v", err) + } // The .1 file should now contain the rotated content, // not "old rotated content". @@ -213,12 +221,18 @@ func TestQuery_NoFile(t *testing.T) { func TestQuery_FilterHook(t *testing.T) { setupTestDir(t, true) - Append("relay", "qa gate", "s1", - entity.NewTemplateRef("qa-reminder", "gate", nil)) - Append("relay", "context load", "s1", - entity.NewTemplateRef("context-load-gate", "inject", nil)) - Append("nudge", "ceremonies", "s1", - entity.NewTemplateRef("check-ceremony", "both", nil)) + if err := Append("relay", "qa gate", "s1", + entity.NewTemplateRef("qa-reminder", "gate", nil)); err != nil { + t.Fatalf("Append: %v", err) + } + if err := Append("relay", "context load", "s1", + entity.NewTemplateRef("context-load-gate", "inject", nil)); err != nil { + t.Fatalf("Append: %v", err) + } + if err := Append("nudge", "ceremonies", "s1", + entity.NewTemplateRef("check-ceremony", "both", nil)); err != nil { + t.Fatalf("Append: %v", err) + } events, queryErr := Query(entity.EventQueryOpts{Hook: "qa-reminder"}) if queryErr != nil { @@ -235,9 +249,15 @@ func TestQuery_FilterHook(t *testing.T) { func TestQuery_FilterSession(t *testing.T) { setupTestDir(t, true) - Append("relay", "session one", "s1", nil) - Append("relay", "session two", "s2", nil) - Append("relay", "session one again", "s1", nil) + if err := Append("relay", "session one", "s1", nil); err != nil { + t.Fatalf("Append: %v", err) + } + if err := Append("relay", "session two", "s2", nil); err != nil { + t.Fatalf("Append: %v", err) + } + if err := Append("relay", "session one again", "s1", nil); err != nil { + t.Fatalf("Append: %v", err) + } events, queryErr := Query(entity.EventQueryOpts{Session: 
"s1"}) if queryErr != nil { @@ -252,7 +272,9 @@ func TestQuery_Last(t *testing.T) { setupTestDir(t, true) for i := 0; i < 20; i++ { - Append("relay", "event", "", nil) + if err := Append("relay", "event", "", nil); err != nil { + t.Fatalf("Append: %v", err) + } } events, queryErr := Query(entity.EventQueryOpts{Last: 5}) @@ -283,7 +305,9 @@ func TestQuery_IncludeRotated(t *testing.T) { } // Write event to current file. - Append("relay", "new event", "", nil) + if err := Append("relay", "new event", "", nil); err != nil { + t.Fatalf("Append: %v", err) + } // Without --all, only current events. events, _ := Query(entity.EventQueryOpts{}) diff --git a/internal/log/event/ops.go b/internal/log/event/ops.go index d5257314c..15a0afd2b 100644 --- a/internal/log/event/ops.go +++ b/internal/log/event/ops.go @@ -7,34 +7,49 @@ package event import ( + "errors" "os" "github.com/ActiveMemory/ctx/internal/config/event" - "github.com/ActiveMemory/ctx/internal/config/warn" - logWarn "github.com/ActiveMemory/ctx/internal/log/warn" ) // rotate checks the current log file size and renames it to the // previous-generation path when it exceeds [event.LogMaxBytes]. -// Best-effort: all errors are silently ignored so rotation never -// blocks event logging. +// +// Returns nil when there is nothing to do (log missing or under the +// size threshold), or when rotation succeeds. Any other failure is +// propagated so callers honour the log-first principle: if the log +// cannot be rotated, [Append] cannot safely continue writing and +// downstream side effects must not fire. // // Parameters: // - logPath: absolute path to the current event log -func rotate(logPath string) { +// +// Returns: +// - error: [os.ErrNotExist] from the Stat or Remove path is treated +// as "nothing to rotate" / "nothing to clean up" and returns nil. +// Any other stat, path, rename, or remove failure is surfaced. 
+func rotate(logPath string) error { info, statErr := os.Stat(logPath) if statErr != nil { - return // file doesn't exist yet, nothing to rotate + if errors.Is(statErr, os.ErrNotExist) { + return nil // nothing to rotate yet + } + return statErr } if info.Size() < int64(event.LogMaxBytes) { - return + return nil } - prevPath := prevLogFilePath() - if removeErr := os.Remove(prevPath); removeErr != nil { - logWarn.Warn(warn.Remove, prevPath, removeErr) + prevPath, prevErr := prevLogFilePath() + if prevErr != nil { + return prevErr } - if renameErr := os.Rename(logPath, prevPath); renameErr != nil { - logWarn.Warn(warn.Rename, logPath, renameErr) + if removeErr := os.Remove(prevPath); removeErr != nil { + if !errors.Is(removeErr, os.ErrNotExist) { + return removeErr + } + // ErrNotExist is fine: no previous generation to remove. } + return os.Rename(logPath, prevPath) } diff --git a/internal/log/event/path.go b/internal/log/event/path.go index 2951947eb..7f360103e 100644 --- a/internal/log/event/path.go +++ b/internal/log/event/path.go @@ -18,14 +18,24 @@ import ( // // Returns: // - string: path under the active context directory -func logFilePath() string { - return filepath.Join(rc.ContextDir(), dir.State, event.FileLog) +// - error: non-nil when the context directory is not declared +func logFilePath() (string, error) { + ctxDir, err := rc.ContextDir() + if err != nil { + return "", err + } + return filepath.Join(ctxDir, dir.State, event.FileLog), nil } // prevLogFilePath returns the absolute path to the rotated event log. 
// // Returns: // - string: path under the active context directory -func prevLogFilePath() string { - return filepath.Join(rc.ContextDir(), dir.State, event.FileLogPrev) +// - error: non-nil when the context directory is not declared +func prevLogFilePath() (string, error) { + ctxDir, err := rc.ContextDir() + if err != nil { + return "", err + } + return filepath.Join(ctxDir, dir.State, event.FileLogPrev), nil } diff --git a/internal/log/warn/doc.go b/internal/log/warn/doc.go index 2a7c0aedf..7efb54c40 100644 --- a/internal/log/warn/doc.go +++ b/internal/log/warn/doc.go @@ -4,13 +4,38 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package warn provides a centralized stderr warning sink for best-effort -// operations whose errors would otherwise be silently discarded. +// Package warn provides a centralized stderr warning +// sink for best-effort operations whose errors would +// otherwise be silently discarded. // -// [Warn] formats and writes a message to stderr. The sink is -// replaceable for testing. Callers use this instead of log.Println -// to keep warning output consistent across the codebase. +// # The Problem It Solves // -// Key exports: [Warn]. -// Used throughout ctx for non-fatal error reporting. +// Many ctx operations are fire-and-forget: closing +// file handles, removing temporary files, writing +// state markers, and appending to JSONL logs. When +// these fail, the error is not actionable by the +// caller, but silently swallowing it makes debugging +// harder. This package provides a single function +// that formats and emits the warning consistently. +// +// # Public Surface +// +// - [Warn] formats a message with Printf-style +// arguments, prefixes it with "ctx: ", appends +// a newline, and writes to the sink. Sink write +// failures are silently dropped because there is +// nowhere else to report them. +// +// # Sink Replacement +// +// The sink variable defaults to os.Stderr. 
Tests +// replace it with io.Discard to suppress output +// during test runs. +// +// # Usage Pattern +// +// Callers throughout ctx use warn.Warn in place of +// log.Println or fmt.Fprintf(os.Stderr, ...) to keep +// all warning output prefixed and formatted +// identically. package warn diff --git a/internal/log/warn/warn.go b/internal/log/warn/warn.go index b6b12cc36..d88abfa84 100644 --- a/internal/log/warn/warn.go +++ b/internal/log/warn/warn.go @@ -25,7 +25,7 @@ var sink io.Writer = os.Stderr // not be silently swallowed (file close, remove, state writes). // // The output is prefixed with "ctx: " and terminated with a -// newline. sink write failures are silently dropped — there is +// newline. Sink write failures are silently dropped; there is // nowhere else to report them. // // Parameters: diff --git a/internal/mcp/README.md b/internal/mcp/README.md index c0976e1e6..757ee6155 100644 --- a/internal/mcp/README.md +++ b/internal/mcp/README.md @@ -1,4 +1,4 @@ -# internal/mcp — MCP Server +# internal/mcp: MCP Server JSON-RPC 2.0 server exposing ctx context to any MCP-compatible AI tool over stdin/stdout. See `doc.go` for the full resource, @@ -44,14 +44,14 @@ mcp/ Three files, always: -1. **Define** in `server/def/tool/tool.go` — add entry to `Defs` +1. **Define** in `server/def/tool/tool.go`: add entry to `Defs` array with name, description, and `InputSchema` (JSON Schema for parameters) -2. **Implement** in `handler/tool.go` — add method on `Handler` +2. **Implement** in `handler/tool.go`: add method on `Handler` with signature `func (h *Handler) ToolName(args...) (string, error)` -3. **Route** in `server/route/tool/tool.go` — add case in the +3. **Route** in `server/route/tool/tool.go`: add case in the dispatch switch calling your handler method, wrap result with `out.ToolResult()` @@ -59,36 +59,36 @@ Three files, always: Same pattern, three files: -1. **Define** in `server/def/prompt/prompt.go` — add entry to +1. 
**Define** in `server/def/prompt/prompt.go`: add entry to `Defs` array with name, description, and arguments -2. **Build** in `server/route/prompt/prompt.go` — add builder +2. **Build** in `server/route/prompt/prompt.go`: add builder function returning `[]proto.PromptMessage` -3. **Route** in `server/route/prompt/dispatch.go` — add case in +3. **Route** in `server/route/prompt/dispatch.go`: add case in the dispatch switch ## How To Add a New Resource -1. **Register** in `server/catalog/data.go` — add URI-to-file +1. **Register** in `server/catalog/data.go`: add URI-to-file mapping -2. **Handle** in `server/resource/resource.go` — if it needs +2. **Handle** in `server/resource/resource.go`: if it needs special assembly (like the agent packet), add a reader function ## Key Design Decisions -- **handler/ has no JSON-RPC coupling** — all tool methods take +- **handler/ has no JSON-RPC coupling**: all tool methods take typed args and return `(string, error)`. Protocol translation happens in server/route/. This makes handler/ testable without stdin/stdout. -- **Single-threaded main loop** — one request at a time. Poller +- **Single-threaded main loop**: one request at a time. Poller runs in a background goroutine. Thread safety via mutex on stdout writer only. -- **Governance is advisory** — session state tracks tool calls and +- **Governance is advisory**: session state tracks tool calls and nudges (drift check, persist reminder) but never blocks execution. -- **Protocol version** — 2024-11-05. Capabilities advertised: +- **Protocol version**: 2024-11-05. Capabilities advertised: resources (subscribe=true), tools, prompts. diff --git a/internal/mcp/doc.go b/internal/mcp/doc.go index e8e2fea57..7968e4218 100644 --- a/internal/mcp/doc.go +++ b/internal/mcp/doc.go @@ -4,74 +4,78 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package mcp implements a Model Context Protocol (MCP) server for ctx. 
-// -// MCP is a standard protocol (JSON-RPC 2.0 over stdin/stdout) that allows -// AI tools to discover and consume context from external sources. This -// package exposes ctx's context files as MCP resources and ctx commands -// as MCP tools, enabling any MCP-compatible AI tool (Claude Desktop, -// Cursor, Windsurf, VS Code Copilot, etc.) to access project context -// without tool-specific integrations. +// Package mcp implements a Model Context Protocol server +// for ctx. +// +// MCP is a standard protocol (JSON-RPC 2.0 over +// stdin/stdout) that allows AI tools to discover and +// consume context from external sources. This package +// exposes ctx's context files as MCP resources and ctx +// commands as MCP tools, enabling any MCP-compatible AI +// tool (Claude Desktop, Cursor, Windsurf, VS Code +// Copilot, etc.) to access project context without +// tool-specific integrations. // // # Architecture // -// AI Tool → stdin → MCP Server → ctx internals -// AI Tool ← stdout ← MCP Server ← ctx internals +// AI Tool -> stdin -> MCP Server -> ctx internals +// AI Tool <- stdout <- MCP Server <- ctx internals // -// The server communicates via JSON-RPC 2.0 over stdin/stdout. +// The server communicates via JSON-RPC 2.0 over +// stdin/stdout. 
// // # Resources // // Resources expose context files as read-only content: // -// ctx://context/tasks → TASKS.md -// ctx://context/decisions → DECISIONS.md -// ctx://context/conventions → CONVENTIONS.md -// ctx://context/constitution → CONSTITUTION.md -// ctx://context/architecture → ARCHITECTURE.md -// ctx://context/learnings → LEARNINGS.md -// ctx://context/glossary → GLOSSARY.md -// ctx://context/agent → All files assembled in read order +// ctx://context/tasks -> TASKS.md +// ctx://context/decisions -> DECISIONS.md +// ctx://context/conventions -> CONVENTIONS.md +// ctx://context/constitution -> CONSTITUTION.md +// ctx://context/architecture -> ARCHITECTURE.md +// ctx://context/learnings -> LEARNINGS.md +// ctx://context/glossary -> GLOSSARY.md +// ctx://context/agent -> All files assembled // // # Tools // // Tools expose ctx commands as callable operations: // -// ctx_status → Context health summary -// ctx_add → Add a task, decision, learning, or convention -// ctx_complete → Mark a task as done -// ctx_drift → Detect stale or invalid context -// ctx_journal_source → Query past session history -// ctx_watch_update → Apply structured context updates to files -// ctx_compact → Move completed tasks to archive -// ctx_next → Get the next pending task -// ctx_check_task_completion → Nudge when a recent action may complete a task -// ctx_session_event → Signal session start/end lifecycle -// ctx_remind → List active reminders +// ctx_status -> Context health summary +// ctx_add -> Add a task, decision, etc. 
+// ctx_complete -> Mark a task as done +// ctx_drift -> Detect stale context +// ctx_journal_source -> Query session history +// ctx_watch_update -> Apply structured updates +// ctx_compact -> Archive completed tasks +// ctx_next -> Get next pending task +// ctx_check_task_completion -> Nudge on completion +// ctx_session_event -> Signal session lifecycle +// ctx_remind -> List active reminders // // # Prompts // -// Prompts provide pre-built templates for common workflows: +// Prompts provide pre-built templates: // -// ctx-session-start → Load full context at session start -// ctx-decision-add → Format an architectural decision entry -// ctx-learning-add → Format a learning entry -// ctx-reflect → Guide end-of-session reflection -// ctx-checkpoint → Report session statistics +// ctx-session-start -> Load full context +// ctx-decision-add -> Format a decision entry +// ctx-learning-add -> Format a learning entry +// ctx-reflect -> Guide reflection +// ctx-checkpoint -> Report session statistics // // # Usage // // server := mcp.New(contextDir, version) -// server.Serve() // blocks, reads stdin, writes stdout +// server.Serve() // blocks on stdin/stdout // // # Design Invariants // -// This implementation preserves all six ctx design invariants: +// This implementation preserves all ctx invariants: // -// - Markdown-on-filesystem: all state remains in .context/ files -// - Zero runtime dependencies: no external services required -// - Deterministic assembly: same files + budget = same output -// - Human authority: tools propose changes through file writes -// - Local-first: no network required for core operation -// - No telemetry: no data leaves the local machine +// - Markdown-on-filesystem: state in .context/ +// - Zero runtime dependencies +// - Deterministic assembly +// - Human authority +// - Local-first: no network required +// - No telemetry package mcp diff --git a/internal/mcp/handler/doc.go b/internal/mcp/handler/doc.go index e3dbe3f4c..83d1f2b7a 
100644 --- a/internal/mcp/handler/doc.go +++ b/internal/mcp/handler/doc.go @@ -4,10 +4,97 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package handler contains domain logic for MCP tool operations. +// Package handler holds the **domain logic** behind every MCP +// (Model Context Protocol) tool that ctx exposes to Claude +// Code and other MCP-compatible clients. // -// Functions accept typed Go parameters and return (string, error) pairs. -// The server package handles JSON-RPC protocol translation, argument -// extraction from MCP maps, and response wrapping. This separation -// keeps domain logic testable without protocol coupling. +// The package is intentionally protocol-free. Every exported +// function takes typed Go parameters (a `*entity.MCPDeps`, a +// path, a string, a struct) and returns `(string, error)`: +// the formatted user-facing reply and a Go error. The sister +// package [internal/mcp/server] handles JSON-RPC framing, +// argument extraction from `map[string]any`, and response +// wrapping. This split keeps the domain logic +// **unit-testable without standing up a server** and makes it +// reusable from non-MCP callers (notably the CLI's +// `ctx agent`). +// +// # The Tool Surface +// +// The functions in [tool.go] correspond one-to-one with the +// MCP tools advertised by the server. A non-exhaustive +// inventory: +// +// - [Status]: context summary (file list, +// token counts, drift signals). +// - **`ctx_add`**: add a task / decision / +// learning / convention. +// - **`ctx_complete`**: flip a task from `[ ]` to +// `[x]` via [taskComplete]. +// - **`ctx_compact`**: invoke [tidy] to archive +// done work. +// - **`ctx_drift`**: run [drift.Detect] and +// render the report. +// - **`ctx_journal_source`**: list raw session +// transcripts via [journal/parser]. +// - **`ctx_search`**: text search across context +// files via [internal/entry]. 
+// - **`ctx_remind`**: read/dismiss reminders via +// [remindStore]. +// - **`ctx_session_*`**: `session_start`, +// `session_end`, `session_event` lifecycle plumbing +// (covered in [session_hooks.go]). +// - **`ctx_steering_get`**: surface matched steering +// files via [steering.go] (see [internal/steering]). +// - **`ctx_check_task_completion`**: match recent file +// edits to open tasks. +// - **`ctx_watch_update`**: apply context updates the +// agent emits in `` blocks. +// +// Each function loads context fresh via [load.Do] when it +// needs current state; there is no per-tool cache. This +// keeps the response correct after edits the agent itself +// just made. +// +// # Governance: The Append-on-Every-Reply Layer +// +// [governance.go] implements the **governance trailer**: +// short, structured warnings that ride along with every MCP +// reply when the session has accumulated overdue work. +// [CheckGovernance] is invoked by the server **after** the +// tool has produced its answer; it consults the per-session +// state on `entity.MCPDeps`, drains the VS Code extension's +// violations file ([violations.go]), and assembles a +// newline-separated banner of nudges to append. +// +// The function is a free function rather than a method on +// `MCPSession` precisely because it does I/O (reading the +// violations file). `toolName` is passed in so the function +// can suppress redundant warnings, e.g. the drift warning +// is not appended to a `ctx_drift` response, since the user +// is already looking at it. +// +// # Violations Drain +// +// The Claude Code VS Code extension records hook-detected +// violations to a JSON file under the context dir. The +// handler reads it with [readViolations], surfaces the +// entries, and **truncates the file** so each violation +// surfaces exactly once. The JSON shape is +// [violationsData] / [violation]. 
+// +// # Session Hooks +// +// [session_hooks.go] implements the three lifecycle tools +// (`session_start`, `session_end`, `session_event`) the MCP +// client calls to mark transitions. They write to per-session +// state files under `state/` and emit nudge messages when the +// configured ceremonies have been skipped. +// +// # Concurrency +// +// Handler functions are reentrant; they hold no module-level +// state. Per-session state lives on [entity.MCPDeps] (passed +// in by the server) and on the per-session files in `state/`, +// which are written through the package's own append helpers. package handler diff --git a/internal/mcp/handler/governance.go b/internal/mcp/handler/governance.go index 0d0c53272..b600bb987 100644 --- a/internal/mcp/handler/governance.go +++ b/internal/mcp/handler/governance.go @@ -72,7 +72,7 @@ func CheckGovernance(d *entity.MCPDeps, toolName string) string { } } - // 4. Persist nudge — no context writes in a while + // 4. Persist nudge: no context writes in a while if ss.SessionStarted && ss.CallsSinceWrite >= governance.PersistNudgeAfter && toolName != tool.Add && toolName != tool.WatchUpdate && toolName != tool.Complete && toolName != tool.Compact && diff --git a/internal/mcp/handler/task/doc.go b/internal/mcp/handler/task/doc.go index b265a6f43..c27fbd505 100644 --- a/internal/mcp/handler/task/doc.go +++ b/internal/mcp/handler/task/doc.go @@ -4,9 +4,43 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package task iterates pending tasks from TASKS.md for MCP tool responses. +// Package task iterates pending tasks from TASKS.md +// for MCP tool responses. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [ForEachPending], [ContainsOverlap]. -// Exports: [ForEachPending], [ContainsOverlap]. +// This package parses TASKS.md lines and extracts +// top-level pending items, skipping completed sections +// and subtasks. 
It also provides word-overlap detection +// to check whether a recent action matches an existing +// task. +// +// # Iteration +// +// ForEachPending walks lines from TASKS.md, skipping +// the "## Completed" section and any subtask lines. +// Each top-level pending task is delivered to a visitor +// function. The visitor can return true to stop early. +// +// task.ForEachPending(lines, func(p Pending) bool { +// fmt.Println(p.Index, p.Content) +// return false // continue +// }) +// +// # Overlap Detection +// +// ContainsOverlap uses word-set intersection to check +// if a recent action description shares meaningful +// words with a task. It requires at least two +// significant words (length >= MinWordLen) to overlap +// before returning true. +// +// matched := task.ContainsOverlap( +// "added auth validation", +// "implement auth validation logic", +// ) +// +// # Types +// +// Pending holds the one-based index and content text +// of a pending top-level task discovered during +// iteration. package task diff --git a/internal/mcp/handler/tool.go b/internal/mcp/handler/tool.go index 0952631e6..694995482 100644 --- a/internal/mcp/handler/tool.go +++ b/internal/mcp/handler/tool.go @@ -33,7 +33,6 @@ import ( "github.com/ActiveMemory/ctx/internal/mcp/handler/task" "github.com/ActiveMemory/ctx/internal/mcp/server/stat" "github.com/ActiveMemory/ctx/internal/tidy" - "github.com/ActiveMemory/ctx/internal/validate" ) // Status loads context and returns a status summary. 
@@ -88,17 +87,11 @@ func Status(d *entity.MCPDeps) (string, error) { // // Returns: // - string: confirmation message with entry type and target file -// - error: boundary, validation, or write error +// - error: validation or write error func Add( d *entity.MCPDeps, entryType, content string, opts entity.EntryOpts, ) (string, error) { - if boundaryErr := validate.Boundary( - d.ContextDir, - ); boundaryErr != nil { - return "", boundaryErr - } - if writeErr := entry.ValidateAndWrite(entity.EntryParams{ Type: entryType, Content: content, @@ -131,14 +124,8 @@ func Add( // // Returns: // - string: confirmation message with completed task text -// - error: boundary or completion error +// - error: completion error func Complete(d *entity.MCPDeps, query string) (string, error) { - if boundaryErr := validate.Boundary( - d.ContextDir, - ); boundaryErr != nil { - return "", boundaryErr - } - completedTask, _, completeErr := taskComplete.Complete( query, d.ContextDir, ) @@ -295,16 +282,11 @@ func Recall( // // Returns: // - string: confirmation with file name and review status -// - error: boundary, validation, or write error +// - error: validation or write error func WatchUpdate( d *entity.MCPDeps, entryType, content string, opts entity.EntryOpts, ) (string, error) { - boundaryErr := validate.Boundary(d.ContextDir) - if boundaryErr != nil { - return "", boundaryErr - } - // Handle the "complete" type as a special case. 
if entryType == cfgEntry.Complete { completedTask, _, completeErr := taskComplete.Complete( @@ -367,14 +349,8 @@ func WatchUpdate( // // Returns: // - string: summary of moved tasks and cleaned sections -// - error: boundary, context load, or write error +// - error: context load or write error func Compact(d *entity.MCPDeps, archive bool) (string, error) { - if boundaryErr := validate.Boundary( - d.ContextDir, - ); boundaryErr != nil { - return "", boundaryErr - } - ctx, loadErr := load.Do(d.ContextDir) if loadErr != nil { return "", loadErr diff --git a/internal/mcp/proto/doc.go b/internal/mcp/proto/doc.go index 6f501ae88..4b8d412ff 100644 --- a/internal/mcp/proto/doc.go +++ b/internal/mcp/proto/doc.go @@ -4,9 +4,57 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package proto defines JSON-RPC 2.0 message types and MCP protocol. +// Package proto defines the **JSON-RPC 2.0** wire types and +// the **MCP** (Model Context Protocol) extension types ctx +// speaks with Claude Code and other MCP-compatible clients. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Provides constants and definitions for proto operations. -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). +// The package is wire-protocol-only: structs with `json:` +// tags, sentinel constants for method names and error codes, +// no logic. Domain behavior lives in +// [internal/mcp/handler]; transport in [internal/mcp/server]. +// +// # JSON-RPC 2.0 +// +// - **[Request]**: `jsonrpc`, `id`, `method`, +// `params`. `id` may be string, number, or null per +// the spec. +// - **[Response]**: `jsonrpc`, `id`, plus exactly +// one of `result` / `error`. +// - **[Error]**: `code`, `message`, optional `data`. +// The standard error codes ([CodeParseError], +// [CodeInvalidRequest], etc.) are exported as +// constants. +// - **[Notification]**: `Request` without an `id`, +// used for one-way messages (logging, progress). 
+// +// # MCP Extensions +// +// MCP layers these methods on top of JSON-RPC: +// +// - **`tools/list` / `tools/call`**: for tool +// dispatch. +// - **`prompts/list` / `prompts/get`**: for +// server-curated prompts. +// - **`resources/list` / `resources/read` / +// `resources/subscribe`**: for server-exposed +// resources. +// +// Each method has a typed request and response struct +// in this package: [ToolsCallRequest], +// [ToolsCallResponse], [Tool], [PromptsGetResponse], +// etc. +// +// # Stability +// +// The wire shape is fixed by external specifications +// (JSON-RPC 2.0 and the MCP spec). Changes here +// require coordinated client updates and should not +// happen casually. The audit suite watches for +// accidental field renames. +// +// # Concurrency +// +// All exports are immutable types. Encoding / +// decoding is goroutine-safe at the +// `encoding/json` boundary. package proto diff --git a/internal/mcp/server/catalog/doc.go b/internal/mcp/server/catalog/doc.go index f03a8fb36..20e030058 100644 --- a/internal/mcp/server/catalog/doc.go +++ b/internal/mcp/server/catalog/doc.go @@ -4,9 +4,48 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package catalog maps context files to MCP resource URIs and builds. +// Package catalog maps context files to MCP resource +// URIs and builds the resource list returned by the +// resources/list method. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [Init], [URI], [AgentURI], [FileForURI], [ToList]. -// Exports: [Init], [URI], [AgentURI], [FileForURI], [ToList]. +// # Initialization +// +// Init must be called once during server bootstrap +// before FileForURI is used. It populates an internal +// lookup map from the static resource table. 
+// +// catalog.Init() +// +// # URI Construction +// +// URI builds a full resource URI from a name suffix +// by prepending the configured URI prefix: +// +// catalog.URI("tasks") +// // => "ctx://context/tasks" +// +// AgentURI returns the URI for the assembled agent +// packet, which combines all context files into a +// single response. +// +// # Lookup +// +// FileForURI returns the context file name for a +// given resource URI, or empty string if the URI +// does not correspond to a known file resource. +// +// name := catalog.FileForURI(uri) +// +// # Resource List +// +// ToList constructs the immutable resource list that +// the server returns for resources/list requests. It +// includes all individual file resources plus the +// agent packet resource. +// +// # Types +// +// The mapping type pairs a context file name with its +// MCP resource name and human-readable description. +// The static table variable holds all known resources. package catalog diff --git a/internal/mcp/server/def/prompt/doc.go b/internal/mcp/server/def/prompt/doc.go index 0f47316be..f265d6b4e 100644 --- a/internal/mcp/server/def/prompt/doc.go +++ b/internal/mcp/server/def/prompt/doc.go @@ -4,9 +4,46 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package prompt defines MCP prompt definitions and entry builders. +// Package prompt defines MCP prompt definitions and +// their argument schemas. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [Defs]. -// Provides constants and definitions for prompt operations. +// The Defs variable holds all available MCP prompts +// that the server advertises through the prompts/list +// method. Each prompt has a name, description, and +// optional list of required arguments. +// +// # Available Prompts +// +// The following prompts are defined: +// +// - ctx-session-start: loads full project context +// at the beginning of a session. Takes no args. 
+// - ctx-decision-add: formats an architectural +// decision entry. Requires content, context, +// rationale, and consequence arguments. +// - ctx-learning-add: formats a learning entry. +// Requires content, context, lesson, and +// application arguments. +// - ctx-reflect: guides end-of-session reflection. +// Takes no arguments. +// - ctx-checkpoint: reports session statistics. +// Takes no arguments. +// +// # Argument Schema +// +// Prompts that accept arguments use PromptArgument +// structs with name, description, and required flag. +// The argument names match the CLI attribute names +// (e.g., "context", "rationale") so that values +// can flow between MCP clients and ctx commands. +// +// # Usage +// +// The Defs slice is consumed by the prompt list +// dispatcher, which returns it as-is in response to +// prompts/list requests. +// +// for _, p := range prompt.Defs { +// fmt.Println(p.Name, p.Description) +// } package prompt diff --git a/internal/mcp/server/def/tool/doc.go b/internal/mcp/server/def/tool/doc.go index e54a06e4e..f190961a2 100644 --- a/internal/mcp/server/def/tool/doc.go +++ b/internal/mcp/server/def/tool/doc.go @@ -1,12 +1,46 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package tool defines MCP tool definitions and shared property builders. +// Package tool defines the **MCP tool catalog** ctx +// advertises in `tools/list` responses: every callable +// MCP tool's schema, parameter definitions, and the +// shared property builders that keep parameter shapes +// consistent across tools. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [MergeProps], [EntryAttrProps], [Defs]. -// Exports: [MergeProps], [EntryAttrProps]. +// The package is the *catalog declaration*; the +// dispatch is in [internal/mcp/server/route/tool] and +// the actual logic is in [internal/mcp/handler]. 
+// +// # Public Surface +// +// - **[Defs]**: the slice of tool definitions +// advertised in `tools/list`. Each definition +// carries a name, description, JSON-schema +// parameters, and an "annotations" map for +// UI hints. +// - **[MergeProps](base, extra)**: composes two +// property maps so a tool can layer its +// specific arguments on top of the shared +// entry-attribute boilerplate. +// - **[EntryAttrProps]**: the canonical property +// map shared by `ctx_add` variants (priority, +// branch, commit, session-id, etc.) so the +// four entry-add tools have an identical +// argument shape. +// +// # Why a Definitions Package +// +// MCP clients consume `tools/list` once at session +// start and cache the schemas. Centralizing the +// declarations makes the surface stable across +// versions: dispatch and handler refactors do not +// change what the client sees. +// +// # Concurrency +// +// All exports are immutable. Safe for concurrent +// reads. package tool diff --git a/internal/mcp/server/dispatch/doc.go b/internal/mcp/server/dispatch/doc.go index b5b04ef28..0e56caf04 100644 --- a/internal/mcp/server/dispatch/doc.go +++ b/internal/mcp/server/dispatch/doc.go @@ -4,9 +4,41 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package dispatch routes MCP requests to domain-specific handlers. +// Package dispatch routes incoming MCP requests to +// domain-specific handlers based on the JSON-RPC +// method name. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [Do]. -// Exports: [Do]. +// # Routing +// +// Do is the single entry point for request dispatch. 
+// It receives a parsed JSON-RPC request and delegates +// to the appropriate handler: +// +// - initialize -> initialize.Dispatch +// - ping -> ping.Dispatch +// - resources/list -> resource.DispatchList +// - resources/read -> resource.DispatchRead +// - resources/subscribe -> resource.DispatchSubscribe +// - resources/unsubscribe -> resource.DispatchUnsubscribe +// - tools/list -> tool.DispatchList +// - tools/call -> tool.DispatchCall +// - prompts/list -> prompt.DispatchList +// - prompts/get -> prompt.DispatchGet +// +// Unrecognized methods fall through to the fallback +// handler, which returns a method-not-found error. +// +// # Dependencies +// +// Do accepts an entity.MCPDeps struct that carries +// runtime dependencies (context directory, token +// budget, session info) needed by domain handlers. +// It also takes the pre-built resource list and a +// poller for resource subscriptions. +// +// # Usage +// +// resp := dispatch.Do( +// version, deps, resList, poller, req, +// ) package dispatch diff --git a/internal/mcp/server/dispatch/poll/doc.go b/internal/mcp/server/dispatch/poll/doc.go index d67accb84..d4aa41dca 100644 --- a/internal/mcp/server/dispatch/poll/doc.go +++ b/internal/mcp/server/dispatch/poll/doc.go @@ -1,12 +1,45 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package poll watches subscribed resources for file changes and. +// Package poll implements the **resource-change watcher** +// behind the MCP `resources/subscribe` notification. When +// a client subscribes to one or more `.context/` files, +// this package polls their mtimes and emits a +// `notifications/resources/updated` JSON-RPC message +// when any of them changes. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [NewPoller]. -// Exports: [NewPoller]. 
+// MCP supports change notifications, but the underlying +// substrate (a polling watcher) is opaque to the client. +// This package is that substrate. +// +// # Public Surface +// +// - **[NewPoller](paths, intervalMs)**: builds a +// poller for the given file paths with the +// given polling interval. +// - **Poller methods**: `Start(ctx, ch)` to +// begin emitting change events on `ch`, +// `Stop()` to cease, `Update(paths)` to swap +// the watch set without restart. +// +// # Why Polling, Not fsnotify +// +// Polling at ~1 Hz is reliable across every +// platform ctx supports (Linux, macOS, Windows) +// without per-platform watcher quirks (file +// renames, fsync timing, cross-FS edge cases). +// MCP's notification cadence does not need +// sub-second precision; "saw a change within a +// second" is enough. +// +// # Concurrency +// +// `Start` spawns a single goroutine that ticks +// on the configured interval; `Stop` signals it +// via context cancellation. Concurrent calls to +// `Update` are serialized through the poller's +// mutex. package poll diff --git a/internal/mcp/server/doc.go b/internal/mcp/server/doc.go index 8b5347c83..8973c8bdb 100644 --- a/internal/mcp/server/doc.go +++ b/internal/mcp/server/doc.go @@ -1,12 +1,85 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package server implements the MCP server that exposes ctx context. +// Package server implements the **Model Context Protocol +// (MCP) server** that exposes ctx context, commands, and +// session-lifecycle hooks to MCP-compatible AI clients, +// primarily Claude Code, but also any other tool that speaks +// the same JSON-RPC 2.0 dialect. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [New]. -// Exports: [New]. +// The server runs over **stdin/stdout** as a sub-process +// launched by the AI client; it does not bind a network port. 
+// Spawn behavior is configured by the client's MCP block (see +// [internal/cli/setup] for what `ctx setup` writes into each +// tool's config). +// +// # Wire Protocol +// +// MCP is JSON-RPC 2.0 with three core verbs ctx implements: +// +// - **`tools/list`**: advertise the catalog of MCP tools +// this server provides (`ctx_status`, `ctx_add`, +// `ctx_complete`, `ctx_drift`, `ctx_journal_source`, +// `ctx_search`, `ctx_steering_get`, `ctx_remind`, +// `ctx_session_*`, `ctx_check_task_completion`, +// `ctx_watch_update`). +// - **`tools/call`**: invoke one tool with a typed +// arguments map. +// - **`prompts/list` / `prompts/get`**: surface +// ctx-curated prompts (e.g. the session-start +// ceremony prompt) as first-class MCP prompts. +// +// Wire types live in [internal/mcp/proto]; this package +// concerns itself with **dispatch** and **state**. +// +// # Architecture +// +// The package layers four sub-concerns: +// +// - **[New]**: constructs a server bound to a +// [entity.MCPDeps] (paths, runtime config). The +// server is single-threaded by design; Claude Code +// spawns one sub-process per session and does not +// pipeline requests. +// - **Routing**: [route/tool], [route/prompt], +// [route/resource] register handlers per MCP verb. +// - **Dispatch**: [dispatch] reads one +// JSON-RPC message at a time from stdin and routes +// it to the right handler; [dispatch/poll] watches +// subscribed resources for changes. +// - **Catalog** ([catalog/data.go]): the static +// tool/prompt/resource definitions surfaced via +// `*/list` calls. +// +// All actual domain logic (what `ctx_drift` *does*, what +// `ctx_search` returns) lives in [internal/mcp/handler]. +// This package is the protocol-aware shell around it. +// +// # Per-Session State +// +// Each running server instance owns one +// [entity.MCPSession] (turn counter, last-loaded context +// snapshot, governance flags). The session is created on +// the first `tools/call` and persists for the lifetime of +// the sub-process.
The handler layer reads/mutates it +// through [entity.MCPDeps]. +// +// # Governance Trailers +// +// After every `tools/call`, the dispatcher invokes +// [internal/mcp/handler.CheckGovernance] to append any +// session-overdue nudges (drift, persistence, journal +// import) to the response. The trailers are appended +// inside the JSON-RPC `result` envelope so they reach the +// AI without changing the protocol shape. +// +// # Concurrency +// +// One goroutine reads from stdin; one goroutine writes +// to stdout; tool dispatch runs in the read goroutine +// to preserve request ordering. Long-running tools +// (currently none) would need to spawn a goroutine and +// signal completion through a channel. package server diff --git a/internal/mcp/server/extract/doc.go b/internal/mcp/server/extract/doc.go index fbb8e270e..f738436e0 100644 --- a/internal/mcp/server/extract/doc.go +++ b/internal/mcp/server/extract/doc.go @@ -4,9 +4,33 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package extract converts raw MCP tool arguments into typed Go values. +// Package extract converts raw MCP tool arguments +// into typed Go values for use by tool handlers. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [EntryArgs], [Opts]. -// Exports: [EntryArgs], [Opts]. +// # Entry Arguments +// +// EntryArgs extracts the required "type" and "content" +// fields from an MCP tool argument map. It returns an +// error if either field is missing or empty. +// +// entryType, content, err := extract.EntryArgs(args) +// +// # Options +// +// Opts builds an entity.EntryOpts struct from an MCP +// argument map by extracting optional fields such as +// priority, section, context, rationale, consequence, +// lesson, application, session ID, branch, and commit. +// Missing fields are left as zero values. +// +// opts := extract.Opts(args) +// // opts.Priority, opts.Section, etc. 
+// +// # Design +// +// Both functions perform safe type assertions on the +// interface{} values in the argument map, returning +// zero values for fields that are absent or have +// unexpected types. This avoids panics from malformed +// MCP requests. package extract diff --git a/internal/mcp/server/io/doc.go b/internal/mcp/server/io/doc.go index 7b33d4b86..f01a2a0c3 100644 --- a/internal/mcp/server/io/doc.go +++ b/internal/mcp/server/io/doc.go @@ -4,9 +4,32 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package io writes serialized JSON-RPC messages to a line.Writer. +// Package io provides thread-safe JSON-RPC message +// writing over an io.Writer. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [WriteJSON]. -// Exports: [WriteJSON]. +// # Writer +// +// Writer wraps an io.Writer with a mutex so that +// concurrent goroutines can safely emit JSON-RPC +// messages without interleaving. Each call to +// WriteJSON marshals the value, appends a newline, +// and writes the result as a single atomic operation. +// +// w := io.NewWriter(os.Stdout) +// err := w.WriteJSON(response) +// +// # Thread Safety +// +// The internal mutex serializes all writes. This is +// required because the MCP server may send responses +// and notifications from different goroutines (for +// example, resource change notifications alongside +// tool call responses). +// +// # Message Format +// +// Each message is marshaled to compact JSON and +// terminated with a single LF newline. This matches +// the JSON-RPC 2.0 line-delimited transport used by +// MCP over stdin/stdout. package io diff --git a/internal/mcp/server/out/doc.go b/internal/mcp/server/out/doc.go index 9cad24796..b666d753d 100644 --- a/internal/mcp/server/out/doc.go +++ b/internal/mcp/server/out/doc.go @@ -4,9 +4,42 @@ // \ Copyright 2026-present Context contributors. 
// SPDX-License-Identifier: Apache-2.0 -// Package out builds JSON-RPC response structs for success, error,. +// Package out builds JSON-RPC 2.0 response structs +// for the MCP server. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [OkResponse], [ErrResponse], [ToolOK], -// [ToolError], [ToolResult], [Call]. +// # Response Constructors +// +// OkResponse creates a success response with the +// given result payload. ErrResponse creates an error +// response with a code and message. +// +// resp := out.OkResponse(id, result) +// resp := out.ErrResponse(id, code, msg) +// +// # Tool Result Helpers +// +// ToolOK builds a successful tool result containing +// text content. ToolError builds a tool result with +// the IsError flag set. ToolResult dispatches between +// the two based on whether an error is present. +// +// resp := out.ToolOK(id, "done") +// resp := out.ToolError(id, "failed") +// resp := out.ToolResult(id, text, err) +// +// # Handler Wrapper +// +// Call invokes a no-argument handler function and +// wraps its (string, error) return into a response. +// This eliminates boilerplate in tool dispatchers. +// +// resp := out.Call(id, func() (string, error) { +// return "ok", nil +// }) +// +// # Content Types +// +// All tool results use the "text" content type from +// the mime config package. The JSON-RPC version string +// comes from the server config package. package out diff --git a/internal/mcp/server/parse/doc.go b/internal/mcp/server/parse/doc.go index 3a791b18f..d11b96baa 100644 --- a/internal/mcp/server/parse/doc.go +++ b/internal/mcp/server/parse/doc.go @@ -4,9 +4,34 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package parse unmarshals raw JSON bytes into MCP request structs,. +// Package parse unmarshals raw JSON bytes into MCP +// request structs. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [Request]. -// Exports: [Request]. 
+// # Request Parsing +// +// Request takes raw JSON bytes from stdin and returns +// a parsed proto.Request. It handles three cases: +// +// - Valid request with ID: returns the parsed +// request and nil error response. +// - Valid notification (no ID): returns nil for +// both, since notifications expect no response. +// - Malformed JSON: returns nil request and a +// parse-error response ready to send back. +// +// # Usage +// +// req, errResp := parse.Request(data) +// if errResp != nil { +// // send error response +// } +// if req == nil { +// // notification, skip +// } +// +// # Error Codes +// +// Parse errors use the standard JSON-RPC parse error +// code from the schema config package. The error +// message comes from the embedded description text. package parse diff --git a/internal/mcp/server/ping/doc.go b/internal/mcp/server/ping/doc.go index 076b3df6c..d392c87f7 100644 --- a/internal/mcp/server/ping/doc.go +++ b/internal/mcp/server/ping/doc.go @@ -4,9 +4,22 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package ping responds to MCP ping requests with an empty success result. +// Package ping responds to MCP ping requests with an +// empty success result. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [Dispatch]. -// Exports: [Dispatch]. +// # Handler +// +// Dispatch handles the "ping" JSON-RPC method by +// returning an empty success response. MCP clients +// use ping to verify that the server is alive and +// responsive. +// +// resp := ping.Dispatch(req) +// +// # Protocol +// +// The ping method belongs to the MCP base protocol. +// It takes no parameters and returns an empty object. +// The response echoes the request ID so the client +// can correlate it with the original request. 
package ping diff --git a/internal/mcp/server/resource/doc.go b/internal/mcp/server/resource/doc.go index 4707ccc27..7d7eac42c 100644 --- a/internal/mcp/server/resource/doc.go +++ b/internal/mcp/server/resource/doc.go @@ -4,13 +4,41 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package resource handles MCP resource requests including list, -// read, subscribe, and unsubscribe operations. +// Package resource handles MCP resource requests +// including list, read, subscribe, and unsubscribe +// operations. // -// Each dispatcher validates parameters, delegates to the -// appropriate handler, and returns JSON-RPC 2.0 responses. +// # Dispatchers // -// Key exports: [DispatchList], [DispatchRead], -// [DispatchSubscribe], [DispatchUnsubscribe]. -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). +// DispatchList returns the pre-built resource list +// that the catalog package constructed at startup. +// +// DispatchRead loads context from disk and returns +// the requested resource content. It handles two +// kinds of resources: +// +// - Individual file resources: looked up via +// catalog.FileForURI and returned as-is. +// - Agent packet: assembled from all context files +// in read order, respecting the token budget. +// Files that exceed the budget are listed as +// "Also noted" summaries instead. +// +// DispatchSubscribe and DispatchUnsubscribe parse +// the subscription params and delegate to a callback +// function with the validated URI. +// +// # Agent Packet Assembly +// +// The readAgentPacket function assembles context files +// in priority order. Each file is formatted as a +// labeled section. When the cumulative token count +// exceeds the budget, remaining files are omitted +// and listed as summaries. +// +// # Subscription Handling +// +// The applySubscription helper validates params for +// both subscribe and unsubscribe, then calls the +// provided callback with the extracted URI. 
package resource diff --git a/internal/mcp/server/route/fallback/doc.go b/internal/mcp/server/route/fallback/doc.go index da89955f5..ada5ee983 100644 --- a/internal/mcp/server/route/fallback/doc.go +++ b/internal/mcp/server/route/fallback/doc.go @@ -4,9 +4,23 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package fallback returns method-not-found errors for unrecognized. +// Package fallback returns method-not-found errors +// for unrecognized MCP method names. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [DispatchErr]. -// Exports: [DispatchErr]. +// # Handler +// +// DispatchErr builds a JSON-RPC error response with +// the standard method-not-found error code. It +// includes the unrecognized method name in the error +// message so the client can diagnose the issue. +// +// resp := fallback.DispatchErr(req) +// +// # Role in Dispatch +// +// The main dispatch package calls DispatchErr as the +// default case in its method switch. Any method that +// does not match a known handler (initialize, ping, +// resources/*, tools/*, prompts/*) falls through to +// this package. package fallback diff --git a/internal/mcp/server/route/initialize/doc.go b/internal/mcp/server/route/initialize/doc.go index 62fe9d063..c0ea17861 100644 --- a/internal/mcp/server/route/initialize/doc.go +++ b/internal/mcp/server/route/initialize/doc.go @@ -4,9 +4,30 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package initialize handles the MCP initialize handshake and. +// Package initialize handles the MCP initialize +// handshake that establishes the server session. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [Dispatch]. -// Exports: [Dispatch]. +// # Handler +// +// Dispatch responds to the "initialize" JSON-RPC +// method by returning the server's capabilities and +// version information. 
The response includes: +// +// - ProtocolVersion: the MCP protocol version +// supported by this server. +// - Capabilities: resource subscriptions, tools, +// and prompts support flags. +// - ServerInfo: the server name and version string. +// +// # Usage +// +// resp := initialize.Dispatch(version, req) +// +// # Protocol +// +// The initialize method is the first request an MCP +// client sends. It must complete before the client +// can call any other methods. The server advertises +// which capabilities it supports so the client knows +// what methods are available. package initialize diff --git a/internal/mcp/server/route/prompt/doc.go b/internal/mcp/server/route/prompt/doc.go index ce772f3b4..ab096ac51 100644 --- a/internal/mcp/server/route/prompt/doc.go +++ b/internal/mcp/server/route/prompt/doc.go @@ -1,12 +1,37 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package prompt dispatches MCP prompt list and get requests to the. +// Package prompt is the **MCP `prompts/*` dispatcher**: +// the layer that takes a JSON-RPC request, validates +// the parameters, and routes to the right handler in +// [internal/mcp/handler] (or, for static catalog +// queries, returns the cached catalog directly). // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [DispatchList], [DispatchGet]. -// Exports: [DispatchList], [DispatchGet]. +// MCP exposes two prompt RPCs: +// +// - **`prompts/list`**: return the catalog of +// server-curated prompts the client may invoke. +// - **`prompts/get`**: render one prompt by name +// with its argument values filled in. +// +// # Public Surface +// +// - **[DispatchList](req, deps)**: handles +// `prompts/list`. Returns the static catalog +// of ctx-curated prompts (session-start +// ceremony, decision-add wizard, etc.). +// - **[DispatchGet](req, deps)**: handles +// `prompts/get`. 
Validates the requested +// prompt name + arguments, calls into +// [internal/mcp/handler] for the rendering. +// +// # Concurrency +// +// Each request runs in the read goroutine of +// [internal/mcp/server]; concurrent requests +// against the same `MCPDeps` are sequential by +// MCP design. package prompt diff --git a/internal/mcp/server/route/tool/doc.go b/internal/mcp/server/route/tool/doc.go index 135a03bc3..66d815300 100644 --- a/internal/mcp/server/route/tool/doc.go +++ b/internal/mcp/server/route/tool/doc.go @@ -1,12 +1,50 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package tool dispatches MCP tool list and call requests to the. +// Package tool is the **MCP `tools/*` dispatcher**: the +// layer that takes a JSON-RPC `tools/list` or +// `tools/call` request, validates parameters against +// the per-tool schema in [internal/mcp/server/def/tool], +// and routes to the right handler in +// [internal/mcp/handler]. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). -// Exports: [DispatchList], [DispatchCall]. -// Exports: [DispatchList], [DispatchCall]. +// # Public Surface +// +// - **[DispatchList](req, deps)**: returns the +// full tool catalog from +// [internal/mcp/server/def/tool.Defs]. +// - **[DispatchCall](req, deps)**: extracts the +// tool name and arguments map from the JSON-RPC +// params, dispatches to the matching handler, +// wraps the handler's `(string, error)` return +// into the MCP response envelope, then runs +// [handler.CheckGovernance] to append any +// overdue-work nudges. +// +// # Argument Extraction +// +// MCP tool arguments arrive as `map[string]any` +// (raw JSON). This package owns the typed +// extraction (`mustString`, `optionalInt`, etc.) +// so the handlers see typed Go values, not +// `any`. 
+// +// # Error Mapping +// +// Handler errors map to JSON-RPC error codes: +// +// - **InvalidParams**: typed validation errors. +// - **InternalError**: anything else. +// +// The original error message is included in the +// `data` field so the client can surface it to +// the user / agent. +// +// # Concurrency +// +// Sequential per server instance; see +// [internal/mcp/server]. package tool diff --git a/internal/mcp/server/server_test.go b/internal/mcp/server/server_test.go index e1aceb694..010599fe0 100644 --- a/internal/mcp/server/server_test.go +++ b/internal/mcp/server/server_test.go @@ -20,6 +20,7 @@ import ( cfgSchema "github.com/ActiveMemory/ctx/internal/config/mcp/schema" "github.com/ActiveMemory/ctx/internal/mcp/proto" mcpIO "github.com/ActiveMemory/ctx/internal/mcp/server/io" + "github.com/ActiveMemory/ctx/internal/rc" ) func newTestServer(t *testing.T) (*Server, string) { @@ -40,6 +41,12 @@ func newTestServer(t *testing.T) (*Server, string) { if err := os.MkdirAll(contextDir, 0o755); err != nil { t.Fatalf("mkdir: %v", err) } + // Tools dispatched through the MCP server call rc.ContextDir() + // for paths under .context/; declare it so they resolve without + // the "context directory not declared" error. + t.Setenv("CTX_DIR", contextDir) + rc.Reset() + t.Cleanup(rc.Reset) files := map[string]string{ ctx.Constitution: "# Constitution\n\n- Rule 1: Never break things\n", ctx.Task: "# Tasks\n\n- [ ] Build MCP server\n- [ ] Write tests\n", diff --git a/internal/mcp/server/stat/doc.go b/internal/mcp/server/stat/doc.go index 407cbd34b..6229da8ac 100644 --- a/internal/mcp/server/stat/doc.go +++ b/internal/mcp/server/stat/doc.go @@ -4,9 +4,26 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package stat provides session statistics helpers for the MCP server. +// Package stat provides session statistics helpers +// for the MCP server. // -// Part of the MCP server (JSON-RPC 2.0 over stdin/stdout). 
-// Exports: [TotalAdds]. -// Exports: [TotalAdds]. +// # Counting +// +// TotalAdds sums all entry-add counts from a map +// keyed by entry type. This is used during session +// checkpoint reporting to produce a single total +// across all types (tasks, decisions, learnings, +// conventions, etc.). +// +// counts := map[string]int{ +// "task": 3, "decision": 1, +// } +// total := stat.TotalAdds(counts) // => 4 +// +// # Design +// +// The function accepts a generic map[string]int so +// it remains decoupled from the specific entry types +// defined elsewhere. Callers maintain their own count +// maps and pass them in when needed. package stat diff --git a/internal/memory/doc.go b/internal/memory/doc.go index ed984259e..38ac34a0f 100644 --- a/internal/memory/doc.go +++ b/internal/memory/doc.go @@ -4,17 +4,59 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package memory bridges Claude Code's auto memory (MEMORY.md) into -// the .context/ directory with discovery, mirroring, and drift detection. +// Package memory bridges Claude Code's per-project +// auto memory file (MEMORY.md) into a project's +// .context/ directory so that memory written by +// Claude becomes git-tracked, version-controlled, +// drift-checkable, and importable into the structured +// context files (DECISIONS / LEARNINGS / CONVENTIONS). // -// Claude Code maintains per-project auto memory at -// ~/.claude/projects/<slug>/memory/MEMORY.md. This package locates that -// file from the project root, mirrors it into .context/memory/mirror.md -// (git-tracked), and archives previous versions before each sync. +// # The Problem It Solves // -// Discovery encodes the project root path into the Claude Code slug -// format: absolute path with "/" replaced by "-", prefixed with "-". +// Claude Code's auto memory lives at +// ~/.claude/projects/<slug>/memory/MEMORY.md.
That +// path is not in the project repo (no peer review, +// no git history), is per-machine (teammates see +// different files), and silently grows as Claude +// takes notes, drifting from .context/ over time. // -// Sync state is tracked in .context/state/memory-import.json to support -// drift detection and future import/publish phases. +// # Pipeline Stages +// +// - **discover** ([DiscoverPath], [ProjectSlug]): +// encodes the project root into Claude Code's +// slug format and resolves the auto-memory file. +// - **sync** ([Sync], [Archive]): copies the +// source into .context/memory/mirror.md, archiving +// the previous mirror before overwrite. +// - **diff** ([Diff], [HasDrift]): line-level diff +// between source and mirror; surfaces what Claude +// wrote since the last sync. +// - **parse** ([Entries]): splits MEMORY.md +// content into discrete [Entry] blocks by headers, +// blank lines, and list items. +// - **classify** ([Classify]): routes each entry +// to the matching .context/ file based on keyword +// heuristics from .ctxrc classify_rules. +// - **promote** ([Promote]): writes a classified +// entry to its target .context/ file. +// - **publish** ([Publish], [SelectContent], +// [MergePublished], [RemovePublished]): the +// inverse direction: promotes .context/ entries +// into MEMORY.md so future Claude sessions see +// them up front. +// +// # State Tracking +// +// Sync and import state lives in +// .context/state/memory-import.json ([LoadState], +// [SaveState]). The [State] struct tracks last-synced +// timestamps, imported entry hashes ([EntryHash]), +// and import/publish progress. +// +// # Concurrency and Idempotency +// +// All operations are read or write-once with no +// long-lived goroutines. [Sync] is idempotent: an +// unchanged source produces no archive entry and no +// mirror write. 
package memory diff --git a/internal/memory/promote_test.go b/internal/memory/promote_test.go index 3d92dc897..8f61eb587 100644 --- a/internal/memory/promote_test.go +++ b/internal/memory/promote_test.go @@ -15,7 +15,7 @@ import ( "github.com/ActiveMemory/ctx/internal/config/ctx" "github.com/ActiveMemory/ctx/internal/config/dir" "github.com/ActiveMemory/ctx/internal/config/entry" - "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/testutil/testctx" ) // setupContextDir creates a minimal .context/ for promotion tests. @@ -24,7 +24,7 @@ func setupContextDir(t *testing.T) (string, func()) { workDir := t.TempDir() origDir, _ := os.Getwd() _ = os.Chdir(workDir) - rc.Reset() + testctx.Declare(t, workDir) contextDir := filepath.Join(workDir, dir.Context) if mkErr := os.MkdirAll(contextDir, 0o755); mkErr != nil { diff --git a/internal/notify/doc.go b/internal/notify/doc.go index 4cb2de627..377df1c99 100644 --- a/internal/notify/doc.go +++ b/internal/notify/doc.go @@ -4,10 +4,73 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package notify provides fire-and-forget webhook notifications with. +// Package notify implements **fire-and-forget webhook +// notifications**: ctx posts a small JSON payload to a +// user-configured URL when something interesting happens +// (loop completion, hook nudge, version mismatch, key-rotation +// reminder, etc.) and never blocks the caller waiting for the +// response. // -// Key exports: [LoadWebhook], [SaveWebhook], -// [EventAllowed], [Send], [PostJSON]. -// See source files for implementation details. -// Part of the internal subsystem. +// The package is what backs `ctx hook notify`, +// `ctx hook notify setup`, and `ctx hook notify test` on the +// CLI side, plus the in-process callers like the autonomous +// loop runner. +// +// # End-to-End Flow +// +// 1. 
**Setup** ([SaveWebhook]) encrypts a webhook URL with +// AES-256-GCM ([internal/crypto]) and writes +// `.context/.notify.enc`. The same per-machine key +// protects the scratchpad; a fresh key is generated and +// saved on first use if none exists. +// 2. **Send** ([Send]) loads + decrypts the URL via +// [LoadWebhook], gates on the configured event filter +// via [EventAllowed], builds an [entity.NotifyPayload], +// and ships it to [PostJSON]. +// 3. **PostJSON** does the actual HTTP: short timeout, +// `Content-Type: application/json`, single attempt, no +// retry. The intent is "best-effort signal", not "guaranteed +// delivery". +// +// All three functions return cleanly when nothing is +// configured: `("", nil)` from [LoadWebhook] when either +// the key or the encrypted URL file is missing, and a +// silent noop from [Send]. +// +// # Event Filter +// +// `notify.events` in `.ctxrc` is **opt-in**: empty list +// means **no events fire** (not "all events"). Recognized +// events: `loop`, `nudge`, `relay`, `heartbeat`. The filter +// is enforced by [EventAllowed]. +// +// **`ctx hook notify test` bypasses the filter** as a +// special case so users can verify connectivity without +// having to subscribe their target event first; the test +// path warns when an unfiltered event would normally have +// been dropped. +// +// # Template References +// +// Some emitters attach a [entity.TemplateRef] (hook name + +// variant) to the payload so downstream relays can render a +// canonical message. [template_ref.go] holds the helpers +// that resolve a [TemplateRef] to its rendered string at +// the receiving end (used by integrations that re-emit +// via Slack/Discord/ntfy.sh). +// +// # Encryption Key +// +// The encryption key is shared by both `ctx pad` and +// `ctx hook notify`. Rotating it (every +// `key_rotation_days`, default 90) requires re-running +// `ctx pad init` *and* `ctx hook notify setup`. 
The +// rotation nudge fires from +// `internal/cli/system/cmd/check_version`. +// +// # Concurrency +// +// All exported functions are safe to call concurrently; +// they hold no module-level state. The HTTP client is the +// stdlib default, connection-pooled and goroutine-safe. package notify diff --git a/internal/notify/notify.go b/internal/notify/notify.go index 586cd776e..b350a8b47 100644 --- a/internal/notify/notify.go +++ b/internal/notify/notify.go @@ -4,11 +4,6 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package notify provides fire-and-forget webhook notifications. -// -// The webhook URL is stored encrypted in .context/.notify.enc using the -// same AES-256-GCM key as the scratchpad (resolved via rc.KeyPath()). -// When no webhook is configured, all operations are silent noops. package notify import ( @@ -32,15 +27,32 @@ import ( // LoadWebhook reads and decrypts the webhook URL from .context/.notify.enc. // -// Returns ("", nil) if either the key file or encrypted file is missing -// (silent noop: webhook not configured). +// Returns ("", nil) when: +// - the key file is missing (key was never generated), +// - the encrypted file is missing (webhook never configured). +// +// Any resolver or I/O failure is propagated (including +// [errCtx.ErrDirNotDeclared]) so callers can distinguish +// "no context dir" from "no webhook configured" rather than +// being forced to treat them identically. [Send] treats any error +// as "no webhook, silently skip"; interactive callers (e.g. +// `ctx notify test`) can use [errors.Is] to surface a clearer +// message when the project is not set up yet. 
// // Returns: // - string: the decrypted webhook URL, or "" if not configured -// - error: non-nil only if decryption fails (missing files are silent) +// - error: non-nil on any resolver failure or decryption failure; +// missing key / encrypted file are silent func LoadWebhook() (string, error) { - kp := rc.KeyPath() - encPath := filepath.Join(rc.ContextDir(), cfgCrypto.NotifyEnc) + kp, kpErr := rc.KeyPath() + if kpErr != nil { + return "", kpErr + } + ctxDir, pathErr := rc.ContextDir() + if pathErr != nil { + return "", pathErr + } + encPath := filepath.Join(ctxDir, cfgCrypto.NotifyEnc) key, loadErr := crypto.LoadKey(kp) if loadErr != nil { @@ -76,8 +88,15 @@ func LoadWebhook() (string, error) { // Returns: // - error: non-nil if key generation, encryption, or file write fails func SaveWebhook(url string) error { - kp := rc.KeyPath() - encPath := filepath.Join(rc.ContextDir(), cfgCrypto.NotifyEnc) + kp, kpErr := rc.KeyPath() + if kpErr != nil { + return kpErr + } + ctxDir, ctxErr := rc.ContextDir() + if ctxErr != nil { + return ctxErr + } + encPath := filepath.Join(ctxDir, cfgCrypto.NotifyEnc) key, loadErr := crypto.LoadKey(kp) if loadErr != nil { @@ -157,14 +176,9 @@ func Send(event, message, sessionID string, detail *entity.TemplateRef) error { logWarn.Warn(cfgWarn.Getwd, cwdErr) } - payload := entity.NotifyPayload{ - Event: event, - Message: message, - Detail: detail, - SessionID: sessionID, - Timestamp: time.Now().UTC().Format(time.RFC3339), - Project: projectName, - } + payload := entity.NewNotifyPayload( + event, message, sessionID, projectName, detail, + ) body, marshalErr := json.Marshal(payload) if marshalErr != nil { diff --git a/internal/notify/notify_test.go b/internal/notify/notify_test.go index ab2ad11b5..1738129c3 100644 --- a/internal/notify/notify_test.go +++ b/internal/notify/notify_test.go @@ -17,6 +17,7 @@ import ( "github.com/ActiveMemory/ctx/internal/config/crypto" "github.com/ActiveMemory/ctx/internal/entity" 
"github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/testutil/testctx" ) func setupTestDir(t *testing.T) (string, func()) { @@ -26,8 +27,7 @@ _ = os.Chdir(tempDir) _ = os.MkdirAll(filepath.Join(tempDir, ".context"), 0o750) - // Point rc to this temp dir's .context - rc.Reset() + testctx.Declare(t, tempDir) return tempDir, func() { _ = os.Chdir(origDir) diff --git a/internal/parse/doc.go b/internal/parse/doc.go index cf836616e..829afb879 100644 --- a/internal/parse/doc.go +++ b/internal/parse/doc.go @@ -4,9 +4,45 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package parse provides shared text-to-typed-value conversion functions. +// Package parse holds the small **string-to-typed-value** +// converters that more than one ctx package needs: dates, +// section ranges, frontmatter splits, system-reminder +// stripping, and word-set helpers. Each function is a thin, +// well-tested wrapper around standard-library or +// canonical-format primitives so callers do not have to +// duplicate the same edge-case handling. // -// Functions here convert string inputs (dates, durations, identifiers) -// into Go types. They are thin wrappers that handle empty inputs and -// use canonical format constants from the config package. +// # Functions +// +// - **[Date](s)**: parses `YYYY-MM-DD` into a +// `time.Time` at midnight UTC. Empty input returns the +// zero time with no error so callers can branch on +// `.IsZero()` instead of comparing strings. +// - **[SplitFrontmatter](data)**: splits a `---`-fenced +// YAML frontmatter from the markdown body and returns +// the two byte slices plus a parse error. Used by the +// skill, steering, and journal-entry parsers. +// - **[StripSystemReminders](text)**: Claude Code injects +// `<system-reminder>` tags into tool results that the +// user did not write.
This function strips them so the +// journal pipeline records what the user actually said. +// - **[FixCodeFenceSpacing](text)**: users often type +// `text: ```code` without proper line spacing around +// the fence; this function normalizes the spacing so +// the renderer treats it as a code block. +// - **[WordSet](words)**: builds a `map[string]struct{}` +// from a slice for O(1) membership; used by the +// classifier and several lint helpers. +// +// # Why a Shared Package +// +// Every one of these conversions sat in two or three +// places before it was hoisted here. The package's +// existence is enforced by the audit suite: a duplicate +// implementation in another package fails CI. +// +// # Concurrency +// +// All functions are pure and stateless. Concurrent +// callers never race. package parse diff --git a/internal/rc/candidates.go b/internal/rc/candidates.go new file mode 100644 index 000000000..983f4f048 --- /dev/null +++ b/internal/rc/candidates.go @@ -0,0 +1,50 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package rc + +import ( + "os" + "path/filepath" + + "github.com/ActiveMemory/ctx/internal/config/dir" +) + +// ScanCandidates walks upward from start collecting every directory +// whose basename matches the canonical context directory name +// (`.context`). The scan is read-only: it does not resolve, bind, or +// select a context directory. It exists so error messages and the +// `ctx activate` subcommand can share the same candidate enumeration +// without reintroducing walk-up resolution elsewhere. +// +// The scan always uses the canonical `.context` basename, independent +// of any `.ctxrc` configuration. Under the explicit-declaration model, +// a custom name is only ever reached via an explicit --context-dir or +// CTX_DIR, so a rename-aware scan would be surplus machinery. 
+// +// Parameters: +// - start: directory to begin the upward walk from; typically the +// current working directory returned by os.Getwd. +// +// Returns: +// - []string: absolute paths of every matching directory found, +// ordered innermost-first (closest to start first). Empty when +// no candidates are visible on the upward path. +func ScanCandidates(start string) []string { + var out []string + cur := start + for { + path := filepath.Join(cur, dir.Context) + if info, err := os.Stat(path); err == nil && info.IsDir() { + out = append(out, path) + } + parent := filepath.Dir(cur) + if parent == cur { + return out + } + cur = parent + } +} diff --git a/internal/rc/doc.go b/internal/rc/doc.go index c7e49162e..42f683a91 100644 --- a/internal/rc/doc.go +++ b/internal/rc/doc.go @@ -4,9 +4,52 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package rc loads and manages runtime configuration from .ctxrc files. +// Package rc loads, caches, and exposes the runtime configuration +// every other ctx package depends on. It is the single source of +// truth for context directory location, token budget, encryption +// settings, and the dozens of other knobs that shape ctx behavior. // -// Key exports: [Default], [RC], [ContextDir], [TokenBudget], [PriorityOrder]. -// See source files for implementation details. -// Part of the internal subsystem. +// # Context-Directory Resolution (explicit-only) +// +// Under the explicit-context-dir model +// (spec: specs/explicit-context-dir.md), rc does NOT walk the +// filesystem looking for a .context/ directory. Every non-exempt +// command must declare the target explicitly. +// +// [ContextDir] returns the declared path or the empty string: +// +// 1. CLI override set via [OverrideContextDir] (--context-dir +// flag) wins if present. +// 2. CTX_DIR environment variable is consulted next. +// 3. Otherwise the empty string is returned. 
Exempt callers +// (ctx init, activate, deactivate, system bootstrap) handle +// empty themselves; every other command should call +// [RequireContextDir] instead, which returns a tailored error +// whose message depends on how many .context/ candidates are +// visible from CWD. +// +// [ScanCandidates] is a read-only upward scan used by the +// `ctx activate` subcommand and by [RequireContextDir]'s error +// formatter. It does not resolve, bind, or select a directory. +// +// # Configuration File (.ctxrc) +// +// Once [ContextDir] is declared, [load] reads `.ctxrc` from +// `filepath.Dir(ContextDir())`: the project root, which by contract +// is the parent of [ContextDir]. CWD has no say. When no context +// directory is declared, `.ctxrc` is not read at all and defaults +// apply. +// +// Environment overrides (CTX_TOKEN_BUDGET) are applied after the +// YAML merge so users can tune per-session without editing the +// file. +// +// The singleton [CtxRC] returned by [RC] is memoized via +// sync.Once so YAML is parsed at most once per process. +// +// # Concurrency +// +// [RC] serializes initialization through rcOnce. Read accessors +// hold an RLock; the only writer is the test-only [Reset]. CLI +// override mutation goes through a brief Lock(). package rc diff --git a/internal/rc/load.go b/internal/rc/load.go index c9f47efa8..801af3ad7 100644 --- a/internal/rc/load.go +++ b/internal/rc/load.go @@ -7,37 +7,85 @@ package rc import ( + "errors" "os" + "path/filepath" "strconv" "gopkg.in/yaml.v3" "github.com/ActiveMemory/ctx/internal/config/env" "github.com/ActiveMemory/ctx/internal/config/file" + cfgWarn "github.com/ActiveMemory/ctx/internal/config/warn" + errCtx "github.com/ActiveMemory/ctx/internal/err/context" ctxIo "github.com/ActiveMemory/ctx/internal/io" + logWarn "github.com/ActiveMemory/ctx/internal/log/warn" writeRC "github.com/ActiveMemory/ctx/internal/write/rc" ) -// load loads configuration from the .ctxrc file and applies env -// overrides. 
+// load builds the runtime configuration under the +// single-source-anchor model +// (spec: specs/single-source-context-anchor.md). +// +// Lookup rules: +// +// - When a context directory has been declared via CTX_DIR, +// `.ctxrc` is read from +// `filepath.Dir(ContextDir()) + "/.ctxrc"`: the project root, +// which by contract is the parent of [ContextDir]. CWD has no +// say. This is the "configuration belongs to the project root" +// rule. +// - When no context directory is declared, `.ctxrc` is not read +// at all: there is no project to configure. Defaults apply. +// - Environment overrides (CTX_TOKEN_BUDGET) are applied after the +// YAML merge so users can tune per-session without editing the +// file. // // Returns: -// - *CtxRC: Configuration with file values and env overrides applied +// - *CtxRC: Configuration with file values (when .ctxrc is +// readable) and environment overrides applied. func load() *CtxRC { cfg := Default() - // Try to load .ctxrc from the current directory - data, readErr := ctxIo.SafeReadUserFile(file.CtxRC) - if readErr == nil { - if yamlErr := yaml.Unmarshal(data, cfg); yamlErr != nil { - writeRC.ParseWarning(file.CtxRC, yamlErr) + rcPath, pathErr := ctxrcPath() + switch { + case pathErr == nil: + data, readErr := ctxIo.SafeReadUserFile(rcPath) + if readErr == nil { + if yamlErr := yaml.Unmarshal(data, cfg); yamlErr != nil { + writeRC.ParseWarning(rcPath, yamlErr) + } } + case errors.Is(pathErr, errCtx.ErrDirNotDeclared): + // CTX_DIR not declared. **Expected** for exempt commands + // (ctx init, activate, deactivate, doctor, version, + // hub *, etc.) that legitimately call accessors before + // any project exists; defaults are the right answer for + // them. **Unexpected** for operating commands, which + // should have been gated by [bootstrap/cmd.go]'s + // PersistentPreRunE call to RequireContextDir before + // reaching any RC accessor. 
+ // + // If an operating command ever slips past that gate, this + // branch would silently hand back default config + // (token_budget = 8000, auto_archive = true, etc.) and + // the user's .ctxrc settings would be invisibly ignored. + // Emit a stderr breadcrumb so the silence is visible: + // loud enough to surface during a missed-gate regression + // in dev / CI, quiet enough to ignore in legitimate + // exempt flows. Defaults still apply so the command can + // keep running. + logWarn.Warn(cfgWarn.RCNoContextDir) + default: + // Unexpected resolver failure (relative path, + // non-canonical basename, etc.). Surface loudly rather + // than swallowing; defaults still apply so commands that + // do not require a project can still boot. Same noisy-TUI + // principle documented on resolve.DirLine / + // resolve.AppendDir. + logWarn.Warn(cfgWarn.ContextDirResolve, pathErr) } - // Apply environment variable overrides - if envDir := os.Getenv(env.CtxDir); envDir != "" { - cfg.ContextDir = envDir - } if envBudget := os.Getenv(env.CtxTokenBudget); envBudget != "" { budget, parseErr := strconv.Atoi(envBudget) if parseErr == nil && budget > 0 { @@ -47,3 +95,20 @@ func load() *CtxRC { return cfg } + +// ctxrcPath returns the absolute path to the `.ctxrc` file adjacent +// to the declared context directory. +// +// Returns: +// - string: Absolute path to .ctxrc on success; "" on error. +// - error: errCtx.ErrDirNotDeclared when no context directory has +// been declared; any other resolver error from ContextDir is +// propagated unchanged so the caller decides policy rather than +// this helper silently returning an empty path. 
+func ctxrcPath() (string, error) { + ctxDir, err := ContextDir() + if err != nil { + return "", err + } + return filepath.Join(filepath.Dir(ctxDir), file.CtxRC), nil +} diff --git a/internal/rc/lock.go b/internal/rc/lock.go index c219675c9..84870c2a5 100644 --- a/internal/rc/lock.go +++ b/internal/rc/lock.go @@ -8,15 +8,13 @@ package rc import "sync" -// rc, rcOnce, rcOverrideDir, and rcMu hold the singleton runtime -// configuration loaded once from .ctxrc via sync.Once. +// rc, rcOnce, and rcMu hold the singleton runtime configuration +// loaded once from .ctxrc via sync.Once. var ( // rc holds the singleton runtime configuration. rc *CtxRC // rcOnce guards one-time configuration loading. rcOnce sync.Once - // rcOverrideDir overrides the config search directory. - rcOverrideDir string - // rcMu protects concurrent access to rc and rcOverrideDir. + // rcMu protects concurrent access to rc. rcMu sync.RWMutex ) diff --git a/internal/rc/rc.go b/internal/rc/rc.go index d999f5012..7d0052a7f 100644 --- a/internal/rc/rc.go +++ b/internal/rc/rc.go @@ -4,19 +4,21 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package rc provides runtime configuration loading from .ctxrc files. package rc import ( + "os" "path/filepath" "sync" "github.com/ActiveMemory/ctx/internal/config/ctx" "github.com/ActiveMemory/ctx/internal/config/dir" cfgEntry "github.com/ActiveMemory/ctx/internal/config/entry" + "github.com/ActiveMemory/ctx/internal/config/env" cfgMemory "github.com/ActiveMemory/ctx/internal/config/memory" "github.com/ActiveMemory/ctx/internal/config/parser" "github.com/ActiveMemory/ctx/internal/crypto" + errCtx "github.com/ActiveMemory/ctx/internal/err/context" ) // Default returns a new CtxRC with hardcoded default values. @@ -26,7 +28,6 @@ import ( // (8000 token budget, 7-day archive, etc.) 
func Default() *CtxRC { return &CtxRC{ - ContextDir: dir.Context, TokenBudget: DefaultTokenBudget, PriorityOrder: nil, // nil means use config.ReadOrder AutoArchive: true, @@ -41,10 +42,16 @@ func Default() *CtxRC { } } -// RC returns the loaded configuration, initializing it on the first call. +// RC returns the loaded configuration, initializing it on the first +// call. // -// It loads from .ctxrc if present, then applies environment overrides. -// The result is cached for subsequent calls. +// Under the single-source-anchor resolution model +// (spec: specs/single-source-context-anchor.md), `.ctxrc` is read +// from `filepath.Dir(ContextDir())/.ctxrc`: the project root, which +// by contract is the parent of [ContextDir]. CWD has no say. When +// no context directory is declared, `.ctxrc` is not read and +// defaults apply. Environment overrides (CTX_TOKEN_BUDGET) are +// applied afterward. The result is cached for subsequent calls. // // Returns: // - *CtxRC: The loaded and cached configuration @@ -55,34 +62,59 @@ func RC() *CtxRC { return rc } -// ContextDir returns the configured context directory as an absolute path. -// -// Resolution order: -// 1. CLI override (rcOverrideDir): returned as absolute, no walk. -// 2. Configured absolute path (.ctxrc or env var): returned as-is. -// 3. Upward walk from CWD: the first ancestor containing an existing -// directory whose basename matches the configured name wins. -// 4. Fallback: filepath.Join(cwd, configuredName) as absolute. Preserves -// ctx init's ability to create a new context directory at CWD. -// -// The walk allows commands and hooks invoked from project subdirectories -// to resolve the project-root context dir instead of creating stray state -// files inside the subdirectory. The walk result is cached for the life -// of the process; tests can call Reset to invalidate the cache. 
-// -// Returns: -// - string: Absolute path to the context directory -func ContextDir() string { - rcMu.RLock() - override := rcOverrideDir - rcMu.RUnlock() - if override != "" { - if abs, err := filepath.Abs(override); err == nil { - return abs - } - return override +// ContextDir returns the context directory as a cleaned absolute +// path after validating its declaration *shape*. +// +// This is the **declaration shape** validator: it observes [env.CtxDir] +// and checks the value is set, absolute, and canonically named. It +// performs **no filesystem syscalls**. Diagnostic callers that must +// describe declared state without erroring on broken state (for +// example, the `check-anchor-drift` hook) use this directly. +// +// Operating callers that need a usable directory should call +// [RequireContextDir] instead; it adds the boundary stat/IsDir +// checks. Mixing the two is a convention violation: an operating +// caller getting a shape-valid but non-existent path here would +// surface as a confusing downstream error +// (`open .../TASKS.md: no such file or directory`) instead of the +// friendly tailored not-found error from [RequireContextDir]. +// +// Rejection conditions, in order: +// +// 1. Unset/empty: [errCtx.ErrDirNotDeclared]. +// 2. Relative path (not [filepath.IsAbs]): +// [errCtx.ErrRelativeNotAllowed]. Absolute-only is a hardline: +// `filepath.Abs` *would* absolutize via cwd, exactly the silent +// cwd-dependency this resolver is meant to eliminate. +// 3. Cleaned basename != [dir.Context]: [errCtx.ErrNonCanonicalBasename]. +// Catches the common footgun `export CTX_DIR=$(pwd)` (project +// root instead of the `.context` subdirectory) on first use +// rather than letting init deposit canonical files in the +// project root. +// +// [filepath.Clean] runs unconditionally to normalize separators, +// dot segments, and trailing slashes, but the input itself must be +// absolute. 
Symlinks are not resolved: the basename guard checks +// the *declared* name, not the symlink target name. +// +// Returns: +// - string: cleaned absolute path when declared and shape-valid; +// "" on error. +// - error: [errCtx.ErrDirNotDeclared] / [errCtx.ErrRelativeNotAllowed] +// / [errCtx.ErrNonCanonicalBasename] depending on what failed. +func ContextDir() (string, error) { + raw := os.Getenv(env.CtxDir) + if raw == "" { + return "", errCtx.ErrDirNotDeclared + } + if !filepath.IsAbs(raw) { + return "", errCtx.RelativeNotAllowed(raw) } - return walkForContextDir(RC().ContextDir) + abs := filepath.Clean(raw) + if filepath.Base(abs) != dir.Context { + return "", errCtx.NonCanonicalBasename(filepath.Base(abs)) + } + return abs, nil } // TokenBudget returns the configured default token budget. @@ -219,19 +251,35 @@ func NotifyEvents() []string { // KeyPath returns the resolved encryption key file path. // -// Priority: key_path in .ctxrc (explicit) > project-local +// Under the explicit-context-dir model the caller must have a +// declared context directory. The previous implementation silently +// handed "" to [crypto.ResolveKeyPath] when ContextDir failed, which +// either filepath.Join'd a CWD-relative `.ctx.key` path or fell +// through to the global `~/.ctx/.ctx.key`: exactly the class of +// silent-wrong-location / wrong-key-rotation bug this branch aims +// to eliminate. The error is propagated instead so callers handle +// the absence of a project rather than rotating encryption against +// a surprise key. // -// (.context/.ctx.key) > global (~/.ctx/.ctx.key). +// Within ResolveKeyPath the existing priority still applies: +// key_path in .ctxrc (explicit) > project-local +// (.context/.ctx.key) > global (~/.ctx/.ctx.key). 
// // Returns: // - string: Resolved path to the encryption key file -func KeyPath() string { - return crypto.ResolveKeyPath(ContextDir(), RC().KeyPathOverride) +// - error: [errCtx.ErrDirNotDeclared] or any other ContextDir +// resolver failure, propagated unchanged +func KeyPath() (string, error) { + ctxDir, err := ContextDir() + if err != nil { + return "", err + } + return crypto.ResolveKeyPath(ctxDir, RC().KeyPathOverride), nil } // KeyRotationDays returns the configured key rotation threshold in days. // -// The encryption key is shared by both ctx pad and ctx notify, so the +// The encryption key is shared by both ctx pad and ctx hook notify, so the // rotation threshold is a project-wide setting. // // Priority: top-level key_rotation_days > @@ -480,28 +528,6 @@ func HooksEnabled() bool { return true } -// AllowOutsideCwd returns whether boundary validation should be skipped. -// -// Returns false (default) when the field is not set in .ctxrc. -// -// Returns: -// - bool: True if the context directory is allowed outside the project root -func AllowOutsideCwd() bool { - return RC().AllowOutsideCwd -} - -// OverrideContextDir sets a CLI-provided override for the context directory. -// -// This takes precedence over all other configuration sources. -// -// Parameters: -// - ctxDir: Directory path to use as an override -func OverrideContextDir(ctxDir string) { - rcMu.Lock() - defer rcMu.Unlock() - rcOverrideDir = ctxDir -} - // Reset clears the cached configuration, forcing // reload on the next access. func Reset() { @@ -509,7 +535,6 @@ func Reset() { defer rcMu.Unlock() rcOnce = sync.Once{} rc = nil - rcOverrideDir = "" } // FilePriority returns the priority of a context file. 
diff --git a/internal/rc/rc_test.go b/internal/rc/rc_test.go index d91a5fc71..15fa567dd 100644 --- a/internal/rc/rc_test.go +++ b/internal/rc/rc_test.go @@ -7,6 +7,7 @@ package rc import ( + "errors" "os" "path/filepath" "testing" @@ -14,14 +15,44 @@ import ( "github.com/ActiveMemory/ctx/internal/config/ctx" "github.com/ActiveMemory/ctx/internal/config/dir" "github.com/ActiveMemory/ctx/internal/config/env" + errCtx "github.com/ActiveMemory/ctx/internal/err/context" ) +// declareContext sets up a tempDir layout with a .context/ directory +// and a .ctxrc at the project root (the parent of CTX_DIR), declares +// CTX_DIR via t.Setenv, and resets the rc singleton. The helper +// matches the single-source-anchor resolution model +// (spec: specs/single-source-context-anchor.md): .ctxrc is read from +// filepath.Dir(ContextDir())/.ctxrc, not CWD. +// +// Parameters: +// - t: test handle for Setenv/TempDir/Cleanup wiring. +// - content: YAML body to write into .ctxrc; empty for "no file". +// +// Returns: +// - string: absolute path of the declared .context/ directory. 
+func declareContext(t *testing.T, content string) string { + t.Helper() + tempDir := t.TempDir() + ctxDir := filepath.Join(tempDir, dir.Context) + if mkErr := os.MkdirAll(ctxDir, 0700); mkErr != nil { + t.Fatalf("mkdir .context: %v", mkErr) + } + if content != "" { + rcPath := filepath.Join(tempDir, ".ctxrc") + if wrErr := os.WriteFile(rcPath, []byte(content), 0600); wrErr != nil { + t.Fatalf("write .ctxrc: %v", wrErr) + } + } + t.Setenv(env.CtxDir, ctxDir) + Reset() + t.Cleanup(Reset) + return ctxDir +} + func TestDefaultRC(t *testing.T) { rc := Default() - if rc.ContextDir != dir.Context { - t.Errorf("ContextDir = %q, want %q", rc.ContextDir, dir.Context) - } if rc.TokenBudget != DefaultTokenBudget { t.Errorf("TokenBudget = %d, want %d", rc.TokenBudget, DefaultTokenBudget) } @@ -39,49 +70,40 @@ func TestDefaultRC(t *testing.T) { } } +// TestGetRC_NoFile: no CTX_DIR declared and no .ctxrc anywhere → +// defaults apply. func TestGetRC_NoFile(t *testing.T) { - // Change to temp directory with no .ctxrc tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() + t.Chdir(tempDir) + // Ensure no env leak from other tests. + t.Setenv(env.CtxDir, "") Reset() + t.Cleanup(Reset) rc := RC() - if rc.ContextDir != dir.Context { - t.Errorf("ContextDir = %q, want %q", rc.ContextDir, dir.Context) - } if rc.TokenBudget != DefaultTokenBudget { t.Errorf("TokenBudget = %d, want %d", rc.TokenBudget, DefaultTokenBudget) } + if !rc.AutoArchive { + t.Error("AutoArchive = false, want true (default)") + } } +// TestGetRC_WithFile: CTX_DIR declared, .ctxrc adjacent → values +// picked up. 
func TestGetRC_WithFile(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - // Create .ctxrc file - rcContent := `context_dir: custom-context -token_budget: 4000 + declareContext(t, `token_budget: 4000 priority_order: - TASKS.md - DECISIONS.md auto_archive: false archive_after_days: 14 -` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() +`) rc := RC() - if rc.ContextDir != "custom-context" { - t.Errorf("ContextDir = %q, want %q", rc.ContextDir, "custom-context") - } if rc.TokenBudget != 4000 { t.Errorf("TokenBudget = %d, want %d", rc.TokenBudget, 4000) } @@ -96,97 +118,355 @@ archive_after_days: 14 } } -func TestGetRC_EnvOverrides(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - // Create .ctxrc file - rcContent := `context_dir: file-context -token_budget: 4000 -` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - // Set environment variables (t.Setenv auto-restores after test) - t.Setenv(env.CtxDir, "env-context") +// TestGetRC_TokenBudgetEnvOverride: CTX_TOKEN_BUDGET beats .ctxrc. +func TestGetRC_TokenBudgetEnvOverride(t *testing.T) { + declareContext(t, `token_budget: 4000`) t.Setenv(env.CtxTokenBudget, "2000") - Reset() rc := RC() - - // Env should override file - if rc.ContextDir != "env-context" { - t.Errorf( - "ContextDir = %q, want %q (env override)", - rc.ContextDir, "env-context", - ) - } if rc.TokenBudget != 2000 { t.Errorf("TokenBudget = %d, want %d (env override)", rc.TokenBudget, 2000) } } -func TestGetContextDir_CLIOverride(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() +// TestContextDir_RejectsUnset: CTX_DIR unset → ErrDirNotDeclared. 
+func TestContextDir_RejectsUnset(t *testing.T) { + t.Setenv(env.CtxDir, "") + Reset() + t.Cleanup(Reset) - // Create .ctxrc file - rcContent := `context_dir: file-context` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) + got, err := ContextDir() + if !errors.Is(err, errCtx.ErrDirNotDeclared) { + t.Errorf("ContextDir() err = %v, want ErrDirNotDeclared", err) + } + if got != "" { + t.Errorf("ContextDir() = %q, want \"\"", got) + } +} - // Set env override (t.Setenv auto-restores after test) - t.Setenv(env.CtxDir, "env-context") +// TestContextDir_RejectsEmpty: CTX_DIR set to empty string is +// treated as unset. Spec contract: declared-or-not, no +// in-between. +func TestContextDir_RejectsEmpty(t *testing.T) { + t.Setenv(env.CtxDir, "") + Reset() + t.Cleanup(Reset) + + _, err := ContextDir() + if !errors.Is(err, errCtx.ErrDirNotDeclared) { + t.Errorf("ContextDir() err = %v, want ErrDirNotDeclared", err) + } +} +// TestContextDir_RejectsRelative_DotContext: critical regression +// guard against silent cwd-dependency. Without IsAbs check, +// CTX_DIR=.context would be cwd-absolutized via filepath.Abs and +// pass the basename guard, defeating the resolver. +func TestContextDir_RejectsRelative_DotContext(t *testing.T) { + t.Setenv(env.CtxDir, ".context") Reset() + t.Cleanup(Reset) + + got, err := ContextDir() + if !errors.Is(err, errCtx.ErrRelativeNotAllowed) { + t.Errorf("ContextDir() err = %v, want ErrRelativeNotAllowed", err) + } + if got != "" { + t.Errorf("ContextDir() = %q, want \"\"", got) + } +} - // CLI override takes precedence over all - OverrideContextDir("cli-context") - defer Reset() +// TestContextDir_RejectsRelative_DotSlashContext: another shape of +// relative path, same expected error. +func TestContextDir_RejectsRelative_DotSlashContext(t *testing.T) { + t.Setenv(env.CtxDir, "./.context") + Reset() + t.Cleanup(Reset) - got := ContextDir() - // Contract: ContextDir() always returns an absolute path. 
- // A relative CLI override is resolved against the current working - // directory. - wantAbs, _ := filepath.Abs("cli-context") - if got != wantAbs { - t.Errorf("ContextDir() = %q, want %q (CLI override)", got, wantAbs) + _, err := ContextDir() + if !errors.Is(err, errCtx.ErrRelativeNotAllowed) { + t.Errorf("ContextDir() err = %v, want ErrRelativeNotAllowed", err) } } -func TestGetTokenBudget(t *testing.T) { +// TestContextDir_RejectsRelative_DotDot: dot-dot relative path +// also rejected. +func TestContextDir_RejectsRelative_DotDot(t *testing.T) { + t.Setenv(env.CtxDir, "../foo/.context") + Reset() + t.Cleanup(Reset) + + _, err := ContextDir() + if !errors.Is(err, errCtx.ErrRelativeNotAllowed) { + t.Errorf("ContextDir() err = %v, want ErrRelativeNotAllowed", err) + } +} + +// TestContextDir_RejectsNonCanonicalBasename: catches the common +// `export CTX_DIR=$(pwd)` footgun on first use rather than +// letting init deposit canonical files in the project root. +func TestContextDir_RejectsNonCanonicalBasename(t *testing.T) { + t.Setenv(env.CtxDir, "/tmp/notdotcontext") + Reset() + t.Cleanup(Reset) + + _, err := ContextDir() + if !errors.Is(err, errCtx.ErrNonCanonicalBasename) { + t.Errorf("ContextDir() err = %v, want ErrNonCanonicalBasename", err) + } + if err != nil && !contains(err.Error(), "notdotcontext") { + t.Errorf("err message %q should include offending basename", err.Error()) + } +} + +// TestContextDir_RejectsRoot: filepath.Base("/") returns "/", not +// ".context", so root path is rejected by the basename guard. +func TestContextDir_RejectsRoot(t *testing.T) { + t.Setenv(env.CtxDir, "/") + Reset() + t.Cleanup(Reset) + + _, err := ContextDir() + if !errors.Is(err, errCtx.ErrNonCanonicalBasename) { + t.Errorf("ContextDir() err = %v, want ErrNonCanonicalBasename", err) + } +} + +// TestContextDir_AcceptsCanonical: canonical absolute `.context` +// path is the happy path. 
+func TestContextDir_AcceptsCanonical(t *testing.T) { + t.Setenv(env.CtxDir, "/tmp/.context") + Reset() + t.Cleanup(Reset) + + got, err := ContextDir() + if err != nil { + t.Fatalf("ContextDir() err = %v, want nil", err) + } + if got != "/tmp/.context" { + t.Errorf("ContextDir() = %q, want %q", got, "/tmp/.context") + } +} + +// TestContextDir_NormalizesTrailingSlash: filepath.Clean strips +// trailing slash; basename guard still passes. +func TestContextDir_NormalizesTrailingSlash(t *testing.T) { + t.Setenv(env.CtxDir, "/tmp/.context/") + Reset() + t.Cleanup(Reset) + + got, err := ContextDir() + if err != nil { + t.Fatalf("ContextDir() err = %v, want nil", err) + } + if got != "/tmp/.context" { + t.Errorf("ContextDir() = %q, want %q", got, "/tmp/.context") + } +} + +// TestContextDir_NormalizesDotSegments: filepath.Clean +// canonicalizes dot segments. +func TestContextDir_NormalizesDotSegments(t *testing.T) { + t.Setenv(env.CtxDir, "/tmp/./.context") + Reset() + t.Cleanup(Reset) + + got, err := ContextDir() + if err != nil { + t.Fatalf("ContextDir() err = %v, want nil", err) + } + if got != "/tmp/.context" { + t.Errorf("ContextDir() = %q, want %q", got, "/tmp/.context") + } +} + +// TestContextDir_AcceptsSymlinkNamedDotContext: a symlink whose +// basename is `.context` (regardless of where it points) passes +// the basename guard. The resolver checks the *declared* name, +// not the symlink target name. 
+func TestContextDir_AcceptsSymlinkNamedDotContext(t *testing.T) { tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() + target := filepath.Join(tempDir, "actual-target") + if err := os.MkdirAll(target, 0700); err != nil { + t.Fatalf("mkdir target: %v", err) + } + link := filepath.Join(tempDir, dir.Context) + if err := os.Symlink(target, link); err != nil { + t.Skipf("symlink unsupported: %v", err) + } + t.Setenv(env.CtxDir, link) + Reset() + t.Cleanup(Reset) + got, err := ContextDir() + if err != nil { + t.Fatalf("ContextDir() err = %v, want nil", err) + } + if got != link { + t.Errorf("ContextDir() = %q, want %q (declared symlink path)", got, link) + } +} + +// contains is a small helper for substring checks in error +// messages. Avoids pulling strings.Contains everywhere. +func contains(haystack, needle string) bool { + for i := 0; i+len(needle) <= len(haystack); i++ { + if haystack[i:i+len(needle)] == needle { + return true + } + } + return false +} + +// TestContextDir_Unset: no env declaration → errCtx.ErrDirNotDeclared. +// Under the single-source-anchor model, unset is a valid signal used by +// exempt commands and rc.RequireContextDir's error path. +func TestContextDir_Unset(t *testing.T) { + tempDir := t.TempDir() + t.Chdir(tempDir) + t.Setenv(env.CtxDir, "") Reset() + t.Cleanup(Reset) - // Default value - budget := TokenBudget() - if budget != DefaultTokenBudget { - t.Errorf("TokenBudget() = %d, want %d", budget, DefaultTokenBudget) + got, err := ContextDir() + if err == nil { + t.Errorf("ContextDir() err = nil, want errCtx.ErrDirNotDeclared") + } + if got != "" { + t.Errorf("ContextDir() = %q, want \"\" (unset)", got) } } -func TestGetRC_InvalidYAML(t *testing.T) { +// TestContextDir_EnvOnly: CTX_DIR env set with canonical absolute +// `.context` path → resolves to that path. 
+func TestContextDir_EnvOnly(t *testing.T) { tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() + target := filepath.Join(tempDir, dir.Context) + _ = os.MkdirAll(target, 0700) + t.Setenv(env.CtxDir, target) + Reset() + t.Cleanup(Reset) + + got, err := ContextDir() + if err != nil { + t.Fatalf("ContextDir() err = %v, want nil", err) + } + if !filepath.IsAbs(got) { + t.Errorf("ContextDir() = %q, want absolute path", got) + } + gotResolved, _ := filepath.EvalSymlinks(got) + wantResolved, _ := filepath.EvalSymlinks(target) + if gotResolved != wantResolved { + t.Errorf("ContextDir() = %q, want %q (env)", gotResolved, wantResolved) + } +} - // Create invalid .ctxrc file - _ = os.WriteFile( - filepath.Join(tempDir, ".ctxrc"), - []byte("invalid: [yaml: content"), 0600, - ) +// TestRequireContextDir_Declared: a declared CTX_DIR yields the +// path and no error. +func TestRequireContextDir_Declared(t *testing.T) { + ctxDir := declareContext(t, "") + + got, err := RequireContextDir() + if err != nil { + t.Fatalf("RequireContextDir() err = %v, want nil", err) + } + gotResolved, _ := filepath.EvalSymlinks(got) + wantResolved, _ := filepath.EvalSymlinks(ctxDir) + if gotResolved != wantResolved { + t.Errorf("RequireContextDir() = %q, want %q", gotResolved, wantResolved) + } +} +// TestRequireContextDir_Undeclared: no override, no env → error +// with a tailored, non-empty message. 
+func TestRequireContextDir_Undeclared(t *testing.T) { + tempDir := t.TempDir() + t.Chdir(tempDir) + t.Setenv(env.CtxDir, "") Reset() + t.Cleanup(Reset) - // Should return defaults on invalid YAML + got, err := RequireContextDir() + if err == nil { + t.Fatalf("RequireContextDir() err = nil, want non-nil") + } + if got != "" { + t.Errorf("RequireContextDir() path = %q, want \"\" on error", got) + } + if msg := err.Error(); msg == "" { + t.Error("RequireContextDir() returned empty error message") + } +} + +// TestScanCandidates_NoMatches: empty tree → empty slice. +func TestScanCandidates_NoMatches(t *testing.T) { + tempDir := t.TempDir() + t.Chdir(tempDir) + + got := ScanCandidates(tempDir) + if len(got) != 0 { + t.Errorf("ScanCandidates() = %v, want []", got) + } +} + +// TestScanCandidates_SelfMatch: .context/ exists at start dir → +// one candidate, same path. +func TestScanCandidates_SelfMatch(t *testing.T) { + tempDir := t.TempDir() + ctxPath := filepath.Join(tempDir, dir.Context) + _ = os.MkdirAll(ctxPath, 0700) + + got := ScanCandidates(tempDir) + if len(got) != 1 { + t.Fatalf("ScanCandidates() len = %d, want 1", len(got)) + } + + wantResolved, _ := filepath.EvalSymlinks(ctxPath) + gotResolved, _ := filepath.EvalSymlinks(got[0]) + if gotResolved != wantResolved { + t.Errorf("ScanCandidates()[0] = %q, want %q", gotResolved, wantResolved) + } +} + +// TestScanCandidates_ManyAncestors: nested .context/ dirs upward +// are all returned, innermost first. 
+func TestScanCandidates_ManyAncestors(t *testing.T) { + tempDir := t.TempDir() + inner := filepath.Join(tempDir, "inner", "deep") + innerCtx := filepath.Join(tempDir, "inner", dir.Context) + outerCtx := filepath.Join(tempDir, dir.Context) + + for _, d := range []string{inner, innerCtx, outerCtx} { + if mkErr := os.MkdirAll(d, 0700); mkErr != nil { + t.Fatalf("mkdir %s: %v", d, mkErr) + } + } + + got := ScanCandidates(inner) + if len(got) < 2 { + t.Fatalf("ScanCandidates() len = %d, want >= 2", len(got)) + } + + // Innermost first: the first candidate must be in the parent of + // the start dir (i.e., inner/.context). + innerResolved, _ := filepath.EvalSymlinks(innerCtx) + gotInner, _ := filepath.EvalSymlinks(got[0]) + if gotInner != innerResolved { + t.Errorf("ScanCandidates()[0] = %q, want %q (innermost)", gotInner, innerResolved) + } +} + +func TestGetTokenBudget(t *testing.T) { + declareContext(t, "") + budget := TokenBudget() + if budget != DefaultTokenBudget { + t.Errorf("TokenBudget() = %d, want %d", budget, DefaultTokenBudget) + } +} + +func TestGetRC_InvalidYAML(t *testing.T) { + declareContext(t, "invalid: [yaml: content") rc := RC() if rc.TokenBudget != DefaultTokenBudget { t.Errorf( @@ -197,40 +477,21 @@ func TestGetRC_InvalidYAML(t *testing.T) { } func TestGetRC_PartialConfig(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - // Create .ctxrc with only some fields - rcContent := `token_budget: 5000` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() - + declareContext(t, `token_budget: 5000`) rc := RC() - - // Specified value should be used if rc.TokenBudget != 5000 { t.Errorf("TokenBudget = %d, want %d", rc.TokenBudget, 5000) } - // Unspecified values should use defaults - if rc.ContextDir != dir.Context { - t.Errorf("ContextDir = %q, want %q (default)", rc.ContextDir, dir.Context) + if rc.ArchiveAfterDays != 
DefaultArchiveAfterDays { + t.Errorf("ArchiveAfterDays = %d, want default", rc.ArchiveAfterDays) } } func TestGetRC_InvalidEnvBudget(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - + declareContext(t, "") t.Setenv(env.CtxTokenBudget, "not-a-number") - Reset() - // Invalid env should be ignored, use default rc := RC() if rc.TokenBudget != DefaultTokenBudget { t.Errorf( @@ -240,51 +501,42 @@ func TestGetRC_InvalidEnvBudget(t *testing.T) { } } -func TestGetRC_Singleton(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - +func TestGetRC_NegativeEnvBudget(t *testing.T) { + declareContext(t, "") + t.Setenv(env.CtxTokenBudget, "-100") Reset() + rc := RC() + if rc.TokenBudget != DefaultTokenBudget { + t.Errorf( + "TokenBudget = %d, want %d (default on negative env)", + rc.TokenBudget, DefaultTokenBudget, + ) + } +} + +func TestGetRC_Singleton(t *testing.T) { + declareContext(t, "") rc1 := RC() rc2 := RC() - if rc1 != rc2 { t.Error("RC() should return same instance") } } func TestPriorityOrder(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - - // Default has nil PriorityOrder - order := PriorityOrder() - if order != nil { + declareContext(t, "") + if order := PriorityOrder(); order != nil { t.Errorf("PriorityOrder() = %v, want nil", order) } } func TestPriorityOrder_Custom(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := `priority_order: + declareContext(t, `priority_order: - TASKS.md - DECISIONS.md - LEARNINGS.md -` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() +`) order := PriorityOrder() if len(order) != 3 { @@ -296,43 +548,21 @@ func 
TestPriorityOrder_Custom(t *testing.T) { } func TestAutoArchive(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - - // Default is true + declareContext(t, "") if !AutoArchive() { t.Error("AutoArchive() = false, want true") } } func TestAutoArchive_Disabled(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := `auto_archive: false` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() - + declareContext(t, `auto_archive: false`) if AutoArchive() { t.Error("AutoArchive() = true, want false") } } func TestArchiveAfterDays(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - + declareContext(t, "") days := ArchiveAfterDays() if days != DefaultArchiveAfterDays { t.Errorf("ArchiveAfterDays() = %d, want %d", days, DefaultArchiveAfterDays) @@ -340,16 +570,7 @@ func TestArchiveAfterDays(t *testing.T) { } func TestArchiveAfterDays_Custom(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := `archive_after_days: 30` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() - + declareContext(t, `archive_after_days: 30`) days := ArchiveAfterDays() if days != 30 { t.Errorf("ArchiveAfterDays() = %d, want %d", days, 30) @@ -357,196 +578,70 @@ func TestArchiveAfterDays_Custom(t *testing.T) { } func TestScratchpadEncrypt_Default(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - - // Default (nil pointer) should return true + declareContext(t, "") if !ScratchpadEncrypt() { t.Error("ScratchpadEncrypt() = false, want true (default)") } } 
func TestScratchpadEncrypt_Explicit(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := `scratchpad_encrypt: false` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() - + declareContext(t, `scratchpad_encrypt: false`) if ScratchpadEncrypt() { t.Error("ScratchpadEncrypt() = true, want false") } } func TestScratchpadEncrypt_ExplicitTrue(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := `scratchpad_encrypt: true` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() - + declareContext(t, `scratchpad_encrypt: true`) if !ScratchpadEncrypt() { t.Error("ScratchpadEncrypt() = false, want true") } } func TestFilePriority_DefaultOrder(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() + declareContext(t, "") - Reset() - - // CONSTITUTION.md should be first in default ReadOrder - p := FilePriority(ctx.Constitution) - if p != 1 { + if p := FilePriority(ctx.Constitution); p != 1 { t.Errorf("FilePriority(%q) = %d, want 1", ctx.Constitution, p) } - - // TASKS.md should be second - p = FilePriority(ctx.Task) - if p != 2 { + if p := FilePriority(ctx.Task); p != 2 { t.Errorf("FilePriority(%q) = %d, want 2", ctx.Task, p) } - - // Unknown file gets 100 - p = FilePriority("UNKNOWN.md") - if p != 100 { + if p := FilePriority("UNKNOWN.md"); p != 100 { t.Errorf("FilePriority(%q) = %d, want 100", "UNKNOWN.md", p) } } func TestFilePriority_CustomOrder(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := `priority_order: + declareContext(t, `priority_order: - DECISIONS.md - TASKS.md -` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), 
[]byte(rcContent), 0600) - - Reset() +`) - // DECISIONS.md should be first in custom order - p := FilePriority(ctx.Decision) - if p != 1 { + if p := FilePriority(ctx.Decision); p != 1 { t.Errorf("FilePriority(%q) = %d, want 1", ctx.Decision, p) } - - // TASKS.md should be second - p = FilePriority(ctx.Task) - if p != 2 { + if p := FilePriority(ctx.Task); p != 2 { t.Errorf("FilePriority(%q) = %d, want 2", ctx.Task, p) } - - // File not in custom order gets 100 - p = FilePriority("UNKNOWN.md") - if p != 100 { + if p := FilePriority("UNKNOWN.md"); p != 100 { t.Errorf("FilePriority(%q) = %d, want 100", "UNKNOWN.md", p) } } -func TestContextDir_NoOverride(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - - got := ContextDir() - - // Contract: when no .context/ exists upward, ContextDir() falls - // back to filepath.Join(cwd, dir.Context) as an absolute path. - wantResolved, _ := filepath.EvalSymlinks(tempDir) - gotParent, _ := filepath.EvalSymlinks(filepath.Dir(got)) - - if gotParent != wantResolved { - t.Errorf("ContextDir() parent = %q, want %q", gotParent, wantResolved) - } - if filepath.Base(got) != dir.Context { - t.Errorf( - "ContextDir() base = %q, want %q", - filepath.Base(got), dir.Context, - ) - } -} - -func TestAllowOutsideCwd_Default(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - - // Default is false - if AllowOutsideCwd() { - t.Error("AllowOutsideCwd() = true, want false (default)") - } -} - -func TestAllowOutsideCwd_Enabled(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := `allow_outside_cwd: true` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() - - if !AllowOutsideCwd() { - t.Error("AllowOutsideCwd() = false, want 
true") - } -} - func TestNotifyEvents_Default(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - - // Default (nil Notify) returns nil - events := NotifyEvents() - if events != nil { + declareContext(t, "") + if events := NotifyEvents(); events != nil { t.Errorf("NotifyEvents() = %v, want nil", events) } } func TestNotifyEvents_Configured(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := `notify: + declareContext(t, `notify: events: - loop - nudge -` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() +`) events := NotifyEvents() if len(events) != 2 || events[0] != "loop" || events[1] != "nudge" { @@ -555,72 +650,35 @@ func TestNotifyEvents_Configured(t *testing.T) { } func TestKeyRotationDays_Default(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - - days := KeyRotationDays() - if days != DefaultKeyRotationDays { + declareContext(t, "") + if days := KeyRotationDays(); days != DefaultKeyRotationDays { t.Errorf("KeyRotationDays() = %d, want %d", days, DefaultKeyRotationDays) } } func TestKeyRotationDays_Custom(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := `key_rotation_days: 30 -` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() - - days := KeyRotationDays() - if days != 30 { + declareContext(t, `key_rotation_days: 30 +`) + if days := KeyRotationDays(); days != 30 { t.Errorf("KeyRotationDays() = %d, want %d", days, 30) } } func TestKeyRotationDays_LegacyNotify(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) 
}() - - rcContent := `notify: + declareContext(t, `notify: key_rotation_days: 45 -` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() - - days := KeyRotationDays() - if days != 45 { +`) + if days := KeyRotationDays(); days != 45 { t.Errorf("KeyRotationDays() = %d, want %d (legacy notify fallback)", days, 45) } } func TestKeyRotationDays_TopLevelTakesPrecedence(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := `key_rotation_days: 60 + declareContext(t, `key_rotation_days: 60 notify: key_rotation_days: 45 -` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() - - days := KeyRotationDays() - if days != 60 { +`) + if days := KeyRotationDays(); days != 60 { t.Errorf( "KeyRotationDays() = %d, want %d (top-level takes precedence)", days, 60, @@ -629,13 +687,7 @@ notify: } func TestSessionPrefixes_Default(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - + declareContext(t, "") prefixes := SessionPrefixes() if len(prefixes) != 1 || prefixes[0] != "Session:" { t.Errorf("SessionPrefixes() = %v, want [Session:]", prefixes) @@ -643,44 +695,22 @@ func TestSessionPrefixes_Default(t *testing.T) { } func TestSessionPrefixes_Custom(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := "session_prefixes:\n" + - " - \"Session:\"\n" + - " - \"セッション:\"\n" + - " - \"Sesión:\"\n" - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() + declareContext(t, "session_prefixes:\n"+ + " - \"Session:\"\n"+ + " - \"セッション:\"\n"+ + " - \"Sesión:\"\n") prefixes := SessionPrefixes() if len(prefixes) != 3 { t.Fatalf("SessionPrefixes() len = %d, want 3", len(prefixes)) } - if prefixes[0] != 
"Session:" { - t.Errorf("SessionPrefixes()[0] = %q, want %q", prefixes[0], "Session:") - } - if prefixes[1] != "セッション:" { - t.Errorf("SessionPrefixes()[1] = %q, want %q", prefixes[1], "セッション:") - } - if prefixes[2] != "Sesión:" { - t.Errorf("SessionPrefixes()[2] = %q, want %q", prefixes[2], "Sesión:") + if prefixes[0] != "Session:" || prefixes[1] != "セッション:" || prefixes[2] != "Sesión:" { + t.Errorf("SessionPrefixes() = %v", prefixes) } } func TestSessionPrefixes_EmptyFallsBackToDefault(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := "session_prefixes: []\n" - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() + declareContext(t, "session_prefixes: []\n") prefixes := SessionPrefixes() if len(prefixes) != 1 || prefixes[0] != "Session:" { @@ -691,270 +721,80 @@ func TestSessionPrefixes_EmptyFallsBackToDefault(t *testing.T) { } } -func TestGetRC_NegativeEnvBudget(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - t.Setenv(env.CtxTokenBudget, "-100") - - Reset() - - // Negative budget should be ignored (budget > 0 check) - rc := RC() - if rc.TokenBudget != DefaultTokenBudget { - t.Errorf( - "TokenBudget = %d, want %d (default on negative env)", - rc.TokenBudget, DefaultTokenBudget, - ) - } -} - -// --- Hooks & Steering RC field tests --- -// Validates: Requirements 19.8 - func TestTool_Default(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - - // Default is empty string when not configured - tool := Tool() - if tool != "" { + declareContext(t, "") + if tool := Tool(); tool != "" { t.Errorf("Tool() = %q, want %q", tool, "") } } func TestTool_Configured(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - 
defer func() { _ = os.Chdir(origDir) }() - - rcContent := `tool: kiro` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() - - tool := Tool() - if tool != "kiro" { + declareContext(t, `tool: kiro`) + if tool := Tool(); tool != "kiro" { t.Errorf("Tool() = %q, want %q", tool, "kiro") } } func TestSteeringDir_Default(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - - dir := SteeringDir() - if dir != DefaultSteeringDir { - t.Errorf("SteeringDir() = %q, want %q", dir, DefaultSteeringDir) + declareContext(t, "") + if d := SteeringDir(); d != DefaultSteeringDir { + t.Errorf("SteeringDir() = %q, want %q", d, DefaultSteeringDir) } } func TestSteeringDir_Configured(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := `steering: + declareContext(t, `steering: dir: custom/steering -` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() - - dir := SteeringDir() - if dir != "custom/steering" { - t.Errorf("SteeringDir() = %q, want %q", dir, "custom/steering") +`) + if d := SteeringDir(); d != "custom/steering" { + t.Errorf("SteeringDir() = %q, want %q", d, "custom/steering") } } func TestHooksDir_Default(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - - dir := HooksDir() - if dir != DefaultHooksDir { - t.Errorf("HooksDir() = %q, want %q", dir, DefaultHooksDir) + declareContext(t, "") + if d := HooksDir(); d != DefaultHooksDir { + t.Errorf("HooksDir() = %q, want %q", d, DefaultHooksDir) } } func TestHooksDir_Configured(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := `hooks: + declareContext(t, `hooks: dir: 
custom/hooks -` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() - - dir := HooksDir() - if dir != "custom/hooks" { - t.Errorf("HooksDir() = %q, want %q", dir, "custom/hooks") +`) + if d := HooksDir(); d != "custom/hooks" { + t.Errorf("HooksDir() = %q, want %q", d, "custom/hooks") } } func TestHookTimeout_Default(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - - timeout := HookTimeout() - if timeout != DefaultHookTimeout { + declareContext(t, "") + if timeout := HookTimeout(); timeout != DefaultHookTimeout { t.Errorf("HookTimeout() = %d, want %d", timeout, DefaultHookTimeout) } } func TestHookTimeout_Configured(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := `hooks: + declareContext(t, `hooks: timeout: 30 -` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() - - timeout := HookTimeout() - if timeout != 30 { +`) + if timeout := HookTimeout(); timeout != 30 { t.Errorf("HookTimeout() = %d, want %d", timeout, 30) } } func TestHooksEnabled_Default(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - - // Default (nil Hooks pointer) should return true + declareContext(t, "") if !HooksEnabled() { t.Error("HooksEnabled() = false, want true (default)") } } func TestHooksEnabled_ExplicitFalse(t *testing.T) { - tempDir := t.TempDir() - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - rcContent := `hooks: + declareContext(t, `hooks: enabled: false -` - _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) - - Reset() - +`) if HooksEnabled() { t.Error("HooksEnabled() = true, want false") } } - -func TestContextDir_UpwardWalkFromSubdir(t 
*testing.T) { - tempDir := t.TempDir() - - // Project root layout: - // /project/.git/ - // /project/.context/ - // /project/deep/nested/ - projectRoot := filepath.Join(tempDir, "project") - gitPath := filepath.Join(projectRoot, ".git") - contextPath := filepath.Join(projectRoot, dir.Context) - deepSubdir := filepath.Join(projectRoot, "deep", "nested") - - for _, d := range []string{gitPath, contextPath, deepSubdir} { - if mkErr := os.MkdirAll(d, 0700); mkErr != nil { - t.Fatalf("mkdir %s: %v", d, mkErr) - } - } - - origDir, _ := os.Getwd() - _ = os.Chdir(deepSubdir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - - got := ContextDir() - - // Resolve symlinks so /tmp vs /private/tmp on macOS compares equal. - wantResolved, _ := filepath.EvalSymlinks(contextPath) - gotResolved, _ := filepath.EvalSymlinks(got) - - if gotResolved != wantResolved { - t.Errorf( - "ContextDir() from subdir = %q, want %q", - gotResolved, wantResolved, - ) - } - - // Explicit regression guard: the returned path must NOT be the - // stray-dir fallback that the bug would have produced. - strayPath := filepath.Join(deepSubdir, dir.Context) - strayResolved, _ := filepath.EvalSymlinks(filepath.Dir(strayPath)) - if gotResolved == filepath.Join(strayResolved, dir.Context) { - t.Errorf( - "ContextDir() resolved to stray subdir path %q — "+ - "upward walk regressed", - got, - ) - } -} - -func TestContextDir_FallbackWhenNotFound(t *testing.T) { - tempDir := t.TempDir() - - origDir, _ := os.Getwd() - _ = os.Chdir(tempDir) - defer func() { _ = os.Chdir(origDir) }() - - Reset() - - got := ContextDir() - - // Fallback path: filepath.Join(cwd, dir.Context), absolute. 
- wantResolved, _ := filepath.EvalSymlinks(tempDir) - gotDir, _ := filepath.EvalSymlinks(filepath.Dir(got)) - - if gotDir != wantResolved { - t.Errorf( - "ContextDir() fallback parent = %q, want %q", - gotDir, wantResolved, - ) - } - if filepath.Base(got) != dir.Context { - t.Errorf( - "ContextDir() fallback base = %q, want %q", - filepath.Base(got), dir.Context, - ) - } - if !filepath.IsAbs(got) { - t.Errorf("ContextDir() fallback %q is not absolute", got) - } -} diff --git a/internal/rc/require.go b/internal/rc/require.go new file mode 100644 index 000000000..315b164a5 --- /dev/null +++ b/internal/rc/require.go @@ -0,0 +1,94 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package rc + +import ( + "errors" + "os" + + errCtx "github.com/ActiveMemory/ctx/internal/err/context" +) + +// RequireContextDir returns the declared context directory after +// validating both its declaration shape (via [ContextDir]) and that +// the path actually exists on disk as a directory. +// +// This is the **operating-command boundary**: every non-exempt +// command calls it at the start of its Run function (or via +// [PersistentPreRunE]). Diagnostic and exempt callers (init, +// activate, bootstrap, hooks like check-anchor-drift) must use +// [ContextDir] directly so they observe declared state without +// erroring on broken state. +// +// Convention: operating callers use this; only diagnostic / exempt +// callers may use raw [ContextDir]. Without that rule, operating +// callers would receive shape-valid but non-existent paths and +// surface confusing downstream errors instead of the friendly +// tailored not-found message. +// +// Rejection conditions: +// +// 1. CTX_DIR truly unset ([errCtx.ErrDirNotDeclared]) is rewrapped +// as [errCtx.NotDeclared] tailored to how many .context/ +// candidates are visible from CWD. 
The user said "I haven't +// told you anything yet"; the message offers a next step. +// 2. CTX_DIR set to a relative or non-canonical-basename value +// ([errCtx.ErrRelativeNotAllowed] / [errCtx.ErrNonCanonicalBasename]) +// is propagated unchanged. The user told us a specific value; +// the diagnostic should name what's wrong with that value +// ("must be absolute, got '...'", "basename must be '.context', +// got 'tmp'") rather than pretend nothing was declared. +// 3. Path does not exist: [errCtx.ErrContextDirNotFound] (wrapped +// via [errCtx.Missing]). +// 4. Stat failed for a reason other than not-exist (permission +// denied, I/O error): [errCtx.ErrContextDirStat] (wrapped via +// [errCtx.StatFailed]). +// 5. Path exists but is not a directory: +// [errCtx.ErrContextDirNotADirectory]. +// +// Exempt commands (ctx init, ctx activate, ctx deactivate, +// ctx version, ctx help, ctx system bootstrap) must not call this +// helper; they handle the unset case themselves, either by creating +// the directory (init), walking to emit shell integration (activate), +// or reporting resolution state for diagnostics (bootstrap). +// +// Returns: +// - string: absolute path to the declared context directory. +// - error: non-nil with a multi-line actionable message when the +// context directory has not been declared, does not exist, or +// does not name a directory; the error is already formatted +// for direct return from a Cobra Run function. +func RequireContextDir() (string, error) { + path, err := ContextDir() + if err != nil { + // Discriminate by error kind: only truly-unset gets the + // tailored multi-line "no context directory specified" + // message with candidate hints. 
Relative-path and + // non-canonical-basename errors are propagated with their + // precise "what's wrong with the value you gave us" + // message; collapsing them into the unset form would tell + // the user "you didn't declare it" when they did declare + // it (just to the wrong shape): exactly the silent / + // confusing diagnostic the spec was meant to eliminate. + if errors.Is(err, errCtx.ErrDirNotDeclared) { + cwd, _ := os.Getwd() + return "", errCtx.NotDeclared(ScanCandidates(cwd)) + } + return "", err + } + info, statErr := os.Stat(path) + if statErr != nil { + if errors.Is(statErr, os.ErrNotExist) { + return "", errCtx.Missing(path) + } + return "", errCtx.StatFailed(path, statErr) + } + if !info.IsDir() { + return "", errCtx.NotADir(path) + } + return path, nil +} diff --git a/internal/rc/require_test.go b/internal/rc/require_test.go new file mode 100644 index 000000000..aa4cb5967 --- /dev/null +++ b/internal/rc/require_test.go @@ -0,0 +1,197 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package rc + +import ( + "errors" + "os" + "path/filepath" + "runtime" + "strings" + "testing" + + "github.com/ActiveMemory/ctx/internal/config/dir" + "github.com/ActiveMemory/ctx/internal/config/env" + errCtx "github.com/ActiveMemory/ctx/internal/err/context" +) + +// TestRequireContextDir_PathDoesNotExist: shape-valid declaration +// pointing at a path that doesn't exist on disk → ErrContextDirNotFound. 
+func TestRequireContextDir_PathDoesNotExist(t *testing.T) { + t.Setenv(env.CtxDir, "/nonexistent-test-dir/.context") + Reset() + t.Cleanup(Reset) + + got, err := RequireContextDir() + if !errors.Is(err, errCtx.ErrContextDirNotFound) { + t.Errorf("RequireContextDir() err = %v, want ErrContextDirNotFound", + err) + } + if got != "" { + t.Errorf("RequireContextDir() = %q, want \"\"", got) + } +} + +// TestRequireContextDir_PathIsAFile: CTX_DIR points at an existing +// regular file → ErrContextDirNotADirectory. +func TestRequireContextDir_PathIsAFile(t *testing.T) { + tempDir := t.TempDir() + filePath := filepath.Join(tempDir, dir.Context) + if err := os.WriteFile(filePath, []byte("not a dir"), 0o600); err != nil { + t.Fatalf("write: %v", err) + } + t.Setenv(env.CtxDir, filePath) + Reset() + t.Cleanup(Reset) + + _, err := RequireContextDir() + if !errors.Is(err, errCtx.ErrContextDirNotADirectory) { + t.Errorf("RequireContextDir() err = %v, want ErrContextDirNotADirectory", + err) + } +} + +// TestRequireContextDir_StatPermissionDenied: stat fails for a +// reason other than not-exist → ErrContextDirStat. Skipped on +// platforms where chmod 000 doesn't block stat (Windows) or where +// the test runs as root. +func TestRequireContextDir_StatPermissionDenied(t *testing.T) { + if runtime.GOOS == "windows" { + t.Skip("permission semantics differ on windows") + } + if os.Geteuid() == 0 { + t.Skip("root bypasses permission checks") + } + tempDir := t.TempDir() + parent := filepath.Join(tempDir, "locked") + if err := os.MkdirAll(parent, 0o700); err != nil { + t.Fatalf("mkdir: %v", err) + } + target := filepath.Join(parent, dir.Context) + if err := os.MkdirAll(target, 0o700); err != nil { + t.Fatalf("mkdir target: %v", err) + } + if err := os.Chmod(parent, 0); err != nil { + t.Fatalf("chmod: %v", err) + } + t.Cleanup(func() { + // Restore rwx so t.TempDir's recursive cleanup can + // remove the directory. 
gosec G302 flags 0o700 as too + // permissive for files; it is fine for an in-test + // directory chmod that needs read+write+execute for + // cleanup to succeed. + _ = os.Chmod(parent, 0o700) //nolint:gosec // dir needs rwx for cleanup + }) + + t.Setenv(env.CtxDir, target) + Reset() + t.Cleanup(Reset) + + _, err := RequireContextDir() + if err == nil { + t.Fatal("RequireContextDir() err = nil, want non-nil") + } + // Either ErrContextDirNotFound or ErrContextDirStat depending on + // the underlying syscall: macOS often returns ENOENT through a + // chmod-0 parent because lookup short-circuits, while Linux + // typically surfaces EACCES. Both are acceptable diagnostics for + // the user. + if !errors.Is(err, errCtx.ErrContextDirStat) && + !errors.Is(err, errCtx.ErrContextDirNotFound) { + t.Errorf( + "RequireContextDir() err = %v, want ErrContextDirStat or ErrContextDirNotFound", + err) + } +} + +// TestRequireContextDir_HappyPath: existing dir, canonical name → +// returns absolute path, nil error. +func TestRequireContextDir_HappyPath(t *testing.T) { + tempDir := t.TempDir() + target := filepath.Join(tempDir, dir.Context) + if err := os.MkdirAll(target, 0o700); err != nil { + t.Fatalf("mkdir: %v", err) + } + t.Setenv(env.CtxDir, target) + Reset() + t.Cleanup(Reset) + + got, err := RequireContextDir() + if err != nil { + t.Fatalf("RequireContextDir() err = %v, want nil", err) + } + gotResolved, _ := filepath.EvalSymlinks(got) + wantResolved, _ := filepath.EvalSymlinks(target) + if gotResolved != wantResolved { + t.Errorf("RequireContextDir() = %q, want %q", gotResolved, wantResolved) + } +} + +// TestRequireContextDir_DelegatesShapeChecks: ContextDir shape +// errors flow through with their precise meaning preserved. 
Only +// the truly-unset case gets rewrapped as the tailored +// "no context directory specified" message with candidate hints; +// relative and non-canonical-basename errors propagate unchanged so +// the user sees what's wrong with the value they declared instead +// of "you didn't declare it" when they actually did. +func TestRequireContextDir_DelegatesShapeChecks(t *testing.T) { + cases := []struct { + name string + val string + wantSentinel error + wantMsgContain string + }{ + { + name: "unset", + val: "", + wantSentinel: errCtx.ErrDirNotDeclared, + wantMsgContain: "no context directory specified", + }, + { + name: "relative", + val: "relative-path", + wantSentinel: errCtx.ErrRelativeNotAllowed, + wantMsgContain: "absolute", + }, + { + name: "non-canonical", + val: "/tmp/notdotcontext", + wantSentinel: errCtx.ErrNonCanonicalBasename, + wantMsgContain: "notdotcontext", + }, + } + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + t.Setenv(env.CtxDir, c.val) + Reset() + t.Cleanup(Reset) + + got, err := RequireContextDir() + if err == nil { + t.Fatalf("RequireContextDir() err = nil, want non-nil for %q", + c.val) + } + if got != "" { + t.Errorf("RequireContextDir() = %q, want \"\"", got) + } + // "unset" gets rewrapped into a tailored message that no + // longer wraps the original sentinel. The other two + // shape errors propagate the sentinel unchanged. 
+ if c.name != "unset" && !errors.Is(err, c.wantSentinel) { + t.Errorf("RequireContextDir() err = %v, want errors.Is matching %v", + err, c.wantSentinel) + } + if msg := err.Error(); msg == "" { + t.Error("RequireContextDir() returned empty error message") + } + if !strings.Contains(err.Error(), c.wantMsgContain) { + t.Errorf("RequireContextDir() msg = %q; want substring %q", + err.Error(), c.wantMsgContain) + } + }) + } +} diff --git a/internal/rc/testmain_test.go b/internal/rc/testmain_test.go new file mode 100644 index 000000000..381eac0e7 --- /dev/null +++ b/internal/rc/testmain_test.go @@ -0,0 +1,22 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package rc + +import ( + "os" + "testing" + + "github.com/ActiveMemory/ctx/internal/assets/read/lookup" +) + +// TestMain initializes the embedded text-asset lookup so that error +// factories (internal/err/context.NotDeclared, etc.) resolve their +// DescKey-based messages instead of returning empty strings. +func TestMain(m *testing.M) { + lookup.Init() + os.Exit(m.Run()) +} diff --git a/internal/rc/types.go b/internal/rc/types.go index df327c610..ea96fe453 100644 --- a/internal/rc/types.go +++ b/internal/rc/types.go @@ -11,14 +11,11 @@ import cfgMemory "github.com/ActiveMemory/ctx/internal/config/memory" // CtxRC represents the configuration from the .ctxrc file. 
// // Fields: -// - ContextDir: Name of the context directory (default ".context") // - TokenBudget: Default token budget for context assembly (default 8000) // - PriorityOrder: Custom file loading priority order // - AutoArchive: Whether to auto-archive completed tasks (default true) // - ArchiveAfterDays: Days before archiving completed tasks (default 7) // - ScratchpadEncrypt: Whether to encrypt the scratchpad (default true) -// - AllowOutsideCwd: Skip boundary validation for -// external context dirs (default false) // - InjectionTokenWarn: Token threshold for oversize // injection warning (default 15000, 0 = disabled) // - ContextWindow: Context window size in tokens for @@ -63,13 +60,11 @@ import cfgMemory "github.com/ActiveMemory/ctx/internal/config/memory" type CtxRC struct { Profile string `yaml:"profile"` Tool string `yaml:"tool"` - ContextDir string `yaml:"context_dir"` TokenBudget int `yaml:"token_budget"` PriorityOrder []string `yaml:"priority_order"` AutoArchive bool `yaml:"auto_archive"` ArchiveAfterDays int `yaml:"archive_after_days"` ScratchpadEncrypt *bool `yaml:"scratchpad_encrypt"` - AllowOutsideCwd bool `yaml:"allow_outside_cwd"` EntryCountLearnings int `yaml:"entry_count_learnings"` EntryCountDecisions int `yaml:"entry_count_decisions"` ConventionLineCount int `yaml:"convention_line_count"` diff --git a/internal/rc/validate_test.go b/internal/rc/validate_test.go index abeff5bdb..748c32006 100644 --- a/internal/rc/validate_test.go +++ b/internal/rc/validate_test.go @@ -81,15 +81,13 @@ func TestValidate_EmptyFile(t *testing.T) { } func TestValidate_FullValidConfig(t *testing.T) { - data := []byte(`context_dir: .context -token_budget: 8000 + data := []byte(`token_budget: 8000 priority_order: - TASKS.md - DECISIONS.md auto_archive: true archive_after_days: 7 scratchpad_encrypt: true -allow_outside_cwd: false entry_count_learnings: 30 entry_count_decisions: 20 convention_line_count: 200 diff --git a/internal/rc/walk.go b/internal/rc/walk.go deleted 
file mode 100644 index 4c40b6910..000000000 --- a/internal/rc/walk.go +++ /dev/null @@ -1,118 +0,0 @@ -// / ctx: https://ctx.ist -// ,'`./ do you remember? -// `.,'\ -// \ Copyright 2026-present Context contributors. -// SPDX-License-Identifier: Apache-2.0 - -package rc - -import ( - "os" - "path/filepath" - "strings" - - cfgGit "github.com/ActiveMemory/ctx/internal/config/git" -) - -// walkForContextDir walks upward from the current working directory -// looking for an existing directory whose basename matches name. -// -// When a candidate is found above CWD, it is validated against the -// git root (if any). If the candidate falls outside the git root, -// it belongs to a different project and is discarded — the git root -// is used as the anchor instead. -// -// Absolute configured names skip the walk entirely. When no matching -// directory is found upward, returns the context directory anchored -// to the git root (if found) or filepath.Join(cwd, name) as an -// absolute path so that ctx init can create a fresh context directory -// at the current location. -// -// Parameters: -// - name: Configured context directory name (may be relative or absolute) -// -// Returns: -// - string: Absolute path to the resolved context directory -func walkForContextDir(name string) string { - if filepath.IsAbs(name) { - return name - } - - cwd, cwdErr := os.Getwd() - if cwdErr != nil { - return name - } - - // Walk upward looking for an existing context directory. - var candidate string - cur := cwd - for { - path := filepath.Join(cur, name) - if info, statErr := os.Stat(path); statErr == nil && info.IsDir() { - candidate = path - break - } - parent := filepath.Dir(cur) - if parent == cur { - break - } - cur = parent - } - - gitRoot := findGitRoot(cwd) - - // No candidate found — anchor to git root or CWD. 
- if candidate == "" { - if gitRoot != "" { - return filepath.Join(gitRoot, name) - } - return filepath.Join(cwd, name) - } - - // Candidate found in CWD itself — always valid. - candidateParent := filepath.Dir(candidate) - if candidateParent == cwd { - return candidate - } - - // Candidate found above CWD — validate against git root. - if gitRoot == "" { - // No git root to confirm ownership; don't trust the ancestor. - return filepath.Join(cwd, name) - } - - // Check whether the candidate is within the git root. - // Append separator to avoid "/foo/bar" matching "/foo/b". - root := gitRoot + string(os.PathSeparator) - if candidateParent == gitRoot || strings.HasPrefix(candidateParent, root) { - return candidate - } - - // Candidate is outside the git root — belongs to a different project. - // Anchor to the git root instead. - return filepath.Join(gitRoot, name) -} - -// findGitRoot walks upward from start looking for a .git entry -// (directory or file, to support worktrees). Returns the parent -// directory of the .git entry, or "" if none is found. -// -// Parameters: -// - start: Directory to start searching from -// -// Returns: -// - string: Absolute path to the git root, or "" if not found -func findGitRoot(start string) string { - cur := start - for { - gitPath := filepath.Join(cur, cfgGit.DotDir) - if _, statErr := os.Stat(gitPath); statErr == nil { - return cur - } - parent := filepath.Dir(cur) - if parent == cur { - return "" - } - cur = parent - } -} diff --git a/internal/rc/walk_test.go b/internal/rc/walk_test.go deleted file mode 100644 index 6c2b3198d..000000000 --- a/internal/rc/walk_test.go +++ /dev/null @@ -1,295 +0,0 @@ -// / ctx: https://ctx.ist -// ,'`./ do you remember? -// `.,'\ -// \ Copyright 2026-present Context contributors. 
-// SPDX-License-Identifier: Apache-2.0 - -package rc - -import ( - "os" - "path/filepath" - "testing" - - "github.com/ActiveMemory/ctx/internal/config/dir" -) - -func TestWalkForContextDir_GitAnchor(t *testing.T) { - // Parent workspace has .context, child project has .git but no .context. - // Walk should discard parent's .context and anchor to child's git root. - // - // workspace/ - // .context/ ← parent's context (should be ignored) - // child-project/ - // .git/ ← child's git root - // src/ ← CWD - tmp := t.TempDir() - workspace := filepath.Join(tmp, "workspace") - parentCtx := filepath.Join(workspace, dir.Context) - childProject := filepath.Join(workspace, "child-project") - childGit := filepath.Join(childProject, ".git") - childSrc := filepath.Join(childProject, "src") - - for _, d := range []string{parentCtx, childGit, childSrc} { - if err := os.MkdirAll(d, 0700); err != nil { - t.Fatalf("mkdir %s: %v", d, err) - } - } - - origDir, _ := os.Getwd() - _ = os.Chdir(childSrc) - defer func() { _ = os.Chdir(origDir) }() - - got := walkForContextDir(dir.Context) - - // Should anchor to child-project, not use parent's .context. - wantResolved, _ := filepath.EvalSymlinks(filepath.Join(childProject, dir.Context)) - gotResolved, _ := filepath.EvalSymlinks(got) - - if gotResolved != wantResolved { - t.Errorf("walkForContextDir() = %q, want %q", gotResolved, wantResolved) - } -} - -func TestWalkForContextDir_NoGit(t *testing.T) { - // No .git anywhere, parent has .context. - // Walk should fall through to cwd/.context. 
- // - // workspace/ - // .context/ ← parent's context (no git to confirm) - // child/ ← CWD - tmp := t.TempDir() - workspace := filepath.Join(tmp, "workspace") - parentCtx := filepath.Join(workspace, dir.Context) - child := filepath.Join(workspace, "child") - - for _, d := range []string{parentCtx, child} { - if err := os.MkdirAll(d, 0700); err != nil { - t.Fatalf("mkdir %s: %v", d, err) - } - } - - origDir, _ := os.Getwd() - _ = os.Chdir(child) - defer func() { _ = os.Chdir(origDir) }() - - got := walkForContextDir(dir.Context) - - wantResolved, _ := filepath.EvalSymlinks(filepath.Join(child, dir.Context)) - gotResolved, _ := filepath.EvalSymlinks(got) - - if gotResolved != wantResolved { - t.Errorf("walkForContextDir() = %q, want %q", gotResolved, wantResolved) - } -} - -func TestWalkForContextDir_SameGitRoot(t *testing.T) { - // .context and CWD share the same git root. - // Walk should return the found .context. - // - // project/ - // .git/ - // .context/ - // src/deep/ ← CWD - tmp := t.TempDir() - project := filepath.Join(tmp, "project") - projectGit := filepath.Join(project, ".git") - projectCtx := filepath.Join(project, dir.Context) - deep := filepath.Join(project, "src", "deep") - - for _, d := range []string{projectGit, projectCtx, deep} { - if err := os.MkdirAll(d, 0700); err != nil { - t.Fatalf("mkdir %s: %v", d, err) - } - } - - origDir, _ := os.Getwd() - _ = os.Chdir(deep) - defer func() { _ = os.Chdir(origDir) }() - - got := walkForContextDir(dir.Context) - - wantResolved, _ := filepath.EvalSymlinks(projectCtx) - gotResolved, _ := filepath.EvalSymlinks(got) - - if gotResolved != wantResolved { - t.Errorf("walkForContextDir() = %q, want %q", gotResolved, wantResolved) - } -} - -func TestWalkForContextDir_GitWorktreeFile(t *testing.T) { - // .git is a file (worktree), not a directory. - // Should still be detected as git root. 
- // - // project/ - // .git ← file (worktree marker) - // .context/ - // src/ ← CWD - tmp := t.TempDir() - project := filepath.Join(tmp, "project") - projectCtx := filepath.Join(project, dir.Context) - src := filepath.Join(project, "src") - - for _, d := range []string{projectCtx, src} { - if err := os.MkdirAll(d, 0700); err != nil { - t.Fatalf("mkdir %s: %v", d, err) - } - } - - // Create .git as a file (like git worktrees do). - gitFile := filepath.Join(project, ".git") - if err := os.WriteFile(gitFile, []byte("gitdir: /some/other/path\n"), 0600); err != nil { - t.Fatalf("write .git file: %v", err) - } - - origDir, _ := os.Getwd() - _ = os.Chdir(src) - defer func() { _ = os.Chdir(origDir) }() - - got := walkForContextDir(dir.Context) - - wantResolved, _ := filepath.EvalSymlinks(projectCtx) - gotResolved, _ := filepath.EvalSymlinks(got) - - if gotResolved != wantResolved { - t.Errorf("walkForContextDir() = %q, want %q", gotResolved, wantResolved) - } -} - -func TestWalkForContextDir_NothingFound_GitRoot(t *testing.T) { - // No .context anywhere, but .git exists. - // Walk should anchor to git root. - // - // project/ - // .git/ - // src/ ← CWD - tmp := t.TempDir() - project := filepath.Join(tmp, "project") - projectGit := filepath.Join(project, ".git") - src := filepath.Join(project, "src") - - for _, d := range []string{projectGit, src} { - if err := os.MkdirAll(d, 0700); err != nil { - t.Fatalf("mkdir %s: %v", d, err) - } - } - - origDir, _ := os.Getwd() - _ = os.Chdir(src) - defer func() { _ = os.Chdir(origDir) }() - - got := walkForContextDir(dir.Context) - - wantResolved, _ := filepath.EvalSymlinks(filepath.Join(project, dir.Context)) - gotResolved, _ := filepath.EvalSymlinks(got) - - if gotResolved != wantResolved { - t.Errorf("walkForContextDir() = %q, want %q", gotResolved, wantResolved) - } -} - -func TestWalkForContextDir_CWDHasContext(t *testing.T) { - // .context exists in CWD — should always use it regardless of git. 
- // - // workspace/ - // .context/ - // child/ - // .context/ ← CWD has its own - tmp := t.TempDir() - workspace := filepath.Join(tmp, "workspace") - parentCtx := filepath.Join(workspace, dir.Context) - child := filepath.Join(workspace, "child") - childCtx := filepath.Join(child, dir.Context) - - for _, d := range []string{parentCtx, childCtx} { - if err := os.MkdirAll(d, 0700); err != nil { - t.Fatalf("mkdir %s: %v", d, err) - } - } - - origDir, _ := os.Getwd() - _ = os.Chdir(child) - defer func() { _ = os.Chdir(origDir) }() - - got := walkForContextDir(dir.Context) - - wantResolved, _ := filepath.EvalSymlinks(childCtx) - gotResolved, _ := filepath.EvalSymlinks(got) - - if gotResolved != wantResolved { - t.Errorf("walkForContextDir() = %q, want %q", gotResolved, wantResolved) - } -} - -func TestWalkForContextDir_NestedGitRepos(t *testing.T) { - // Inner git repo (like a submodule) should use inner git root, - // rejecting outer project's .context. - // - // outer/ - // .git/ - // .context/ - // vendor/inner/ - // .git/ ← inner git root - // src/ ← CWD - tmp := t.TempDir() - outer := filepath.Join(tmp, "outer") - outerGit := filepath.Join(outer, ".git") - outerCtx := filepath.Join(outer, dir.Context) - inner := filepath.Join(outer, "vendor", "inner") - innerGit := filepath.Join(inner, ".git") - innerSrc := filepath.Join(inner, "src") - - for _, d := range []string{outerGit, outerCtx, innerGit, innerSrc} { - if err := os.MkdirAll(d, 0700); err != nil { - t.Fatalf("mkdir %s: %v", d, err) - } - } - - origDir, _ := os.Getwd() - _ = os.Chdir(innerSrc) - defer func() { _ = os.Chdir(origDir) }() - - got := walkForContextDir(dir.Context) - - // Should anchor to inner git root, not use outer's .context. 
- wantResolved, _ := filepath.EvalSymlinks(filepath.Join(inner, dir.Context)) - gotResolved, _ := filepath.EvalSymlinks(got) - - if gotResolved != wantResolved { - t.Errorf("walkForContextDir() = %q, want %q", gotResolved, wantResolved) - } -} - -func TestFindGitRoot_Found(t *testing.T) { - tmp := t.TempDir() - project := filepath.Join(tmp, "project") - gitDir := filepath.Join(project, ".git") - deep := filepath.Join(project, "a", "b", "c") - - for _, d := range []string{gitDir, deep} { - if err := os.MkdirAll(d, 0700); err != nil { - t.Fatalf("mkdir %s: %v", d, err) - } - } - - got := findGitRoot(deep) - wantResolved, _ := filepath.EvalSymlinks(project) - gotResolved, _ := filepath.EvalSymlinks(got) - - if gotResolved != wantResolved { - t.Errorf("findGitRoot() = %q, want %q", gotResolved, wantResolved) - } -} - -func TestFindGitRoot_NotFound(t *testing.T) { - tmp := t.TempDir() - noGit := filepath.Join(tmp, "no-git", "deep") - if err := os.MkdirAll(noGit, 0700); err != nil { - t.Fatalf("mkdir: %v", err) - } - - got := findGitRoot(noGit) - if got != "" { - t.Errorf("findGitRoot() = %q, want empty", got) - } -} diff --git a/internal/sanitize/doc.go b/internal/sanitize/doc.go index e4be5cf17..426ec4798 100644 --- a/internal/sanitize/doc.go +++ b/internal/sanitize/doc.go @@ -4,10 +4,30 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package sanitize transforms untrusted input into safe values. +// Package sanitize transforms untrusted input into +// safe values suitable for use as filesystem names. // -// Unlike validation (which rejects bad input), sanitization mutates -// input to conform to constraints. [Filename] converts arbitrary -// strings into safe filename components. -// Part of the internal subsystem. +// Unlike validation (which rejects bad input), +// sanitization mutates input to conform to +// constraints. The result is always usable; the +// caller never needs to handle an error. 
+// +// # Public Surface +// +// - [Filename] converts an arbitrary topic string +// into a safe filename component: replaces spaces +// and special characters with hyphens via +// [regex.FileNameChar], strips leading and +// trailing hyphens, converts to lowercase, and +// limits the result to 50 characters. Returns +// "session" if the input is empty after cleaning. +// +// # Design +// +// The function is idempotent: sanitizing an already- +// safe string returns it unchanged. It uses config +// constants for the replacement character, max length, +// and default fallback rather than hardcoded literals. +// +// All functions are pure and safe for concurrent use. package sanitize diff --git a/internal/skill/doc.go b/internal/skill/doc.go index 94fdf6958..16a0897b2 100644 --- a/internal/skill/doc.go +++ b/internal/skill/doc.go @@ -4,9 +4,70 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package skill manages reusable instruction bundles with YAML frontmatter. +// Package skill manages **reusable instruction bundles**: +// the `SKILL.md` + supporting-files trees that ship under +// `.claude/skills//` and tell an AI tool how to perform +// a recurring workflow. // -// Key exports: [Install], [LoadAll], [Load], [Remove]. -// See source files for implementation details. -// Part of the internal subsystem. +// A skill is a self-contained directory: +// +// skills// +// SKILL.md # YAML frontmatter + instructions +// references/... # optional supporting docs +// # optional supporting script +// +// The package's job is to **install, list, load, and remove** +// these bundles. It does not execute them; execution is the +// AI tool's responsibility (Claude Code, Copilot CLI, etc.). +// +// # The Frontmatter Schema +// +// Each `SKILL.md` declares a [Manifest] in YAML +// frontmatter: +// +// - **name**: globally unique identifier; used as the +// directory name and as the slash-command alias. 
+// - **description**: one-line trigger phrase the AI uses +// to decide when to invoke the skill. +// - **tools**: Copilot-style allowed-tools list (`bash`, +// `read`, `write`, `edit`, `glob`, `grep`). +// - **allowed-tools**: Claude-Code-style permission +// scopes (`Bash(ctx:*)`, `Read`, etc.). +// +// [manifest.go] parses and validates the frontmatter; +// missing required fields produce a typed error from +// [internal/err/skill] that names the file path. +// +// # Public Surface +// +// - **[Install]**: copies a source skill directory into +// the target `skillsDir//`. Refuses to overwrite +// an existing skill (the user must `Remove` first); use +// `--force` at the CLI for replacement. +// - **[Load]**: reads one skill by name, returns its +// full [Skill] with manifest + body + path. +// - **[LoadAll]**: walks the skills directory, returns +// every loadable skill. Skills that fail to parse are +// reported in the error slice rather than aborting the +// load. +// - **[Remove]**: deletes a skill directory after +// verifying it lives under the canonical skills +// directory (boundary check guards against `..` +// escape). +// +// # File-Copy Semantics +// +// [copy.go] does the recursive copy with three rules: +// +// 1. **Preserve mode bits**: executable scripts stay +// executable. +// 2. **Skip dotfiles at the source root**: +// `.DS_Store`, `.git`, etc. never end up installed. +// 3. **Validate the destination** lies within the +// skills-dir boundary. +// +// # Concurrency +// +// All operations are filesystem-bound and stateless. +// Callers serialize through process-level execution. package skill diff --git a/internal/steering/doc.go b/internal/steering/doc.go index 5bf9d212b..dfd1f1429 100644 --- a/internal/steering/doc.go +++ b/internal/steering/doc.go @@ -4,9 +4,120 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package steering manages behavioral guidance files with YAML frontmatter. 
+// Package steering parses, scores, and synchronizes +// **steering files**, the small frontmattered Markdown +// documents under `.context/steering/` that tell each +// configured AI tool *how to behave* when a specific kind of +// prompt arrives. // -// Key exports: [Parse], [Print], [Filter], [LoadAll], [SyncTool], [SyncAll]. -// See source files for implementation details. -// Part of the internal subsystem. +// Steering is the declarative half of ctx's behavior layer +// (the imperative half is [internal/trigger]: scripts that +// *do* things on lifecycle events). A steering file says +// "when the user asks about Y, prepend these rules to the +// prompt"; a trigger says "when X happens, run this code." +// +// # The Steering File +// +// Each `.md` file under the steering directory is a +// [SteeringFile]: a short YAML frontmatter block followed by +// a Markdown body. The schema: +// +// - **name**: unique identifier; matches the manual +// selector in `ctx steering preview --names ...`. +// - **description**: one-line summary; doubles as the +// match phrase for [cfgSteering.InclusionAuto]. +// - **inclusion**: `always` | `auto` | `manual` +// ([cfgSteering.InclusionMode]). Default `manual`. +// - **tools**: list of AI-tool IDs the file applies to; +// empty/nil means "all tools". +// - **priority**: injection order; lower priority is +// injected earlier (default 50). +// +// [Parse] reads bytes + a path and returns a fully populated +// [SteeringFile] with defaults applied; YAML errors are wrapped +// via [internal/err/steering] so the file path is always part +// of the message. [LoadAll] is the bulk variant that walks a +// directory. +// +// # The Inclusion Modes +// +// Three modes determine when a file's body is appended to the +// next prompt: +// +// - **always**: every prompt, every turn, no questions. +// Heaviest on context budget; reserve for genuinely +// foundational rules. 
+// - **auto**: included when the lowercased prompt contains +// the file's lowercased description (substring match; +// simple, deterministic, fast). The most common mode for +// project-specific guidance. +// - **manual**: only when the file's name appears in the +// `manualNames` argument to [Filter] / [matchInclusion]. +// Used by `ctx steering preview --names ...` and by the +// MCP `steering_get` tool. +// +// [matchInclusion] does the per-file decision; [matchTool] +// adds tool-scope filtering on top. [Filter] composes the two +// against a list of files for a given (prompt, tool, manual) +// triple. +// +// # Two Tool Families, Two Delivery Paths +// +// Not every AI editor consumes steering the same way; ctx +// handles two families: +// +// - **Native-rules tools**: Cursor, Cline, Kiro +// ([syncableTools]) have a built-in rules primitive +// (`.cursor/rules/*.mdc`, `.clinerules`, +// `.kiro/steering/*.md`). [SyncTool] writes +// ctx-managed `.context/steering/*.md` into each tool's +// native format. [SyncAll] does this for every supported +// tool in one call. Idempotent: unchanged content is +// skipped. +// - **Hook-driven tools**: Claude Code and Codex use +// `ctx agent` to assemble the context packet on every +// prompt; their steering arrives via the agent pipeline +// (no file sync). They are deliberately **not** in +// [syncableTools]; calling `SyncTool` for them returns +// [errSteering.UnsupportedTool]. +// +// Mixed setups (project uses both Cursor and Claude Code) +// run `ctx steering sync` for the native-rules tools and let +// the hook+MCP pipeline cover Claude Code automatically. See +// `docs/home/steering.md` for the user-facing summary of this +// split. +// +// # Foundation Files +// +// `ctx init` scaffolds four foundation steering files +// (`product`, `tech`, `structure`, `workflow`) so users have +// real templates to edit instead of an empty directory. 
+// [FoundationFiles] returns the set; bodies and descriptions +// come from YAML text assets at call time so they stay in sync +// with the embedded copy. Re-running `ctx init` is safe: +// existing files are left alone. +// +// # Format Adapters +// +// Each native tool needs a slightly different frontmatter +// shape: +// +// - [cursorFrontmatter]: `description`, `globs`, +// `alwaysApply`. +// - [kiroFrontmatter]: `name`, `description`, `mode`. +// - Cline takes plain Markdown with no frontmatter. +// +// [format.go] holds the per-tool serializers; the unexported +// types in [types.go] keep the YAML shape decoupled from the +// canonical [SteeringFile]. +// +// # Concurrency and Idempotency +// +// Functions are stateless. [SyncTool] reads from the steering +// directory, computes the desired output for each file, +// compares it to what is on disk, and writes only the +// changed files, so running it twice in a row produces no +// `Written` entries the second time, just `Skipped`. Output +// paths are validated to resolve within `projectRoot` before +// writing. package steering diff --git a/internal/steering/frontmatter.go b/internal/steering/frontmatter.go index 81aa79b56..9d35bdf9a 100644 --- a/internal/steering/frontmatter.go +++ b/internal/steering/frontmatter.go @@ -22,5 +22,5 @@ func applyDefaults(sf *SteeringFile) { if sf.Priority == 0 { sf.Priority = cfgSteering.DefaultPriority } - // Tools: nil means all tools — no default needed. + // Tools: nil means all tools, no default needed. } diff --git a/internal/steering/sync.go b/internal/steering/sync.go index ae67db202..fa25a952e 100644 --- a/internal/steering/sync.go +++ b/internal/steering/sync.go @@ -128,7 +128,7 @@ func SyncAll( // StaleFiles returns the names of steering files whose synced // tool-native output differs from what SyncTool would produce. -// This is a read-only check — no files are written. +// This is a read-only check; no files are written. 
// // Returns nil if no stale files are found or if the steering // directory cannot be read. diff --git a/internal/sysinfo/doc.go b/internal/sysinfo/doc.go index ddea198a3..e4f5cfb81 100644 --- a/internal/sysinfo/doc.go +++ b/internal/sysinfo/doc.go @@ -4,10 +4,62 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package sysinfo gathers OS-level resource metrics (memory, swap, disk, load) -// and evaluates them against configurable thresholds to produce alerts at -// WARNING and DANGER severity levels. +// Package sysinfo gathers OS-level resource metrics (memory, +// swap, disk, load average) and evaluates them against +// configurable thresholds to produce alerts at **WARNING** and +// **DANGER** severity levels. // -// Platform support uses build tags: Linux reads /proc, macOS shells out to -// sysctl/vm_stat, and other platforms return Supported: false gracefully. +// The package powers two surfaces: +// +// - **`ctx sysinfo`**: the top-level user-facing CLI that +// prints a snapshot of host resources. +// - **`ctx system check_resource`**: the hook that fires a +// pressure warning during sessions when load, memory, or +// disk crosses a danger threshold. +// +// # Per-Platform Implementations +// +// Resource collection is **platform-conditional** via Go build +// tags so the binary stays a single static cross-compile while +// still asking each OS in its native dialect: +// +// - **Linux**: reads `/proc/meminfo` and `/proc/loadavg` +// directly ([memory_linux.go], [load_linux.go]). +// - **macOS / Darwin**: shells out to `sysctl -n vm.loadavg` +// and `vm_stat` and parses their output +// ([memory_darwin.go], [load_darwin.go]). +// - **Other / Windows**: stubs that return +// `Supported: false` ([memory_other.go], [load_other.go], +// [disk_windows.go]). The hook degrades gracefully rather +// than aborting the session. 
+// +// Disk usage is read uniformly via `syscall.Statfs` on +// Unix-likes ([disk.go]) and stubbed on Windows. +// +// # Threshold Evaluation +// +// [threshold.go] holds the WARNING / DANGER cutoffs and the +// per-metric evaluator that turns a raw measurement into a +// severity. Defaults reflect "headroom you almost certainly +// want": load averages compared against CPU count, memory +// available below a percentage, disk free below a percentage. +// The 5-minute load average, not the 1-minute, is used to +// avoid false positives from transient spikes (a deliberate +// behavior, see commit `5958e558`). +// +// # The Output Shape +// +// [Resource] ([types.go]) is the unified record emitted by +// each collector: kind, value, unit, threshold, severity, +// support flag. [calc.go] holds the per-metric arithmetic +// (percent free, ratio computations) with explicit +// zero-division guards. +// +// # Concurrency +// +// Each call to a collector is a one-shot syscall + parse; +// nothing is cached at the package level. Concurrent callers +// produce independent readings. The `vm_stat` / `sysctl` +// shell-out path on macOS uses an external process which is +// the slowest case (~tens of milliseconds). package sysinfo diff --git a/internal/sysinfo/threshold.go b/internal/sysinfo/threshold.go index 2fd9da804..d3129fa78 100644 --- a/internal/sysinfo/threshold.go +++ b/internal/sysinfo/threshold.go @@ -96,7 +96,7 @@ func Evaluate(snap Snapshot) []ResourceAlert { } } - // Load (5m) — 5-minute average smooths transient build/test spikes. + // Load (5m): 5-minute average smooths transient build/test spikes. 
if snap.Load.Supported && snap.Load.NumCPU > 0 { ratio := snap.Load.Load5 / float64(snap.Load.NumCPU) msg := fmt.Sprintf(desc.Text(text.DescKeyResourcesAlertLoad), ratio) diff --git a/internal/task/doc.go b/internal/task/doc.go index 612aff664..38c829d5d 100644 --- a/internal/task/doc.go +++ b/internal/task/doc.go @@ -4,9 +4,51 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package task provides task item parsing, matching, and domain logic. +// Package task is the pure-logic core behind every +// operation against TASKS.md lines: parsing one task +// line into its components, classifying it as +// completed or pending, measuring its indent, and +// extracting its human-readable content. // -// Key exports: [Completed], [Pending], [Indent], [Content], [Sub]. -// See source files for implementation details. -// Part of the internal subsystem. +// # Public Surface +// +// - [Completed]: true when the match represents +// a checked task (- [x] ...). +// - [Pending]: true when the match represents +// an unchecked task (- [ ] ...). +// - [Indent]: returns the leading whitespace +// from a match, used to determine top-level +// versus nested tasks. +// - [Content]: returns the task text from a +// match, stripping the checkbox prefix. +// - [Sub]: reports whether a match represents +// a subtask (indented 2+ spaces). +// +// All functions operate on the result of +// ItemPattern.FindStringSubmatch, using the match +// index constants [MatchIndent], [MatchState], and +// [MatchContent]. +// +// # Why a Separate Package +// +// Five callers need the same predicates and the same +// definition of what counts as a task line. Hoisting +// them here means the spec lives in one place and +// the audit suite catches duplication. 
+// +// # Format Reference +// +// Task lines follow the canonical shape established +// by [internal/assets/tpl.Task]: +// +// - [ ] Implement rate limiting #priority:high +// #session:abc1 #branch:main #added:2026-04-12 +// +// Continuation indents are not separate tasks; the +// parsers treat them as belonging to the parent task body. +// +// # Concurrency +// +// All functions are pure. Concurrent callers never +// race. package task diff --git a/internal/task/task.go b/internal/task/task.go index 3e542e419..504826990 100644 --- a/internal/task/task.go +++ b/internal/task/task.go @@ -4,10 +4,6 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package task provides task item parsing and matching. -// -// This package handles the domain logic for task items, independent of -// their Markdown representation. package task import ( diff --git a/internal/testutil/testctx/doc.go b/internal/testutil/testctx/doc.go new file mode 100644 index 000000000..e7879e3a1 --- /dev/null +++ b/internal/testutil/testctx/doc.go @@ -0,0 +1,23 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package testctx provides helpers for exercising ctx commands in +// tests under the explicit-context-dir resolution model (spec: +// specs/explicit-context-dir.md). +// +// Under that model [rc.ContextDir] returns "" unless the caller has +// declared a context directory via --context-dir or CTX_DIR. Tests +// that chain multiple ctx commands in the same process (e.g., +// `ctx init` followed by `ctx add`) must therefore declare CTX_DIR +// before any non-exempt command runs, and must reset rc state between +// test cases so process-global overrides do not leak. +// +// [Declare] is the one-stop helper: it points CTX_DIR at +// `<tempDir>/.context`, resets rc, and registers an end-of-test reset +// via `t.Cleanup`. 
Callers still need to run `ctx init` (or +// materialize .context/ themselves); Declare only wires the +// environment. +package testctx diff --git a/internal/testutil/testctx/testctx.go b/internal/testutil/testctx/testctx.go new file mode 100644 index 000000000..daae95fb7 --- /dev/null +++ b/internal/testutil/testctx/testctx.go @@ -0,0 +1,55 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package testctx + +import ( + "path/filepath" + "testing" + + "github.com/ActiveMemory/ctx/internal/config/dir" + "github.com/ActiveMemory/ctx/internal/config/env" + "github.com/ActiveMemory/ctx/internal/rc" +) + +// Declare wires CTX_DIR to <tempDir>/.context, redirects HOME to +// tempDir so user-home writes (e.g. ~/.claude/settings.json) stay +// inside the temp tree, resets rc state, and returns the absolute +// path that CTX_DIR now points to. +// +// HOME isolation matters because `ctx init` reads and writes +// ~/.claude/settings.json. Without isolation, parallel `go test +// ./...` packages all read-modify-write the same real file and race. +// +// Typical pattern: +// +// tmpDir := t.TempDir() +// t.Chdir(tmpDir) +// ctxPath := testctx.Declare(t, tmpDir) +// _ = initialize.Cmd().Execute() // materialize .context/ +// // subsequent ctx commands in the same process resolve to ctxPath +// +// Declare does NOT create the directory; that is the caller's +// responsibility, typically via `ctx init`. Tests that only need the +// environment declared (without materializing .context/) can skip the +// init step. +// +// Parameters: +// - t: test handle (required for t.Setenv / t.Cleanup). +// - tempDir: absolute path to the per-test temp directory, usually +// the value returned by t.TempDir(). +// +// Returns: +// - string: absolute path `<tempDir>/.context`. 
+func Declare(t *testing.T, tempDir string) string { + t.Helper() + ctxDir := filepath.Join(tempDir, dir.Context) + t.Setenv(env.CtxDir, ctxDir) + t.Setenv(env.Home, tempDir) + rc.Reset() + t.Cleanup(rc.Reset) + return ctxDir +} diff --git a/internal/tidy/archive.go b/internal/tidy/archive.go index 89c570fdd..656918e58 100644 --- a/internal/tidy/archive.go +++ b/internal/tidy/archive.go @@ -36,7 +36,11 @@ import ( // - string: Path to the written archive file // - error: If creating the archive directory or writing fails func WriteArchive(prefix, heading, content string) (string, error) { - archiveDir := filepath.Join(rc.ContextDir(), dir.Archive) + ctxDir, ctxErr := rc.ContextDir() + if ctxErr != nil { + return "", ctxErr + } + archiveDir := filepath.Join(ctxDir, dir.Archive) if mkErr := io.SafeMkdirAll(archiveDir, fs.PermExec); mkErr != nil { return "", errBackup.CreateArchiveDir(mkErr) } diff --git a/internal/tidy/doc.go b/internal/tidy/doc.go index 2a9ea5ca1..264c33240 100644 --- a/internal/tidy/doc.go +++ b/internal/tidy/doc.go @@ -4,11 +4,70 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package tidy provides shared helpers for context file maintenance:. +// Package tidy provides the **archive and compact** primitives +// that keep `.context/` files lean as a project ages, moving +// completed tasks into dated archive files, sweeping empty +// sections, and reorganizing TASKS.md without losing +// provenance. // -// Key exports: [WriteArchive], [ParseTaskBlocks], -// [RemoveBlocksFromLines], [CompactContext], -// [RemoveEmptySections]. -// See source files for implementation details. -// Part of the internal subsystem. +// The package is the *engine*; the user-facing surface is +// `ctx task archive`, `ctx compact`, and the `_ctx-archive` +// skill. All three call into the helpers here so the rules are +// applied identically regardless of caller. 
+// +// # The Archive Pipeline +// +// [WriteArchive](contextDir) is the top-level entry point. +// Behavior: +// +// 1. **Parse** TASKS.md into [TaskBlock] records via +// [ParseTaskBlocks]. Only **top-level** tasks +// (`indent == 0`) marked `[x]` are candidates; +// nested subtasks ride along with their parent. +// 2. **Group** archived tasks by Phase header so the +// archive file preserves the same Phase structure as +// the source (a constitutional invariant; Phase +// identity must survive archival). +// 3. **Write** the archive to +// `.context/archive/tasks-YYYY-MM-DD.md`, creating +// the directory if needed. If today's archive file +// already exists, the new content is *appended*, not +// overwritten. +// 4. **Remove** the archived blocks from TASKS.md via +// [RemoveBlocksFromLines]; the rewriter operates on +// the raw line slice so byte offsets stay aligned. +// +// # Compact and Sanitize +// +// [CompactContext] runs the broader cleanup that `ctx +// compact` performs: archives done tasks **and** sweeps +// empty H2/H3 sections via [RemoveEmptySections] so the +// file does not accumulate dangling headers after every +// archival round. [sanitize.go] holds the helpers that +// trim trailing whitespace, normalize blank-line runs to +// at-most-one, and ensure the file ends with a single +// newline. +// +// # Pure-Logic Core +// +// [block.go] and [parse.go] form the pure-logic core: no +// IO, no time, no flags. They take `[]string` and return +// `[]TaskBlock` / new `[]string`. This split makes the +// archival math testable in isolation; the only IO sits +// in [archive.go] and [compact.go] at the boundary. +// +// # Constitutional Invariants Honored +// +// The CONSTITUTION.md rule "Archival is allowed, deletion +// is not" is enforced at this layer: archival never drops +// content; archive files preserve Phase headers; and +// compaction refuses to touch an entry that has not been +// explicitly marked complete. 
+// +// # Concurrency +// +// All functions are stateless. Callers serialize through +// process-level execution; concurrent invocations against +// the same context dir would race on file writes (no +// locking is implemented). package tidy diff --git a/internal/trace/collect.go b/internal/trace/collect.go index 61d7a94a7..d64dbdaf6 100644 --- a/internal/trace/collect.go +++ b/internal/trace/collect.go @@ -16,8 +16,8 @@ import ( cfgTrace "github.com/ActiveMemory/ctx/internal/config/trace" ) -// Collect gathers context refs from all three sources — pending records, -// staged file diffs, and current working state — then deduplicates them. +// Collect gathers context refs from all three sources (pending records, +// staged file diffs, and current working state), then deduplicates them. // // Parameters: // - contextDir: absolute path to the .context/ directory diff --git a/internal/trace/doc.go b/internal/trace/doc.go index cb8cbcfb6..883057436 100644 --- a/internal/trace/doc.go +++ b/internal/trace/doc.go @@ -4,12 +4,114 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package trace provides commit context tracing — linking git commits -// back to the decisions, tasks, learnings, and sessions that motivated them. -// -// Key exports: [Collect], [FormatTrailer], [Record], [Resolve], [ShortHash], -// [ReadHistory], [WriteHistory], [ReadOverrides], [WriteOverride], -// [CollectRefsForCommit], [ResolveCommitHash], [CommitMessage], [CommitDate]. -// See source files for implementation details. -// Part of the internal subsystem. +// Package trace implements **commit context tracing**: the layer +// that links a git commit back to the decisions, learnings, +// conventions, tasks, and AI sessions that motivated it. +// +// The point is to make `git log -p` answer not just "what +// changed" but "*why* it changed", without forcing the developer +// to write hand-curated provenance every time. 
The package +// gathers context references from three sources at commit time, +// renders them as a structured git trailer, and persists a +// per-commit history record so the link survives even when the +// commit message is later squashed or rewritten. +// +// # Reference Format +// +// A "ref" is a short, parseable string that points at one +// concrete piece of context: +// +// - `decision:12`: DECISIONS.md entry #12 +// - `learning:7`: LEARNINGS.md entry #7 +// - `convention:3`: CONVENTIONS.md entry #3 +// - `task:8`: TASKS.md item #8 +// - `session:abc`: AI session ID `abc` +// - `"free note"`: quoted free-form note +// +// [parseRef] turns a string into (type, number, text); [Resolve] +// looks up the entry and returns a [ResolvedRef] populated with +// the entry title and a one-line detail preview. +// +// # The Three-Source Collection +// +// [Collect] runs at commit time (typically from a `prepare- commit-msg` hook) and gathers refs from three independent +// sources, **in this order**, then deduplicates while preserving +// first-occurrence order: +// +// 1. **Pending records**: refs that were explicitly staged +// ahead of time via `ctx trace tag` and stored as +// [PendingEntry] in `state/trace-pending.jsonl`. Cleared +// after the commit lands. +// 2. **Staged file diffs**: [StagedRefs] runs `git diff +// --cached` on each of DECISIONS.md, LEARNINGS.md, +// CONVENTIONS.md and parses **added entries** into refs of +// the matching type. For TASKS.md it parses **completed +// tasks** (lines that flipped from `[ ]` to `[x]`). This is +// the source that catches "I just wrote a new decision and +// committed it" without any tagging. +// 3. **Working state**: [WorkingRefs] adds in-progress task +// refs (from TASKS.md) plus a `session:` ref derived +// from `$CTX_SESSION_ID` when an AI session is active. 
+// +// First-source-wins ordering means a ref a developer explicitly +// pinned via `ctx trace tag` always shows up before one auto- detected from a diff. +// +// # The Trailer +// +// [FormatTrailer] turns a `[]string` of refs into a single git +// trailer line of the form: +// +// ctx-context: decision:12, task:8, session:abc +// +// Empty input produces an empty string (no trailer is written). +// The trailer is appended to the commit message by the +// `prepare-commit-msg` hook installed by `ctx trace hook +// enable`. +// +// # Persistence +// +// Two append-only JSONL stores live under `state/`: +// +// - **history.jsonl**: one [HistoryEntry] per commit: +// full commit hash, the refs that were attached, the +// commit message, and a UTC timestamp. Written by the +// `post-commit` hook so the link survives later message +// edits or squashes. +// - **overrides.jsonl**: [OverrideEntry] records that let a +// human pin a different set of refs to a commit after the +// fact (`ctx trace tag --note "..."`). Resolution +// prefers the most recent override over the original +// history entry. +// +// Both files are read with [ReadHistory] / [ReadOverrides]; both +// silently skip malformed lines so a corrupt tail does not +// break query commands. [WriteHistory] / [WriteOverride] use +// [appendJSONL] which creates the parent directory on demand and +// stamps a UTC timestamp when the caller leaves it zero. +// +// # Resolution +// +// The CLI side (`ctx trace <commit>`, `ctx trace file <path>`) +// asks the package to **resolve** raw refs back to human +// information: +// +// - [Resolve](ref, contextDir) → [ResolvedRef] with title and +// one-line preview (or `Found: false` for stale refs). +// - [CollectRefsForCommit] picks the ref set for a given +// commit, preferring override → history. +// - [ResolveCommitHash] takes a short hash, abbrev, or +// ref-like string and returns the full SHA via `git +// rev-parse`. 
+// - [CommitMessage] / [CommitDate] are thin `git log` wrappers +// used to render the trace output. +// +// # Concurrency and Safety +// +// All filesystem operations go through [appendJSONL] / +// [readJSONL]; writes are append-only so concurrent commits in +// quick succession (rare but possible with parallel worktrees) +// produce interleaved-but-valid JSONL. The package holds no +// process-wide state. package trace diff --git a/internal/trace/git.go b/internal/trace/git.go index cf40c3e8b..a25468928 100644 --- a/internal/trace/git.go +++ b/internal/trace/git.go @@ -135,7 +135,7 @@ func CollectRefsForCommit( all = append(all, entry.Refs...) } - // Source 2: git trailers (optional — slow for bulk operations) + // Source 2: git trailers (optional, slow for bulk operations) if includeTrailers { all = append(all, ReadTrailerRefs(commitHash)...) } diff --git a/internal/trace/staged_parse.go b/internal/trace/staged_parse.go index 57fdbb612..bd81f3be7 100644 @@ -99,7 +99,7 @@ func parseCompletedTasks(diff string) []string { return refs } -// stagedDiff runs git diff --cached -- filePath and returns the output. +// stagedDiff runs `git diff --cached -- filePath` and returns the output. // Returns an empty string on any error (best-effort). // // Parameters: diff --git a/internal/trigger/doc.go b/internal/trigger/doc.go index 0b5c7b621..67b1cf9b4 100644 --- a/internal/trigger/doc.go +++ b/internal/trigger/doc.go @@ -4,10 +4,64 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package trigger manages lifecycle automation scripts for AI tool events. +// Package trigger implements ctx's lifecycle +// automation layer: project-owned shell scripts that +// run when an AI session crosses a defined boundary. // -// Key exports: [Discover], [FindByName], [RunAll], -// [ValidatePath], [ValidTypes]. -// See source files for implementation details. 
-// Part of the internal subsystem. +// # Trigger Types +// +// Six lifecycle events are supported: +// +// - session-start: a new AI session begins. +// - session-end: an AI session ends. +// - pre-tool-use: before a tool call; can block +// the call via cancel:true. +// - post-tool-use: after a tool call completes. +// - file-save: a file is saved. +// - context-add: a new entry was added to +// .context/. +// +// Each script receives a JSON [HookInput] on stdin +// and emits a JSON [HookOutput] on stdout. +// +// # Discovery +// +// [Discover] scans .context/hooks/<type>/ and returns +// one [HookInfo] per script, sorted alphabetically. +// The executable permission bit controls whether a +// hook is enabled. [FindByName] locates a single +// script by its stem for enable/disable operations. +// +// # Security +// +// Triggers run with the same privileges as the AI +// tool. The package enforces a strict workflow: +// +// 1. ctx trigger add creates scripts without the +// executable bit (inert until reviewed). +// 2. ctx trigger enable sets the bit after +// [ValidatePath] passes. +// 3. [ValidatePath] rejects symlinks, paths that +// escape the hooks directory, and files lacking +// the executable bit. +// +// # Execution +// +// [RunAll] runs every enabled hook for a given type +// in alphabetical order. Per-hook behavior: +// +// - cancel:true halts the chain immediately. +// - Non-empty context is appended to the aggregate. +// - Non-zero exit is logged and recorded but does +// not abort the chain. +// - Timeout exceeded kills the process group. +// +// The default timeout is 10 seconds +// ([DefaultTimeout]). +// +// # Concurrency +// +// No mutable global state. [RunAll] runs hooks +// sequentially within a single invocation; concurrent +// invocations from different goroutines are safe. 
package trigger diff --git a/internal/trigger/security.go b/internal/trigger/security.go index fa4bcbae6..d1b1fa7c7 100644 --- a/internal/trigger/security.go +++ b/internal/trigger/security.go @@ -40,7 +40,7 @@ func ValidatePath(hooksDir, hookPath string) error { return errTrigger.Symlink(hookPath) } - // 2. Boundary check — hookPath must resolve within hooksDir. + // 2. Boundary check: hookPath must resolve within hooksDir. absHooksDir, absHooksDirErr := filepath.Abs(hooksDir) if absHooksDirErr != nil { return errTrigger.ResolveHooksDir(hooksDir, absHooksDirErr) diff --git a/internal/validate/doc.go b/internal/validate/doc.go index da08dd85d..33fcf3b65 100644 --- a/internal/validate/doc.go +++ b/internal/validate/doc.go @@ -4,10 +4,42 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package validate provides input sanitization and validation -// utilities. +// Package validate provides input-validation helpers +// that ctx uses at filesystem and security boundaries. // -// It includes functions for converting user-provided strings into -// safe values for use in filenames, paths, and other contexts where -// special characters could cause problems. +// # Path Validation +// +// - [Boundary] checks that a directory resolves to +// a path within the current working directory. +// Resolves symlinks in both paths so traversal +// via symlinked parents is caught. On Windows, +// comparisons are case-insensitive to handle +// NTFS path normalization. Returns a typed error +// from [internal/err/context] if the path escapes +// the project root. +// - [Symlinks] checks whether a directory or any +// of its immediate children are symlinks. Returns +// a typed error describing the first symlink +// found. Non-existent directories are not an +// error (let the caller handle that). 
+// +// # Design Philosophy +// +// Unlike [internal/sanitize] (which transforms bad +// input into safe values), this package rejects bad +// input outright. Unlike [internal/io] (which guards +// against system directory access), this package +// guards against project-boundary escapes and +// symlink-based traversal. +// +// The validate.go file currently contains only the +// package declaration, serving as an anchor for +// future non-path validators. +// +// # Concurrency +// +// All functions are pure and safe for concurrent +// use. They rely on os.Getwd, filepath.Abs, and +// filepath.EvalSymlinks, which are themselves +// goroutine-safe. package validate diff --git a/internal/validate/path.go b/internal/validate/path.go index 250035ca7..e275b6180 100644 --- a/internal/validate/path.go +++ b/internal/validate/path.go @@ -9,74 +9,10 @@ package validate import ( "os" "path/filepath" - "runtime" - "strings" - "github.com/ActiveMemory/ctx/internal/config/env" errCtx "github.com/ActiveMemory/ctx/internal/err/context" - errFs "github.com/ActiveMemory/ctx/internal/err/fs" ) -// Boundary checks that dir resolves to a path within the current -// working directory. Returns an error if the resolved path escapes the -// project root. -// -// Parameters: -// - dir: Directory path to validate -// -// Returns: -// - error: Non-nil if the path escapes the project root -func Boundary(dir string) error { - cwd, cwdErr := os.Getwd() - if cwdErr != nil { - return errFs.BoundaryViolation(cwdErr) - } - - absDir, absErr := filepath.Abs(dir) - if absErr != nil { - return errFs.BoundaryViolation(absErr) - } - - // Resolve symlinks in both paths so traversal via symlinked parents - // is caught. - resolvedCwd, resolveErr := filepath.EvalSymlinks(cwd) - if resolveErr != nil { - return errFs.BoundaryViolation(resolveErr) - } - - resolvedDir, dirResolveErr := filepath.EvalSymlinks(absDir) - if dirResolveErr != nil { - // If the target doesn't exist yet (e.g. 
before init), fall back - // to the absolute path for the prefix check. - resolvedDir = filepath.Clean(absDir) - } - - // On Windows, path comparisons must be case-insensitive because - // filepath.EvalSymlinks resolves to actual disk casing while - // os.Getwd preserves the casing from the caller (e.g. VS Code - // passes a lowercase drive letter via fsPath). - equal := func(a, b string) bool { return a == b } - hasPrefix := strings.HasPrefix - if runtime.GOOS == env.OSWindows { - equal = strings.EqualFold - hasPrefix = func(s, prefix string) bool { - return len(s) >= len(prefix) && strings.EqualFold(s[:len(prefix)], prefix) - } - } - - // Ensure the resolved dir is equal to or nested under the project root. - // Append os.PathSeparator to avoid "/foo/bar" matching "/foo/b". - // On Windows, use case-insensitive comparison since NTFS paths are - // case-insensitive but EvalSymlinks normalizes casing only for the - // existing cwd, not the non-existent target — creating a mismatch. - root := resolvedCwd + string(os.PathSeparator) - if !equal(resolvedDir, resolvedCwd) && !hasPrefix(resolvedDir, root) { - return errCtx.OutsideRoot(dir, resolvedCwd) - } - - return nil -} - // Symlinks checks whether dir itself or any of its immediate children // are symlinks. Returns an error describing the first symlink found. 
// diff --git a/internal/validate/path_test.go b/internal/validate/path_test.go index a5a341230..5a2ec28df 100644 --- a/internal/validate/path_test.go +++ b/internal/validate/path_test.go @@ -9,79 +9,9 @@ package validate import ( "os" "path/filepath" - "runtime" - "strings" "testing" - - "github.com/ActiveMemory/ctx/internal/config/env" ) -func TestBoundary(t *testing.T) { - cwd, err := os.Getwd() - if err != nil { - t.Fatal(err) - } - - tests := []struct { - name string - dir string - wantErr bool - }{ - {"relative inside cwd", ".context", false}, - {"absolute inside cwd", filepath.Join(cwd, ".context"), false}, - {"deeply nested", filepath.Join(cwd, "a", "b", "c"), false}, - {"cwd itself", cwd, false}, - {"dot", ".", false}, - {"escapes cwd", "../../etc", true}, - {"absolute outside cwd", "/tmp/evil", true}, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := Boundary(tt.dir) - if (err != nil) != tt.wantErr { - t.Errorf("Boundary(%q) error = %v, wantErr %v", - tt.dir, err, tt.wantErr) - } - }) - } -} - -func TestBoundaryCaseInsensitive(t *testing.T) { - if runtime.GOOS != env.OSWindows { - t.Skip("case-insensitive path test only applies to Windows") - } - - // On Windows, EvalSymlinks normalizes casing to the filesystem's - // canonical form. When .context/ doesn't exist yet the fallback - // preserves the original cwd casing. The prefix check must be - // case-insensitive to avoid false "outside cwd" errors. - tmp := t.TempDir() - - // Change cwd to a case-mangled version of the temp dir. - // TempDir returns canonical casing; flip it. 
- mangled := strings.ToUpper(tmp) - if mangled == tmp { - mangled = strings.ToLower(tmp) - } - - orig, err := os.Getwd() - if err != nil { - t.Fatal(err) - } - defer func() { _ = os.Chdir(orig) }() - - if err := os.Chdir(mangled); err != nil { - t.Skipf("cannot chdir to case-mangled path %q: %v", mangled, err) - } - - // .context doesn't exist — this is the exact scenario that caused the - // false positive on Windows. - if err := Boundary(".context"); err != nil { - t.Errorf("Boundary(.context) with case-mangled cwd: %v", err) - } -} - func TestCheckSymlinks(t *testing.T) { t.Run("regular directory passes", func(t *testing.T) { dir := t.TempDir() @@ -136,45 +66,3 @@ func TestCheckSymlinks(t *testing.T) { } }) } - -func TestBoundary_WindowsCaseInsensitive(t *testing.T) { - if runtime.GOOS != env.OSWindows { - t.Skip("Windows-only test") - } - - // Simulate the VS Code plugin scenario: CWD has a lowercase drive letter - // but EvalSymlinks resolves to the actual (uppercase) casing. - // When .context doesn't exist yet (first init), the fallback path - // preserves the lowercase letter, causing a case mismatch. 
- cwd, err := os.Getwd() - if err != nil { - t.Fatal(err) - } - - // Swap the drive letter case to simulate VS Code's fsPath - if len(cwd) >= 2 && cwd[1] == ':' { - var swapped string - if cwd[0] >= 'A' && cwd[0] <= 'Z' { - swapped = strings.ToLower(cwd[:1]) + cwd[1:] - } else { - swapped = strings.ToUpper(cwd[:1]) + cwd[1:] - } - - origDir, _ := os.Getwd() - if chErr := os.Chdir(swapped); chErr != nil { - t.Fatalf("cannot chdir to %s: %v", swapped, chErr) - } - defer func() { _ = os.Chdir(origDir) }() - - // Non-existent subdir simulates .context before init - nonExistent := filepath.Join(swapped, ".nonexistent-ctx-dir") - if err := Boundary(nonExistent); err != nil { - t.Errorf("Boundary(%q) with swapped drive case should pass, got: %v", nonExistent, err) - } - - // Also test the default relative path that ctx init uses - if err := Boundary(".context"); err != nil { - t.Errorf("Boundary(.context) with swapped drive case should pass, got: %v", err) - } - } -} diff --git a/internal/wrap/doc.go b/internal/wrap/doc.go index de2d6a05f..717b2360f 100644 --- a/internal/wrap/doc.go +++ b/internal/wrap/doc.go @@ -4,11 +4,55 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package wrap soft-wraps long lines in markdown files to a target -// width (default 80 characters). +// Package wrap soft-wraps long lines in markdown files to a +// target column width (default 80) without breaking +// markdown semantics, preserving fenced code blocks, tables, +// frontmatter, and list continuation indentation. // -// [Content] wraps all lines in a journal entry. [ContextFile] wraps -// lines in a context file (.context/*.md), handling markdown list -// continuation with 2-space indent. [Soft] wraps a single line at -// word boundaries, returning multiple lines. 
+// The package is what backs `ctx fmt` for journal and context +// files; it is also called by the journal-import pipeline +// before writing enriched entries so reviewers see the same +// shape on disk that they would see in a code review. +// +// # The Three Public Functions +// +// - **[Soft](line, width)**: wraps a **single** line at +// word boundaries and returns the resulting `[]string`. +// Preserves the leading indent of the original line on +// each continuation. Never breaks inside a word. +// - **[Content](text, width)**: wraps every line in a +// **journal entry**. Recognizes YAML frontmatter and +// skips it (frontmatter values may not be wrapped), +// skips lines inside fenced code blocks, and skips +// table rows. +// - **[ContextFile](text, width)**: same intent as +// [Content] but tuned for `.context/*.md` files: aware +// of the markdown list continuation convention +// (2-space indent for follow-on lines under a +// bullet) so wrapped continuations look like the +// original input. Used by `ctx fmt` and the post-add +// formatter. +// +// # What Stays Unwrapped +// +// The wrap functions deliberately leave several constructs +// alone: +// +// - **YAML frontmatter**: keys and scalar values must +// stay on one line. +// - **Fenced code blocks** (` ``` ` / ` ~~~ `): code +// is wrapped by the language, not the markdown +// renderer. +// - **Table rows** (lines that match the markdown table +// pattern); rewrapping would break column alignment. +// - **Heading lines**: wrapping a heading mid-phrase +// would change semantics in many renderers. +// - **Lines that have no whitespace inside the body +// beyond the column limit** (e.g. a single long URL): +// better to overflow than to break the link. +// +// # Concurrency +// +// All functions are pure: input string → output string. +// Concurrent callers never race. 
package wrap diff --git a/internal/write/activate/activate.go b/internal/write/activate/activate.go new file mode 100644 index 000000000..f508088cd --- /dev/null +++ b/internal/write/activate/activate.go @@ -0,0 +1,88 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package activate + +import ( + "fmt" + + "github.com/spf13/cobra" + + cfgShell "github.com/ActiveMemory/ctx/internal/config/shell" +) + +// Emit writes pre-formatted shell-eval content to cmd's stdout +// without adding a trailing newline. The emit-layer functions in +// [internal/cli/activate/core/emit] already include the newline +// they need, so this helper must not add another (a stray blank +// line in `eval` output is harmless but ugly in `set -x` traces). +// +// Parameters: +// - cmd: cobra command providing the stdout sink. Nil is a +// no-op so test setups that omit the command don't crash. +// - content: shell-eval line(s); may be empty (no-op). +func Emit(cmd *cobra.Command, content string) { + if cmd == nil || content == "" { + return + } + _, _ = fmt.Fprint(cmd.OutOrStdout(), content) +} + +// ActivatedAt writes a single informational line to stderr +// announcing the bound `.context/` path. Always called by +// `ctx activate` on success (single-candidate too) so the user +// always sees what just happened, not just an empty terminal. +// +// Stderr (not stdout) because the line is for the user, not the +// shell. `eval` lets stderr pass through to the terminal while +// stripping the eval-captured stdout stream. +// +// Parameters: +// - cmd: cobra command providing the stderr sink. Nil is a +// no-op. +// - path: absolute path of the bound `.context/` directory. +// Empty is a no-op (defensive; Run never calls with empty). 
+func ActivatedAt(cmd *cobra.Command, path string) { + if cmd == nil || path == "" { + return + } + // ErrOrStderr (not OutOrStderr): cobra's OutOrStderr returns + // the SetOut writer with stderr fallback (confusingly named). + // Wrong helper would land the advisory inside the + // eval-captured stream and make it invisible to anyone + // running `eval "$(ctx activate)"`. + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), + cfgShell.FormatActivatedAtAdvisory, path) +} + +// AlsoVisible writes one informational line per additional +// `.context/` candidate to stderr. Used by `ctx activate` when +// more than one candidate is visible upward from CWD: innermost +// wins (the bind goes to stdout via [Emit] and is announced via +// [ActivatedAt]), and the others get surfaced here so the user +// can see what's around but isn't being bound. +// +// Each line follows the shape: +// +// ctx: also visible upward: +// +// Multiple paths produce multiple lines (one per path) so the +// output stays parseable when anyone scripts around it. +// +// Parameters: +// - cmd: cobra command providing the stderr sink. Nil is a +// no-op. +// - paths: additional candidates to surface, in the order they +// came back from the upward scan. Empty / nil is a no-op. +func AlsoVisible(cmd *cobra.Command, paths []string) { + if cmd == nil || len(paths) == 0 { + return + } + for _, p := range paths { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), + cfgShell.FormatAlsoVisibleAdvisory, p) + } +} diff --git a/internal/write/activate/doc.go b/internal/write/activate/doc.go new file mode 100644 index 000000000..2f327570a --- /dev/null +++ b/internal/write/activate/doc.go @@ -0,0 +1,27 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package activate carries the write-layer helpers for the +// `ctx activate` and `ctx deactivate` commands. 
Both produce a +// single shell-eval line (export / unset) that callers consume +// via `eval "$(ctx activate)"`. +// +// # Why a separate write package +// +// The `cmd_print` and `cmd_fprint` audits forbid `cmd.Print*` and +// `fmt.Fprint*(, ...)` outside `internal/write/`. +// The shell-eval lines are pre-formatted by +// [internal/cli/activate/core/emit] (no template substitution at the +// write layer), so this package stays intentionally small: thin +// helpers that own the actual stdout and stderr writes. +// +// # Exported Functions +// +// [Emit] writes pre-formatted shell-eval content to the cobra +// command's stdout, no trailing newline added (the emit-layer +// helpers already include one). Both `ctx activate` and +// `ctx deactivate` Run functions call it. [ActivatedAt] and +// [AlsoVisible] write the user-facing advisory lines to stderr, +// where they reach the terminal instead of the eval-captured +// stdout stream. +package activate diff --git a/internal/write/add/doc.go b/internal/write/add/doc.go index 1ceae05b9..54fadc8e9 100644 --- a/internal/write/add/doc.go +++ b/internal/write/add/doc.go @@ -1,12 +1,37 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package add provides formatted output helpers for the add command. +// Package add provides terminal output for the context +// entry addition commands (ctx task add, ctx decision add, +// ctx learning add, ctx convention add). // -// All functions take *cobra.Command for output routing. -// Exports: [Added]. -// Exports: [Added]. +// # Exported Functions +// +// [Added] prints a confirmation message after an entry +// is appended to a context file. The message includes +// the target filename so the user knows which file was +// modified (e.g. TASKS.md, DECISIONS.md). +// +// [SpecNudge] prints a one-line tip suggesting that +// the user create a feature spec when a task is complex +// enough to benefit from structured planning. This nudge +// is shown conditionally by the calling command.
+// +// # Message Categories +// +// - Info: confirmation that an entry was written +// - Nudge: optional guidance tip after task creation +// +// # Usage +// +// Both functions accept a *cobra.Command for output +// routing. Messages are loaded from the embedded +// descriptor system and formatted with the target +// filename. +// +// add.Added(cmd, "TASKS.md") +// add.SpecNudge(cmd) package add diff --git a/internal/write/agent/doc.go b/internal/write/agent/doc.go index 54273f573..7118227c4 100644 --- a/internal/write/agent/doc.go +++ b/internal/write/agent/doc.go @@ -1,12 +1,33 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package agent provides terminal output functions for the agent command. +// Package agent provides terminal output for the agent +// command (ctx agent). // -// All functions take *cobra.Command for output routing. -// Exports: [Packet]. -// Exports: [Packet]. +// # Exported Functions +// +// [Packet] prints a pre-rendered markdown context packet +// to stdout. The packet is assembled by the agent core +// package and contains a budget-constrained snapshot of +// the project's context files, formatted for consumption +// by AI coding assistants. +// +// # Message Categories +// +// - Info: rendered markdown content printed verbatim +// to stdout without additional formatting +// +// # Nil Safety +// +// A nil *cobra.Command is treated as a no-op, making +// Packet safe to call from paths where a command may +// not be available. +// +// # Usage +// +// content := core.Assemble(files, budget) +// agent.Packet(cmd, content) package agent diff --git a/internal/write/archive/doc.go b/internal/write/archive/doc.go index 0dc4015b2..8ea3652e4 100644 --- a/internal/write/archive/doc.go +++ b/internal/write/archive/doc.go @@ -1,28 +1,42 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? 
-// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package archive provides terminal output for task archival and -// snapshot operations. +// Package archive provides terminal output for task +// archival and snapshot operations (ctx task archive, +// ctx task snapshot). // -// All functions take a *cobra.Command for output routing. The package -// handles two related workflows: +// # Task Archival // -// - Task archival: [DryRun] previews what would be archived, -// [Success] reports completion, [NoCompleted] handles the -// empty case, and [Skipping]/[SkipIncomplete] explain why -// specific tasks were excluded. -// - Task snapshots: [SnapshotSaved] confirms the write path -// and [SnapshotContent] formats the snapshot body with a -// timestamp header and separator. +// Functions cover the full lifecycle of an archive +// operation. [DryRun] previews what would be archived +// with counts and a content preview. [Success] reports +// the number of tasks archived and the output file path. +// [NoCompleted] handles the empty case when no completed +// tasks exist. [Skipping] explains why a specific parent +// task was excluded due to incomplete children, and +// [SkipIncomplete] summarizes the total skip count. // -// Example usage from a command's Run function: +// # Task Snapshots +// +// [SnapshotSaved] confirms a snapshot was written and +// prints the output file path. [SnapshotContent] formats +// the snapshot body with a name header, creation +// timestamp, and separator, returning the assembled +// string for the caller to write to disk. 
+// +// # Message Categories +// +// - Info: archive and snapshot confirmations +// - Warning: skip notices for incomplete tasks +// +// # Usage // // if dryRun { -// write.DryRun(cmd, tasks, archivePath) +// archive.DryRun(cmd, count, pending, preview, sep) // return // } -// write.Success(cmd, count, archivePath) +// archive.Success(cmd, count, path, pending) package archive diff --git a/internal/write/backup/backup.go b/internal/write/backup/backup.go deleted file mode 100644 index bfc7a25a4..000000000 --- a/internal/write/backup/backup.go +++ /dev/null @@ -1,43 +0,0 @@ -// / ctx: https://ctx.ist -// ,'`./ do you remember? -// `.,'\ -// \ Copyright 2026-present Context contributors. -// SPDX-License-Identifier: Apache-2.0 - -package backup - -import ( - "fmt" - - "github.com/spf13/cobra" - - "github.com/ActiveMemory/ctx/internal/assets/read/desc" - "github.com/ActiveMemory/ctx/internal/config/embed/text" - "github.com/ActiveMemory/ctx/internal/format" -) - -// ResultLine prints a single backup result with optional SMB destination. -// -// Parameters: -// - cmd: Cobra command for output. Nil is a no-op. -// - scope: backup scope label (e.g. "project", "global"). -// - archive: archive file path. -// - size: archive size in bytes. -// - smbDest: optional SMB destination (empty string skips). -func ResultLine( - cmd *cobra.Command, - scope, archive string, - size int64, - smbDest string, -) { - if cmd == nil { - return - } - line := fmt.Sprintf( - desc.Text(text.DescKeyWriteBackupResult), - scope, archive, format.Bytes(size)) - if smbDest != "" { - line += fmt.Sprintf(desc.Text(text.DescKeyWriteBackupSMBDest), smbDest) - } - cmd.Println(line) -} diff --git a/internal/write/backup/doc.go b/internal/write/backup/doc.go deleted file mode 100644 index 85d693128..000000000 --- a/internal/write/backup/doc.go +++ /dev/null @@ -1,12 +0,0 @@ -// / ctx: https://ctx.ist -// ,'`./ do you remember? -// `.,'\\ -// \ Copyright 2026-present Context contributors. 
-// SPDX-License-Identifier: Apache-2.0 - -// Package backup provides formatted output helpers for the backup command. -// -// All functions take *cobra.Command for output routing. -// Exports: [ResultLine]. -// Exports: [ResultLine]. -package backup diff --git a/internal/write/backup/skip.go b/internal/write/backup/skip.go deleted file mode 100644 index 0056f3b08..000000000 --- a/internal/write/backup/skip.go +++ /dev/null @@ -1,27 +0,0 @@ -// / ctx: https://ctx.ist -// ,'`./ do you remember? -// `.,'\\ -// \ Copyright 2026-present Context contributors. -// SPDX-License-Identifier: Apache-2.0 - -package backup - -import ( - "fmt" - "io" - - "github.com/ActiveMemory/ctx/internal/assets/read/desc" - "github.com/ActiveMemory/ctx/internal/config/embed/text" -) - -// SkipEntry writes a message indicating that an optional archive -// entry was skipped because its source file does not exist. -// -// Parameters: -// - w: output writer -// - prefix: entry prefix label -func SkipEntry(w io.Writer, prefix string) { - _, _ = fmt.Fprintf( - w, desc.Text(text.DescKeyWriteBackupSkipEntry), prefix, - ) -} diff --git a/internal/write/bootstrap/doc.go b/internal/write/bootstrap/doc.go index 8e9e302ac..24095047a 100644 --- a/internal/write/bootstrap/doc.go +++ b/internal/write/bootstrap/doc.go @@ -1,12 +1,46 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package bootstrap provides formatted output helpers for the. +// Package bootstrap provides terminal output for the +// bootstrap command (ctx system bootstrap). // -// All functions take *cobra.Command for output routing. -// Exports: [Dir], [Text], [JSON]. -// Exports: [Dir], [Text], [JSON]. +// # Exported Functions +// +// [Dir] prints just the context directory path for +// quiet/machine-readable mode. This is used when agents +// need only the path without surrounding text. 
+// +// [Text] prints the full human-readable bootstrap +// output: a title banner, the context directory path, +// a wrapped file list, numbered constitution rules, +// numbered next-step suggestions, and an optional +// warning block. +// +// [JSON] prints the bootstrap output as a structured +// JSON object containing the context directory, file +// list, rules, next steps, and optional warnings. If +// JSON encoding fails, a structured error object is +// printed instead. +// +// [CommunityFooter] prints the community link footer +// shown at the bottom of help output. +// +// # Message Categories +// +// - Info: directory path, file list, rules, next steps +// - Warning: optional initialization warnings +// - Error: JSON encoding failures (to stderr) +// +// # Usage +// +// if quiet { +// bootstrap.Dir(cmd, contextDir) +// } else if jsonMode { +// bootstrap.JSON(cmd, dir, files, rules, steps, warn) +// } else { +// bootstrap.Text(cmd, dir, fileList, rules, steps, warn) +// } package bootstrap diff --git a/internal/write/change/doc.go b/internal/write/change/doc.go index a74c557fa..1637b04d4 100644 --- a/internal/write/change/doc.go +++ b/internal/write/change/doc.go @@ -1,12 +1,32 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package change provides terminal output functions for the change command. +// Package change provides terminal output for the change +// log command (ctx change). // -// All functions take *cobra.Command for output routing. -// Exports: [List]. -// Exports: [List]. +// # Exported Functions +// +// [List] prints a pre-rendered changes string to stdout. +// The content is assembled by the change core package +// and may include commit history, version diffs, or +// release notes depending on the subcommand invoked. 
+// +// # Nil Safety +// +// A nil *cobra.Command is treated as a no-op, making +// List safe to call from paths where a command may not +// be available. +// +// # Message Categories +// +// - Info: rendered change content printed verbatim +// to stdout without additional formatting +// +// # Usage +// +// rendered := core.RenderChanges(commits) +// change.List(cmd, rendered) package change diff --git a/internal/write/compact/doc.go b/internal/write/compact/doc.go index 1284339a5..960cbdea1 100644 --- a/internal/write/compact/doc.go +++ b/internal/write/compact/doc.go @@ -1,23 +1,47 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package compact provides terminal output for the context compaction -// workflow (ctx compact). +// Package compact provides terminal output for the +// context compaction workflow (ctx compact). // -// Functions cover the full lifecycle of a compact operation: -// [ReportHeading] opens the report, [InfoMovingTask] and -// [InfoSkippingTask] narrate per-task decisions, [InfoArchivedTasks] -// confirms what was written, [SectionsRemoved] reports empty section -// cleanup, and [ReportSummary]/[ReportClean] close the report. +// # Report Lifecycle // -// Example: +// Functions cover the full lifecycle of a compact +// operation. [ReportHeading] opens the report with a +// title and separator. Per-task decisions are narrated +// by [InfoMovingTask] (completed task being archived) +// and [InfoSkippingTask] (task excluded due to +// incomplete children). [InfoArchivedTasks] confirms +// how many tasks were written to the archive file +// with the age threshold used. // -// write.ReportHeading(cmd) +// # Cleanup Output +// +// [SectionsRemoved] reports how many empty sections +// were pruned from a context file during compaction. +// [TaskError] reports errors encountered while +// processing individual tasks. 
+// +// # Summary +// +// [ReportSummary] prints the final change count, and +// [ReportClean] prints the message when no changes +// were needed. +// +// # Message Categories +// +// - Info: per-task decisions, archive results +// - Error: task processing failures +// - Summary: total change count or clean status +// +// # Usage +// +// compact.ReportHeading(cmd) // for _, t := range tasks { -// write.InfoMovingTask(cmd, t) +// compact.InfoMovingTask(cmd, t) // } -// write.ReportSummary(cmd, changes) +// compact.ReportSummary(cmd, changes) package compact diff --git a/internal/write/complete/doc.go b/internal/write/complete/doc.go index 9fd152f2d..21f884012 100644 --- a/internal/write/complete/doc.go +++ b/internal/write/complete/doc.go @@ -1,17 +1,29 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package complete provides terminal output for the task completion -// command (ctx complete). +// Package complete provides terminal output for the task +// completion command (ctx task complete). // -// The single exported function [Completed] prints a confirmation -// message when a task checkbox is toggled from [ ] to [x] in -// TASKS.md. +// # Exported Functions // -// Example: +// [Completed] prints a confirmation message when a task +// checkbox is toggled from [ ] to [x] in TASKS.md. The +// message includes the task description so the user can +// verify which task was marked done. 
// -// write.Completed(cmd, "Implement session cooldown") +// # Message Categories +// +// - Info: task completion confirmation with the task +// description echoed back to the user +// +// # Usage +// +// The calling command identifies the target task by +// index or text match, toggles the checkbox, and then +// calls Completed to confirm the change: +// +// complete.Completed(cmd, "Implement session cooldown") package complete diff --git a/internal/write/config/doc.go b/internal/write/config/doc.go index 41d271e1b..5d5adbe69 100644 --- a/internal/write/config/doc.go +++ b/internal/write/config/doc.go @@ -1,12 +1,34 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package config provides formatted output helpers for the config command. +// Package config provides terminal output for the +// configuration management commands (ctx config). // -// All functions take *cobra.Command for output routing. -// Exports: [ProfileStatus], [Schema], [SwitchConfirm]. -// Exports: [ProfileStatus], [Schema], [SwitchConfirm]. +// # Exported Functions +// +// [ProfileStatus] prints the active runtime profile +// status line. It distinguishes between "dev" mode, +// "base" mode, and no profile (with a hint to create +// a .ctxrc file). +// +// [Schema] prints the raw JSON schema content for the +// ctx configuration file. The schema is printed as-is +// without additional formatting. +// +// [SwitchConfirm] prints the confirmation message +// after a profile switch operation completes. The +// message is generated by the switch logic and passed +// through verbatim. +// +// # Message Categories +// +// - Info: profile status, schema dump, switch +// confirmation +// +// # Nil Safety +// +// All functions treat a nil *cobra.Command as a no-op. 
package config diff --git a/internal/write/connect/doc.go b/internal/write/connect/doc.go index 2441f6109..6491fbce4 100644 --- a/internal/write/connect/doc.go +++ b/internal/write/connect/doc.go @@ -4,10 +4,38 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package connect provides output functions for the connect -// command group (hub client operations). +// Package connect provides terminal output for the hub +// client commands (ctx connect). // -// Key exports: [Registered]. -// See source files for implementation details. -// Part of the internal subsystem. +// # Registration and Subscription +// +// [Registered] confirms a successful hub registration +// and prints the assigned client ID. [Subscribed] +// confirms which entry types the client is subscribed +// to receive. +// +// # Data Transfer +// +// [Synced] reports how many entries were pulled from +// the hub. [Published] reports how many entries were +// pushed to the hub. [PublishFailed] warns when a +// publish operation fails without aborting. +// +// # Live Stream +// +// [Listening] confirms the listen stream is active. +// [EntryReceived] reports each entry received via the +// live stream with its type. +// +// # Hub Status +// +// [Status] prints the hub connection dashboard: the +// hub address, total entry count, and connected client +// count. +// +// # Message Categories +// +// - Info: registration, sync, publish confirmations +// - Warning: publish failures +// - Status: hub connection dashboard package connect diff --git a/internal/write/ctximport/doc.go b/internal/write/ctximport/doc.go index 964dede91..d26e69949 100644 --- a/internal/write/ctximport/doc.go +++ b/internal/write/ctximport/doc.go @@ -1,16 +1,42 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. 
// SPDX-License-Identifier: Apache-2.0 -// Package ctximport provides terminal output functions for memory. -// -// All functions take *cobra.Command for output routing. -// Exports: [NoEntries], [ScanHeader], -// [EntrySkipped], [EntryClassified], -// [EntryAdded], and 2 more. -// Exports: [NoEntries], [ScanHeader], -// [EntrySkipped], [EntryClassified], -// [EntryAdded], [ErrPromote]. +// Package ctximport provides terminal output for memory +// import operations (ctx import). +// +// # Scan Phase +// +// [NoEntries] prints a notice when the source file +// contains no importable entries. [ScanHeader] prints +// the source filename and discovered entry count. +// +// # Classification Phase +// +// [EntrySkipped] prints a block for entries classified +// as "skip" (not promotable). [EntryClassified] prints +// a block for entries that matched a target file with +// keywords during dry-run preview. [EntryAdded] prints +// a block for entries that were successfully promoted +// to a context file. +// +// # Error Handling +// +// [ErrPromote] prints a promotion error to stderr when +// an entry cannot be written to its target file. +// +// # Summary +// +// [Summary] prints the full import summary with totals +// broken down by type (conventions, decisions, learnings, +// tasks), plus counts of skipped and duplicate entries. +// The summary adjusts its wording for dry-run mode. +// +// # Message Categories +// +// - Info: scan results, classifications, promotions +// - Error: promotion failures (stderr) +// - Summary: aggregate counts with type breakdown package ctximport diff --git a/internal/write/doc.go b/internal/write/doc.go index eadb8d11b..edbc31cf7 100644 --- a/internal/write/doc.go +++ b/internal/write/doc.go @@ -4,14 +4,46 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package write centralizes user-facing output for CLI commands. 
+// Package write centralizes user-facing terminal output +// for every CLI command in ctx. // -// All formatted messages, error output, and informational lines that CLI -// commands to print to the user are routed through this package. This ensures -// consistent prefixes, templates, and output routing (stdout vs. stderr) -// across the entire CLI surface. +// All formatted messages, error output, progress lines, +// and informational text that CLI commands print to the +// user are routed through subpackages of write. This +// ensures consistent prefixes, templates, and output +// routing (stdout vs. stderr) across the entire CLI +// surface. // -// Functions accept a *cobra.Command to write to the correct output stream. -// Nil commands are treated as no-ops, making it safe to call from code -// paths where a command may not be available. +// # Organization +// +// Each subpackage corresponds to one command or feature +// area. For example, write/archive handles task archival +// output, write/bootstrap handles the bootstrap command, +// and write/err provides shared error formatting. +// +// # Conventions +// +// Functions accept a *cobra.Command to write to the +// correct output stream. Nil commands are treated as +// no-ops, making it safe to call from code paths where +// a command may not be available. Functions accept +// primitive types (strings, ints) rather than domain +// types to avoid coupling write packages to business +// logic. +// +// Message text is never hardcoded in write packages. +// All strings are loaded from the embedded descriptor +// system via internal/assets/read/desc, keyed by +// constants defined in internal/config/embed/text. +// +// # Message Categories +// +// Output falls into three categories: +// +// - Info: progress confirmations, status lines, and +// success messages printed to stdout via cmd.Println. +// - Errors: prefixed error messages printed to stderr +// via cmd.PrintErrln (see write/err). 
+// - Warnings: non-fatal file or config warnings printed +// to stderr with a warning prefix. package write diff --git a/internal/write/doctor/doc.go b/internal/write/doctor/doc.go index 03e85c243..919dd151e 100644 --- a/internal/write/doctor/doc.go +++ b/internal/write/doctor/doc.go @@ -1,12 +1,35 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package doctor provides formatted output helpers for the doctor command. +// Package doctor provides terminal output for the health +// check command (ctx doctor). // -// All functions take *cobra.Command for output routing. -// Exports: [JSON], [Report]. -// Exports: [JSON], [Report]. +// # Exported Functions +// +// [JSON] prints pre-marshaled JSON check results to +// stdout. This is used when the --json flag is set. +// +// [Report] prints a human-readable health report +// grouped by category. Categories include structure, +// quality, plugin, hooks, state, size, resources, and +// events. Each check result is printed with a status +// icon (pass/fail/warning) and a descriptive message. +// The report closes with a summary line showing total +// warning and error counts. +// +// # Types +// +// [ResultItem] holds the display data for a single +// check result: category name, status symbol, and +// human-readable message. The calling command maps +// domain check results into ResultItem values before +// passing them to Report. +// +// # Message Categories +// +// - Info: per-check results with status icons +// - Summary: warning and error totals package doctor diff --git a/internal/write/drift/doc.go b/internal/write/drift/doc.go index df2c10e9b..1884a53bc 100644 --- a/internal/write/drift/doc.go +++ b/internal/write/drift/doc.go @@ -1,16 +1,50 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. 
// SPDX-License-Identifier: Apache-2.0 -// Package drift provides formatted output helpers for the drift command. -// -// All functions take *cobra.Command for output routing. -// Exports: [FixHeader], [FixRecheck], -// [FixedCount], [SkippedCount], [FixError], -// and 19 more. -// Exports: [FixHeader], [FixRecheck], -// [FixedCount], [SkippedCount], [FixError], -// [FixStaleness]. +// Package drift provides the **terminal-output helpers** +// the `ctx drift` and `ctx drift --fix` CLI surfaces use +// to render their per-issue progress and final summary. +// +// All exported functions take a `*cobra.Command` so +// they route through cobra's output stream (which tests +// can wire to a buffer for assertion). +// +// # Public Surface +// +// Output families: +// +// - **Fix progress**: [FixHeader], +// [FixRecheck], [FixedCount], +// [SkippedCount], [FixError], [FixStaleness]. +// Used by `--fix` to narrate what is being +// auto-remediated and what remained. +// - **Per-issue lines**: formatters for the +// individual issue rows (path refs, +// staleness markers, missing files, +// constitution violations) with the matching +// status glyph. +// - **Summaries**: final roll-up for `ctx +// drift` (counts of warnings/violations/ +// passed) and for `--fix` (counts of fixed +// vs skipped). +// +// # Why a Separate Output Package +// +// Same data, two surfaces (`ctx drift` and +// `ctx drift --fix`), each with its own preferred +// presentation. Hoisting both renderers keeps the +// drift detector +// ([internal/drift]) free of presentation +// concerns and the fix engine +// ([internal/cli/drift/core/fix]) free of UI +// strings. +// +// # Concurrency +// +// Pure data → io.Writer. Concurrent calls go +// through cobra's output stream which is +// serialized. 
package drift diff --git a/internal/write/err/doc.go b/internal/write/err/doc.go index 07a5c06f0..c053711c6 100644 --- a/internal/write/err/doc.go +++ b/internal/write/err/doc.go @@ -1,20 +1,40 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package err provides shared error output helpers for CLI commands. +// Package err provides shared error and warning output +// helpers used across all CLI commands. // -// [With] prints a formatted error message to the command's error -// output. [WarnFile] prints a file-specific warning with the path -// and underlying error. Both route through cobra's error writer -// to respect output redirection. +// # Exported Functions // -// Example: +// [With] prints a formatted error message to the +// command's stderr stream with an "Error: " prefix. +// This is the standard way to report fatal errors +// in ctx commands before returning an error code. +// +// [WarnFile] prints a non-fatal file operation warning +// to stdout with the file path and underlying error. +// This is used when a file operation fails but the +// command can continue (e.g. a missing optional file). +// +// # Nil Safety +// +// Both functions treat a nil *cobra.Command as a no-op, +// making them safe to call from code paths where a +// command may not be available. +// +// # Message Categories +// +// - Error: prefixed error messages to stderr +// - Warning: file-specific warnings to stdout +// +// # Usage // // if err != nil { -// write.With(cmd, err) -// return +// writeerr.With(cmd, err) +// return err // } +// writeerr.WarnFile(cmd, path, err) package err diff --git a/internal/write/events/doc.go b/internal/write/events/doc.go index dd5dc3ed5..90f0dc283 100644 --- a/internal/write/events/doc.go +++ b/internal/write/events/doc.go @@ -1,12 +1,35 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? 
-// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package events provides formatted output helpers for the events command. +// Package events provides terminal output for the event +// log command (ctx events). // -// All functions take *cobra.Command for output routing. -// Exports: [JSON], [Human], [Empty]. -// Exports: [JSON], [Human], [Empty]. +// # Exported Functions +// +// [JSON] prints pre-formatted JSONL event lines to +// stdout, one per line. This is used when the --json +// flag is set for machine-readable output. +// +// [Human] prints pre-formatted human-readable event +// lines to stdout. Each line includes a timestamp, +// event type, and summary formatted by the events +// core package. +// +// [Empty] prints a notice when the event log contains +// no entries matching the query. +// +// Both [JSON] and [Human] delegate to the shared +// [line.All] primitive for nil-safe iteration. +// +// # Message Categories +// +// - Info: event lines in JSON or human format +// - Empty: no-results notice +// +// # Nil Safety +// +// All functions treat a nil *cobra.Command as a no-op. package events diff --git a/internal/write/export/doc.go b/internal/write/export/doc.go index 0d3cc0c07..e7909530c 100644 --- a/internal/write/export/doc.go +++ b/internal/write/export/doc.go @@ -1,13 +1,35 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package export provides terminal output for file export operations -// across multiple commands (recall import, pad export). +// Package export provides terminal output for file +// export operations across multiple commands (recall +// import, pad export, and similar workflows). // -// [InfoExistsWritingAsAlternative] notifies the user when an output -// file already exists and an alternative filename is used to avoid -// overwriting. 
+// # Exported Functions +// +// [InfoExistsWritingAsAlternative] notifies the user +// when an output file already exists at the target +// path and an alternative filename is used to avoid +// overwriting. The message includes both the original +// path and the fallback path so the user knows where +// to find the output. +// +// # Message Categories +// +// - Info: alternative filename notice +// +// # Nil Safety +// +// A nil *cobra.Command is treated as a no-op. +// +// # Usage +// +// if fileExists(target) { +// alt := timestampedName(target) +// export.InfoExistsWritingAsAlternative( +// cmd, target, alt) +// } package export diff --git a/internal/write/fmt/doc.go b/internal/write/fmt/doc.go index 59009bb45..e5e7d5563 100644 --- a/internal/write/fmt/doc.go +++ b/internal/write/fmt/doc.go @@ -4,10 +4,30 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package fmt provides output functions for the fmt command. +// Package fmt provides terminal output for the context +// file formatter command (ctx fmt). // -// [Summary] prints how many files were formatted. -// [NeedsFormatting] prints a per-file message in check mode. +// # Exported Functions // -// Exports: [Summary], [NeedsFormatting]. +// [Summary] prints the formatting result showing how +// many context files were reformatted out of the total +// number scanned. This is printed to stdout after both +// format and check modes complete. +// +// [NeedsFormatting] prints a per-file message to stderr +// in check mode, identifying each context file that +// would need reformatting. This enables CI pipelines +// to detect formatting drift without modifying files. 
+// +// # Message Categories +// +// - Info: format summary (stdout) +// - Warning: per-file check-mode notices (stderr) +// +// # Usage +// +// for _, f := range dirty { +// fmt.NeedsFormatting(cmd, f.Name) +// } +// fmt.Summary(cmd, len(dirty), len(all)) package fmt diff --git a/internal/write/guide/doc.go b/internal/write/guide/doc.go index 48bed9942..1be1d929f 100644 --- a/internal/write/guide/doc.go +++ b/internal/write/guide/doc.go @@ -1,20 +1,45 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package guide provides terminal output for the help/guide command -// that lists available skills and commands. +// Package guide provides terminal output for the help +// and guide commands (ctx guide, ctx help). // -// Functions render two sections: skills ([InfoSkillsHeader], -// [InfoSkillLine]) and CLI commands ([CommandsHeader], [CommandLine]). -// [Default] outputs the combined guide when no subcommand is given. +// # Skills Section // -// Example: +// [InfoSkillsHeader] prints the skills list heading +// followed by a blank line. [InfoSkillLine] prints a +// single skill entry with its name and a truncated +// description. // -// write.InfoSkillsHeader(cmd) +// # Commands Section +// +// [CommandsHeader] prints the CLI commands list heading +// followed by a blank line. [CommandLine] prints a +// single command entry with its name and short +// description. +// +// # Default Guide +// +// [Default] outputs the combined default guide text +// when no subcommand is specified. The content is +// loaded from the embedded descriptor system and +// printed verbatim. +// +// # Message Categories +// +// - Info: skill and command listings, default guide +// +// # Nil Safety +// +// All functions treat a nil *cobra.Command as a no-op. 
+// +// # Usage +// +// guide.InfoSkillsHeader(cmd) // for _, s := range skills { -// write.InfoSkillLine(cmd, s.Name, s.Description) +// guide.InfoSkillLine(cmd, s.Name, s.Desc) // } package guide diff --git a/internal/write/hub/doc.go b/internal/write/hub/doc.go index a7ae4fe73..1f89494a0 100644 --- a/internal/write/hub/doc.go +++ b/internal/write/hub/doc.go @@ -4,11 +4,35 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package hub provides output functions for the hub -// cluster management commands. +// Package hub provides terminal output for the hub +// cluster management commands (ctx hub). // -// Key exports: [ClusterStatus], [PeerAdded], [PeerRemoved], -// [SteppedDown]. -// See source files for implementation details. -// Part of the internal subsystem. +// # Cluster Status +// +// [ClusterStatus] prints the full cluster dashboard: +// the current node role (Leader or Follower), the +// leader address, total entry count, and peer count. +// +// # Peer Management +// +// [PeerAdded] confirms a peer was added to the cluster +// and prints the peer address. [PeerRemoved] confirms +// a peer was removed with its address. +// +// # Leadership +// +// [SteppedDown] confirms that leadership was +// transferred to another node. This is printed after +// a successful step-down operation. +// +// # Message Categories +// +// - Info: cluster status, peer changes, leadership +// transfer confirmations +// +// # Usage +// +// hub.ClusterStatus(cmd, role, leader, entries, peers) +// hub.PeerAdded(cmd, peerAddr) +// hub.SteppedDown(cmd) package hub diff --git a/internal/write/initialize/doc.go b/internal/write/initialize/doc.go index f50403f0f..deb0f25a6 100644 --- a/internal/write/initialize/doc.go +++ b/internal/write/initialize/doc.go @@ -1,16 +1,46 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. 
// SPDX-License-Identifier: Apache-2.0 -// Package initialize provides terminal output functions for the. +// Package initialize provides the **terminal-output +// helpers** the `ctx init` command uses to narrate every +// step of the initialization workflow: directory +// creation, foundation-file deployment, plugin +// detection, settings merge, hook installation, summary. // -// All functions take *cobra.Command for output routing. -// Exports: [InfoOverwritePrompt], [InfoAborted], -// [InfoExistsSkipped], [InfoFileCreated], -// [Initialized], and 36 more. -// Exports: [InfoOverwritePrompt], [InfoAborted], -// [InfoExistsSkipped], [InfoFileCreated], -// [Initialized], [InfoWarnNonFatal]. +// The package owns ~40 named output functions, one per +// distinct user-visible event. Centralizing them keeps +// the init flow's terminal text consistent and makes +// localization a single-package change when it +// arrives. +// +// All exported functions take a `*cobra.Command` so +// they route through cobra's output stream (which +// tests can wire to a buffer for assertion). +// +// # Function Families +// +// - **Prompts**: [InfoOverwritePrompt], +// [InfoAborted] for the "should I overwrite?" +// dialog. +// - **Per-file results**: [InfoFileCreated], +// [InfoExistsSkipped], [InfoMerged] etc., one +// line per artifact written. +// - **Plugin / tool detection**: +// [InfoPluginInstalled], +// [InfoPluginEnabled], etc. +// - **Warnings & non-fatal errors**: +// [InfoWarnNonFatal] for issues the user +// should know about but that do not abort +// init. +// - **Summary**: [Initialized] (the final +// "ctx is ready, here's what to do next" +// banner). +// +// # Concurrency +// +// Pure data → io.Writer. cobra serializes +// concurrent writes through its output stream. 
package initialize diff --git a/internal/write/initialize/info.go b/internal/write/initialize/info.go index 23deb4dc7..abd86f8f5 100644 --- a/internal/write/initialize/info.go +++ b/internal/write/initialize/info.go @@ -152,6 +152,29 @@ func InfoNextSteps(cmd *cobra.Command) { cmd.Println(desc.Text(text.DescKeyWriteInitNextStepsBlock)) } +// InfoActivateHint prints the shell-activation block shown right +// after `ctx init` finishes. The block tells the user how to bind +// CTX_DIR for their shell so subsequent ctx commands resolve to the +// freshly-created context directory. +// +// Under the single-source-anchor resolution model +// (specs/single-source-context-anchor.md) this step is not +// optional: every non-exempt ctx command refuses to run without a +// declared CTX_DIR. The hint closes the loop for new users so +// `ctx init` → next command actually works. +// +// Parameters: +// - cmd: cobra command for output. +// - contextDir: absolute path to the just-created .context/ +// directory; used in the `export CTX_DIR=...` variant of the +// hint. The `eval "$(ctx activate)"` variant takes no arg +// under the single-source-anchor model and discovers the +// path itself. +func InfoActivateHint(cmd *cobra.Command, contextDir string) { + tpl := desc.Text(text.DescKeyWriteInitActivateHint) + cmd.Println(fmt.Sprintf(tpl, contextDir)) +} + // InfoWorkflowTips prints the workflow tips block showing key skills // and the ceremony loop. // diff --git a/internal/write/journal/doc.go b/internal/write/journal/doc.go index b95565a2c..e5e551432 100644 --- a/internal/write/journal/doc.go +++ b/internal/write/journal/doc.go @@ -1,14 +1,63 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package journal provides terminal output for journal site -// generation and maintenance commands. 
+// Package journal provides terminal output for journal +// commands (ctx journal source, site, lock, unlock, +// sync, and related subcommands). // -// [InfoSiteGenerated] reports the final build result with entry -// counts and output path. [InfoSiteStarting] and [InfoSiteBuilding] -// provide progress feedback during generation. [InfoOrphanRemoved] -// reports cleanup of orphaned journal files. +// # Site Generation +// +// [InfoSiteGenerated] reports the final build result +// with entry count and output path. [InfoSiteStarting] +// and [InfoSiteBuilding] provide progress feedback. +// [InfoOrphanRemoved] reports cleanup of orphan files. +// +// # Session Import +// +// [ImportSummary] previews what an import will do. +// [ImportedFile] and [SkipFile] report per-file +// results. [ImportFinalSummary] prints aggregate +// counts for new, updated, renamed, and skipped +// files. [ConfirmPrompt] and [Aborted] handle the +// interactive confirmation flow. +// +// # Session Listing +// +// [SessionListHeader] prints the session count. +// [SessionListRow] prints a formatted table row. +// [SessionListFooter] prints the footer with an +// optional --limit hint. [NoSessionsForProject] +// and [NoSessionsWithHint] handle empty results. +// [NoFiltersMatch] handles empty filter results. +// [AmbiguousSessionMatch] and +// [AmbiguousSessionMatchWithHint] resolve ambiguous +// session queries. +// +// # Session Detail +// +// [SessionMetadata] prints the full metadata block +// with identity, timing, and token usage sections. +// [SessionDetail] and [SessionDetailInt] print +// individual metadata lines. [SectionHeader] prints +// a Markdown heading. [ConversationTurn] prints a +// turn header. [TextBlock] and [CodeBlock] render +// content blocks. [ListItem] and [NumberedItem] +// render list entries. [MoreTurns] prints a +// continuation notice. [Hint] prints usage tips. +// +// # Lock / Unlock / Sync +// +// [LockUnlockNone] handles empty entry sets. 
+// [LockUnlockEntry] confirms per-entry changes. +// [LockUnlockSummary] prints aggregate results. +// [SyncNone], [SyncLocked], [SyncUnlocked], and +// [SyncSummary] handle the sync workflow. +// +// # Types +// +// [SessionInfo] carries pre-formatted session metadata +// for display by [SessionMetadata]. package journal diff --git a/internal/write/line/doc.go b/internal/write/line/doc.go index 14787d0c7..427a2d123 100644 --- a/internal/write/line/doc.go +++ b/internal/write/line/doc.go @@ -4,20 +4,38 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package line provides low-level print primitives shared across write -// subpackages. It is not intended for direct use by callers outside -// internal/write/; domain write packages wrap these primitives with -// domain-specific function names. +// Package line provides low-level print primitives +// shared across write subpackages. // -// Example usage from a domain write package: +// This package is not intended for direct use by +// callers outside internal/write/. Domain write +// packages wrap these primitives with domain-specific +// function names to keep their public API descriptive. // -// // write/events/events.go +// # Exported Functions +// +// [All] prints each line in a string slice to the +// command's output. A nil *cobra.Command is a no-op. +// This eliminates the repeated nil-guard + range loop +// pattern across write packages. +// +// [Count] prints a formatted count line only when the +// count is positive. It accepts a descriptor key and +// formats the message with the count value. This +// eliminates the repeated if-count-gt-zero-print +// pattern used for summary lines. 
+// +// # Message Categories +// +// - Info: line-by-line output and conditional counts +// +// # Usage +// +// // From write/events: // func JSON(cmd *cobra.Command, lines []string) { -// writeIO.Lines(cmd, lines) +// line.All(cmd, lines) // } // -// // write/stat/stat.go -// func Table(cmd *cobra.Command, lines []string) { -// writeIO.Lines(cmd, lines) -// } +// // From write/publish: +// line.Count(cmd, text.DescKeyPublishTasks, tasks) package line diff --git a/internal/write/load/doc.go b/internal/write/load/doc.go index 42e93a1e9..8ee814dde 100644 --- a/internal/write/load/doc.go +++ b/internal/write/load/doc.go @@ -1,12 +1,38 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package load provides terminal output functions for the load and. +// Package load provides terminal output for the context +// load commands (ctx load, ctx agent). // -// All functions take *cobra.Command for output routing. -// Exports: [Raw], [Assembled]. -// Exports: [Raw], [Assembled]. +// # Exported Functions +// +// [Raw] outputs context files without assembly or +// headers. Files are printed in read order separated +// by blank lines, with content printed as-is. This +// mode is used for piping context into other tools. +// +// [Assembled] outputs context as a formatted Markdown +// document with token budgeting. Files are included in +// read order until the token budget is exhausted. Each +// file gets a section heading generated by a caller- +// supplied title function. Truncated files are noted +// in the output with a separator and truncation notice. +// The document opens with a budget summary showing +// used vs. available tokens. 
+// +// # Message Categories +// +// - Info: raw file content or assembled Markdown +// document with budget metadata +// +// # Usage +// +// if raw { +// load.Raw(cmd, files) +// } else { +// load.Assembled(cmd, files, budget, total, titleFn) +// } package load diff --git a/internal/write/loop/doc.go b/internal/write/loop/doc.go index cc4fe8bc4..b22104882 100644 --- a/internal/write/loop/doc.go +++ b/internal/write/loop/doc.go @@ -1,12 +1,30 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package loop provides terminal output functions for the loop and. +// Package loop provides terminal output for the loop +// script generation command (ctx loop). // -// All functions take *cobra.Command for output routing. -// Exports: [InfoGenerated]. -// Exports: [InfoGenerated]. +// # Exported Functions +// +// [InfoGenerated] reports successful loop script +// generation with full details: the output file path, +// heading text, selected AI tool, prompt file path, +// maximum iteration count (or "unlimited"), and the +// completion signal string. The output is formatted +// as a multi-line block so the user can review all +// parameters at a glance. +// +// # Message Categories +// +// - Info: script generation confirmation with all +// configuration parameters +// +// # Usage +// +// loop.InfoGenerated(cmd, +// outputFile, heading, tool, +// promptFile, maxIter, completionMsg) package loop diff --git a/internal/write/mark_journal/doc.go b/internal/write/mark_journal/doc.go index 64572bdbc..60fa418f8 100644 --- a/internal/write/mark_journal/doc.go +++ b/internal/write/mark_journal/doc.go @@ -1,12 +1,38 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package mark_journal provides formatted output helpers for the. 
+// Package mark_journal provides terminal output for the +// journal processing stage marker commands (ctx journal +// mark). // -// All functions take *cobra.Command for output routing. -// Exports: [StageChecked], [StageMarked]. -// Exports: [StageChecked], [StageMarked]. +// # Exported Functions +// +// [StageChecked] prints the result of a --check query, +// showing the journal filename, the processing stage +// name, and the current stage value. This lets users +// inspect which stages have been completed for a given +// journal entry. +// +// [StageMarked] prints a confirmation after a stage is +// marked as complete, showing the journal filename and +// the stage name that was just recorded. +// +// # Message Categories +// +// - Info: stage check results and mark confirmations +// +// # Nil Safety +// +// Both functions treat a nil *cobra.Command as a no-op. +// +// # Usage +// +// if checkOnly { +// mark_journal.StageChecked(cmd, file, stage, val) +// } else { +// mark_journal.StageMarked(cmd, file, stage) +// } package mark_journal diff --git a/internal/write/memory/doc.go b/internal/write/memory/doc.go index 8090baa62..f9ca820b9 100644 --- a/internal/write/memory/doc.go +++ b/internal/write/memory/doc.go @@ -1,25 +1,46 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package memory provides terminal output for the memory bridge -// commands (ctx memory status, sync, diff). -// -// Status output renders a dashboard with source path, mirror path, -// sync timestamps, line counts, drift detection, and archive counts. -// Functions are composable: the caller assembles the status display -// by calling [BridgeHeader], [Source], [Mirror], [LastSync], -// [SourceLines], [DriftDetected]/[DriftNone], and [Archives] in -// sequence, separated by [StatusSeparator]. 
-// -// Example (status command): -// -// write.BridgeHeader(cmd) -// write.Source(cmd, sourcePath) -// write.Mirror(cmd, mirrorRelPath) -// write.LastSync(cmd, formatted, ago) -// write.StatusSeparator(cmd) -// write.SourceLines(cmd, count, drifted) +// Package memory provides terminal output for the +// memory bridge commands (ctx memory status, sync, diff). +// +// # Status Dashboard +// +// Functions are composable: the caller assembles the +// status display by calling them in sequence with +// [StatusSeparator] between sections. +// +// [BridgeHeader] prints the "Memory Bridge Status" +// heading. [Source] prints the MEMORY.md source path. +// [SourceNotActive] prints a notice when auto memory +// is not active. [Mirror] prints the mirror relative +// path. [LastSync] prints the last sync timestamp with +// a human-readable age string. [LastSyncNever] prints +// that no sync has occurred yet. +// +// [SourceLines] prints the MEMORY.md line count with +// an optional drift indicator. [MirrorLines] prints +// the mirror line count. [MirrorNotSynced] prints +// that the mirror has not been synced yet. +// +// # Drift Detection +// +// [DriftDetected] prints that drift was detected +// between source and mirror. [DriftNone] prints that +// no drift was detected. +// +// # Archive and Diff +// +// [Archives] prints the archive snapshot count and +// directory. [DiffOutput] prints diff content to +// stdout. [NoChanges] prints that no changes exist +// since the last sync. +// +// # Message Categories +// +// - Info: dashboard sections, sync status, counts +// - Warning: drift detection notices package memory diff --git a/internal/write/message/doc.go b/internal/write/message/doc.go index 5cc917042..9ce20ea55 100644 --- a/internal/write/message/doc.go +++ b/internal/write/message/doc.go @@ -1,16 +1,38 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. 
// SPDX-License-Identifier: Apache-2.0 -// Package message provides formatted output helpers for the message command. +// Package message provides the **terminal-output +// helpers** the `ctx hook message` CLI surface uses to +// render its `list`, `show`, `edit`, and `reset` +// subcommands' output. // -// All functions take *cobra.Command for output routing. -// Exports: [TemplateVars], [CtxSpecificWarning], -// [OverrideCreated], [EditHint], -// [SourceOverride], and 6 more. -// Exports: [TemplateVars], [CtxSpecificWarning], -// [OverrideCreated], [EditHint], -// [SourceOverride], [SourceDefault]. +// All exported functions take a `*cobra.Command` so +// they route through cobra's output stream. +// +// # Public Surface +// +// - **[TemplateVars]**: renders a template's +// placeholder variable list (the `%[1]s`-style +// positional parameters). +// - **[CtxSpecificWarning]**: the warning +// shown when the user tries to override a +// ctx-specific (non-customizable) message. +// - **[OverrideCreated]**: the +// "wrote override at PATH" line `edit` and +// `reset` print after a write. +// - **[EditHint]**: the "run `$EDITOR PATH` to +// edit" hint surfaced by `show` when no +// override exists yet. +// - **[SourceOverride] / [SourceDefault]**: +// "[override]" / "[default]" badges shown +// next to each message in `list` so users +// know which entries they have customized. +// +// # Concurrency +// +// Pure data → io.Writer. Concurrent calls +// serialize through cobra's output stream. package message diff --git a/internal/write/notify/doc.go b/internal/write/notify/doc.go index c12d118ab..205f36ba2 100644 --- a/internal/write/notify/doc.go +++ b/internal/write/notify/doc.go @@ -1,15 +1,39 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. 
// SPDX-License-Identifier: Apache-2.0 -// Package notify provides terminal output for webhook notification -// setup and testing (ctx notify setup, ctx notify test). -// -// [SetupPrompt] displays the webhook URL prompt, [SetupDone] -// confirms successful configuration. [TestResult] reports the HTTP -// response from a test notification, [TestNoWebhook] handles the -// unconfigured case, and [TestFiltered] explains when an event -// type is excluded by the filter. +// Package notify provides terminal output for webhook +// notification setup and testing (ctx hook notify setup, +// ctx hook notify test). +// +// # Setup Flow +// +// [SetupPrompt] displays the interactive webhook URL +// prompt where the user enters their endpoint. +// [SetupDone] prints the success block after saving +// a webhook, showing the masked URL and the encrypted +// file path where credentials are stored. +// +// # Test Flow +// +// [TestResult] reports the HTTP response from a test +// notification including the status code and status +// text. When the response indicates success (2xx), +// an additional confirmation line is printed. +// +// [TestNoWebhook] handles the case when no webhook is +// configured. [TestFiltered] explains when a test +// event type is excluded by the user's event filter +// configuration. +// +// # Message Categories +// +// - Info: setup confirmation, test results +// - Warning: unconfigured or filtered states +// +// # Nil Safety +// +// All functions treat a nil *cobra.Command as a no-op. package notify diff --git a/internal/write/obsidian/doc.go b/internal/write/obsidian/doc.go index d06dfe0c7..b03acab5e 100644 --- a/internal/write/obsidian/doc.go +++ b/internal/write/obsidian/doc.go @@ -1,12 +1,31 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. 
// SPDX-License-Identifier: Apache-2.0 -// Package obsidian provides terminal output for the Obsidian vault -// generation command (ctx journal obsidian). +// Package obsidian provides terminal output for the +// Obsidian vault generation command (ctx journal +// obsidian). // -// [InfoGenerated] reports the number of entries written and the -// output directory path after vault generation completes. +// # Exported Functions +// +// [InfoGenerated] reports the number of journal entries +// written and the output directory path after vault +// generation completes. It also prints a "Next Steps" +// section with instructions for opening the vault in +// Obsidian, including the exact directory path to use. +// +// # Message Categories +// +// - Info: generation result with entry count and +// output path, followed by next-step guidance +// +// # Usage +// +// obsidian.InfoGenerated(cmd, entryCount, outputDir) +// +// The output includes both the result confirmation +// and actionable next steps so the user can immediately +// open the generated vault without consulting docs. package obsidian diff --git a/internal/write/pad/doc.go b/internal/write/pad/doc.go index c48a6ad1c..7628c5694 100644 --- a/internal/write/pad/doc.go +++ b/internal/write/pad/doc.go @@ -4,23 +4,59 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package pad provides terminal output for the encrypted scratchpad -// command (ctx pad). +// Package pad provides terminal output for the encrypted +// scratchpad command (ctx pad). // -// The scratchpad supports text entries and binary blobs, each with -// its own output group: +// The scratchpad stores short text entries and binary +// blobs, each encrypted at rest. Every mutation and +// query has a dedicated output function so the command +// layer stays free of presentation logic. 
// -// - Text entries: [EntryAdded], [EntryUpdated], [EntryRemoved], -// [EntryMoved], [EntryShow], [EntryList] -// - Binary blobs: [BlobWritten], [BlobShow] -// - Import/export: [ImportDone], [ImportNone], [ImportBlobAdded], -// [ExportPlan], [ExportDone], [ExportSummary] -// - Merge: [MergeAdded], [MergeDupe], [MergeBlobConflict], -// [MergeSummary] -// - State: [Empty], [KeyCreated] +// # Text Entries // -// Example: +// [EntryAdded], [EntryUpdated], and [EntryRemoved] emit +// confirmation lines when entries change. [EntryMoved] +// reports position changes. [EntryShow] prints a single +// entry and [EntryList] prints a formatted list item. +// [Normalized] confirms sequential ID renumbering. // -// write.EntryAdded(cmd, index) -// write.EntryShow(cmd, formatted) +// # Binary Blobs +// +// [BlobWritten] confirms a blob was written to disk with +// its byte count and path. [BlobShow] prints raw blob +// data to stdout for piping. +// +// # Import and Export +// +// [ImportDone] reports the number of entries imported. +// [ImportNone] handles the empty-import case. +// [ImportBlobAdded] confirms each blob file imported. +// [ImportBlobSummary] closes import with counts. +// [ErrImportBlobSkipped] and [ErrImportBlobTooLarge] +// report per-blob failures to stderr. +// +// [ExportPlan] previews each blob in dry-run mode. +// [ExportDone] confirms each exported blob. +// [ExportSummary] closes the operation with totals. +// +// # Merge +// +// [MergeAdded] and [MergeDupe] report per-entry merge +// results. [MergeBlobConflict] warns about label +// collisions. [MergeBinaryWarning] flags binary data +// in a source file. [MergeSummary] closes the merge +// with added and skipped counts, adjusting for +// dry-run mode. +// +// # Tags and State +// +// [TagsItem] prints a tag with its count. [TagsJSON] +// emits JSON-encoded tag data. [TagsNone] handles the +// empty case. [Empty] reports an empty scratchpad. +// [KeyCreated] confirms encryption key generation. 
+// +// # Conflict Resolution +// +// [ResolveSide] renders OURS/THEIRS conflict blocks +// with numbered entries for interactive resolution. package pad diff --git a/internal/write/pause/doc.go b/internal/write/pause/doc.go index 633d65c1a..cb3a6a439 100644 --- a/internal/write/pause/doc.go +++ b/internal/write/pause/doc.go @@ -4,9 +4,22 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package pause provides formatted output helpers for the pause command. +// Package pause provides terminal output for the context +// pause command (ctx pause). // -// All functions take *cobra.Command for output routing. -// Exports: [Confirmed]. -// Exports: [Confirmed]. +// When a user pauses context hooks for the current +// session, the CLI confirms the action through this +// package. The pause command suspends all hook-based +// nudges and context injections until the session is +// explicitly resumed. +// +// # Output +// +// [Confirmed] prints a confirmation message that +// includes the session ID whose hooks were paused. +// It accepts a *cobra.Command for output routing +// and the session identifier string. +// +// A nil *cobra.Command is treated as a no-op so +// callers do not need nil guards. package pause diff --git a/internal/write/provenance/doc.go b/internal/write/provenance/doc.go index c02fc231a..665ba3914 100644 --- a/internal/write/provenance/doc.go +++ b/internal/write/provenance/doc.go @@ -1,12 +1,27 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package provenance writes session and git identity lines to -// cobra command output. Used by hooks to emit provenance before -// any conditional logic. +// Package provenance provides terminal output for +// session and git identity lines emitted by hooks. // -// Key exports: [Line]. 
+// Before any conditional hook logic runs, the hook +// system emits a provenance line that identifies the +// current session, git branch, and commit. This +// package formats and prints that line. +// +// # Output +// +// [Line] prints a single provenance line containing +// the short session ID, branch name, commit hash, +// and an optional context-free percentage suffix. +// The suffix is formatted by [ContextSuffix], which +// returns an empty string when the percentage is +// zero or out of range. +// +// Together they produce output like: +// +// [abc123] main @ def456 | Context: 45% free package provenance diff --git a/internal/write/prune/doc.go b/internal/write/prune/doc.go index ff9cb2d66..696c92101 100644 --- a/internal/write/prune/doc.go +++ b/internal/write/prune/doc.go @@ -4,18 +4,30 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package prune provides terminal output for the state file pruning -// command (ctx system prune). +// Package prune provides terminal output for the state +// file pruning command (ctx prune). // -// [DryRunLine] previews each file that would be removed with its -// age. [ErrorLine] reports per-file removal failures. [Summary] -// closes the operation with counts of pruned, skipped, and -// preserved files, adjusting its wording for dry-run mode. +// The prune command removes stale session state files +// that exceed a configured age threshold. Output +// functions cover the full lifecycle from preview +// through completion. // -// Example: +// # Dry-Run Preview // -// for _, f := range stale { -// write.DryRunLine(cmd, f.Name, f.Age) -// } -// write.Summary(cmd, dryRun, pruned, skipped, preserved) +// [DryRunLine] prints each candidate file with its +// human-readable age so the user can review what +// would be removed before committing. +// +// # Error Reporting +// +// [ErrorLine] writes per-file removal failures to +// stderr. 
Each line includes the file name and the +// underlying error so the user can investigate. +// +// # Summary +// +// [Summary] closes the operation with counts of +// pruned, skipped, and preserved files. It adjusts +// its wording automatically for dry-run mode, +// showing "would prune" instead of "pruned". package prune diff --git a/internal/write/publish/doc.go b/internal/write/publish/doc.go index 16298aacf..d87b16386 100644 --- a/internal/write/publish/doc.go +++ b/internal/write/publish/doc.go @@ -4,11 +4,30 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package publish provides terminal output for the memory publish -// command (ctx memory publish). +// Package publish provides terminal output for the +// memory publish command (ctx memory publish). // -// [Plan] displays what would be written to MEMORY.md before -// execution. [DryRun] confirms that no changes were made. -// [Done] confirms a successful publish. [NotFound] and -// [Unpublished] handle error states. +// Publishing compiles selected context files into a +// single MEMORY.md block with a line budget. The +// output functions narrate each stage of that +// process. +// +// # Planning +// +// [Plan] displays the full publish plan: a header, +// source file list, line budget, per-file counts +// (tasks, decisions, conventions, learnings), and +// the total line count versus the budget. +// +// # Execution +// +// [Done] confirms a successful publish with marker +// information. [DryRun] prints a notice that no +// changes were written. +// +// # Unpublish +// +// [NotFound] reports that no published block exists +// in the target file. [Unpublished] confirms that +// the published block was removed. package publish diff --git a/internal/write/rc/doc.go b/internal/write/rc/doc.go index 7cf33dc44..1e4787858 100644 --- a/internal/write/rc/doc.go +++ b/internal/write/rc/doc.go @@ -4,9 +4,26 @@ // \ Copyright 2026-present Context contributors. 
// SPDX-License-Identifier: Apache-2.0 -// Package rc provides formatted output helpers for runtime config loading. +// Package rc provides terminal output for runtime +// configuration loading warnings. // -// All functions take *cobra.Command for output routing. -// Exports: [ParseWarning]. -// Exports: [ParseWarning]. +// During startup, ctx loads YAML configuration files +// from the context directory. When a file cannot be +// parsed, the warning must be emitted before any +// cobra command is available, so this package writes +// directly to os.Stderr through the log/warn layer +// rather than through cobra's output stream. +// +// # Output +// +// [ParseWarning] prints a YAML parse warning that +// includes the filename that failed and the parse +// error. It delegates to the structured warning +// system in [internal/log/warn] with the config +// warning category from [internal/config/warn]. +// +// This runs at config-load time, before any cobra +// command exists, so it bypasses the usual +// *cobra.Command output pattern used by other +// write packages. package rc diff --git a/internal/write/remind/doc.go b/internal/write/remind/doc.go index 390095eca..809679eb4 100644 --- a/internal/write/remind/doc.go +++ b/internal/write/remind/doc.go @@ -4,11 +4,36 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package remind provides terminal output for the session reminder -// commands (ctx remind add, list, dismiss). +// Package remind provides terminal output for the +// session reminder commands (ctx remind add, list, +// dismiss). // -// [Added] confirms a new reminder was created. [Item] renders a -// single reminder in the list with its ID and optional trigger -// condition. [Dismissed] confirms removal. [None] handles the -// empty list case. [DismissedAll] reports bulk dismissal. +// Reminders are short messages attached to a session +// with an optional date gate. 
The output functions +// cover the full CRUD lifecycle. +// +// # Adding +// +// [Added] prints a confirmation that includes the +// reminder ID, message text, and optional "after" +// date suffix when a date gate is set. +// +// # Listing +// +// [Item] renders a single reminder with its ID, +// message, and a "not yet due" annotation when the +// gate date is in the future relative to today. +// [None] handles the empty-list case. +// +// # Dismissing +// +// [Dismissed] confirms removal of a single reminder +// by ID and message. [DismissedAll] reports bulk +// dismissal with a count of removed items. +// +// # Maintenance +// +// [Normalized] confirms that reminder IDs were +// renumbered sequentially after gaps formed from +// deletions. package remind diff --git a/internal/write/resource/doc.go b/internal/write/resource/doc.go index 9759ef845..ce4ae8050 100644 --- a/internal/write/resource/doc.go +++ b/internal/write/resource/doc.go @@ -1,12 +1,40 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package resource provides formatted output helpers for the. +// Package resource provides the **terminal-output +// helpers** the resource-related CLI surfaces use to +// render their results in either human-readable text or +// JSON for tooling. // -// All functions take *cobra.Command for output routing. -// Exports: [Text], [JSON]. -// Exports: [Text], [JSON]. +// All functions take a `*cobra.Command` so they route +// through cobra's output stream (which tests can wire +// to a buffer for assertion). +// +// # Public Surface +// +// - **[Text](cmd, payload)**: renders the +// resource snapshot in human-readable form, +// section headers, glyph-prefixed counts, +// summary line. +// - **[JSON](cmd, payload)**: emits the same +// payload as a structured JSON document with +// a UTC timestamp wrapper, suitable for +// `jq` consumption in CI. 
+// +// # Why a Separate Output Package +// +// The same data shape needs two different +// renderings (human vs machine). Hoisting both +// into a write-side package keeps the producer +// (the CLI command) free of presentation choices +// and the renderer free of business logic. +// +// # Concurrency +// +// Pure data → io.Writer transformation. +// Concurrent calls each write to the cobra +// command's output stream; cobra serializes them. package resource diff --git a/internal/write/restore/doc.go b/internal/write/restore/doc.go index f9e95b101..88920878a 100644 --- a/internal/write/restore/doc.go +++ b/internal/write/restore/doc.go @@ -4,13 +4,31 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package restore provides terminal output for the permission -// restore command (ctx permissions restore/snapshot). +// Package restore provides terminal output for the +// permission restore and snapshot commands +// (ctx permissions restore, ctx permissions snapshot). // -// Output covers two workflows: +// These commands manage a golden image of Claude Code +// settings.local.json permissions. Output covers two +// distinct workflows. // -// - Restore: [Diff] shows what would change between the golden -// image and current settings, [Done] confirms the restore, -// [NoLocal] handles missing settings, [Match] reports no diff. -// - Snapshot: [SnapshotDone] confirms the golden image was saved. +// # Restore Workflow +// +// [Diff] renders the permission difference between +// the golden image and current settings, showing +// dropped and restored entries for both allow and +// deny rules. When all permission lists are empty +// but the files still differ, it prints a note that +// only non-permission settings changed. +// +// [Done] confirms the restore completed. [NoLocal] +// handles the case where no local settings file +// exists. [Match] reports that the current settings +// already match the golden image. 
+// +// # Snapshot Workflow +// +// [SnapshotDone] confirms the golden image was +// saved or updated, distinguishing between a new +// save and an update of an existing snapshot. package restore diff --git a/internal/write/schema/doc.go b/internal/write/schema/doc.go index d24823548..493216161 100644 --- a/internal/write/schema/doc.go +++ b/internal/write/schema/doc.go @@ -4,12 +4,34 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package schema provides write functions for schema CLI output. -// -// All terminal output for the schema check and dump commands is -// routed through this package per the project convention that -// cmd.Print* calls live in internal/write/. Functions accept -// primitive types (strings, ints) rather than domain types to -// avoid cross-package type references that would trigger the -// CrossPackageTypes audit. +// Package schema provides terminal output for the +// schema check and dump commands (ctx schema check, +// ctx schema dump). +// +// The schema system validates session state files +// against expected field layouts and detects drift. +// Output functions render validation results and +// raw schema dumps. +// +// # Validation Results +// +// [NoDirs] reports that no session directories were +// found. [NoFiles] reports that no session files +// were found within the directories. [Clean] prints +// a success message with the count of files and +// lines scanned when no drift is detected. +// +// [DriftSummary] prints a pre-formatted drift +// summary to stderr when validation finds +// mismatches between actual and expected schemas. +// +// # Schema Dump +// +// [DumpLine] prints a single line of schema dump +// output. [DumpBlank] prints a blank separator +// line between dump sections. +// +// Functions accept primitive types (strings, ints) +// rather than domain types to avoid cross-package +// type references. 
package schema diff --git a/internal/write/serve/doc.go b/internal/write/serve/doc.go index 4cb2e5054..63988c273 100644 --- a/internal/write/serve/doc.go +++ b/internal/write/serve/doc.go @@ -1,13 +1,29 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package serve provides output functions for the serve -// command, including ctx Hub startup messages. +// Package serve provides terminal output for the +// ctx Hub server command (ctx serve). // -// Key exports: [HubStarted], [AdminToken]. -// See source files for implementation details. -// Part of the internal subsystem. +// The Hub is a local HTTP server that exposes +// context operations over a REST API. Output +// functions cover the server lifecycle from +// startup through shutdown. +// +// # Startup +// +// [HubStarted] prints the network address the +// server is listening on. [AdminToken] prints the +// generated admin token for authenticating API +// requests. Both are emitted at launch before the +// server begins accepting connections. +// +// # Background Mode +// +// [Daemonized] confirms the hub started as a +// background process, printing the daemon PID. +// [Stopped] confirms a running daemon was killed, +// also printing the PID that was terminated. package serve diff --git a/internal/write/session/doc.go b/internal/write/session/doc.go index ae3680da9..620c434c1 100644 --- a/internal/write/session/doc.go +++ b/internal/write/session/doc.go @@ -4,11 +4,31 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package session provides terminal output for session lifecycle -// commands (ctx pause, ctx resume, ctx wrap-up, ctx system session-event). +// Package session provides terminal output for session +// lifecycle commands. // -// [Event] confirms a session start or end event was recorded. 
-// [Paused] confirms hooks were suspended for the session. -// [Resumed] confirms hooks were re-enabled. [WrappedUp] confirms -// the end-of-session persistence ceremony completed. +// Sessions are the fundamental unit of agent +// interaction in ctx. Each session has a start event, +// optional pause/resume cycles, and an end event. +// The output functions confirm each lifecycle +// transition. +// +// # Lifecycle Events +// +// [Event] confirms a session start or end event was +// recorded, printing the event type and the calling +// editor identifier (e.g. "vscode", "claude"). +// +// # Hook Control +// +// [Paused] confirms that hooks were suspended for +// the named session. [Resumed] confirms hooks were +// re-enabled. Both print the session ID so the +// user can verify which session was affected. +// +// # Wrap-Up +// +// [WrappedUp] confirms the end-of-session +// persistence ceremony completed. This is the +// final output before context files are committed. package session diff --git a/internal/write/setup/doc.go b/internal/write/setup/doc.go index e0ff35e16..d30624cb9 100644 --- a/internal/write/setup/doc.go +++ b/internal/write/setup/doc.go @@ -4,16 +4,43 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package setup provides terminal output for the setup generation +// Package setup provides terminal output for the setup // command (ctx setup) and hook lifecycle output. // -// Functions cover setup deployment output ([InfoCopilotCreated], -// [InfoCopilotMerged], [InfoCopilotSkipped], [InfoCopilotSummary]), -// hook runtime output ([Nudge], [NudgeBlock], [BlockResponse], -// [Context]), and general-purpose hook helpers ([Content], -// [Separator], [InfoTool], [InfoUnknownTool]). +// This package covers two distinct output surfaces: +// tool deployment results and hook runtime messages. // -// Nudge vs NudgeBlock: [Nudge] emits a single-line relay, -// [NudgeBlock] emits a multi-line boxed message. 
Both are -// consumed by the agent as VERBATIM relay directives. +// # Tool Deployment +// +// Setup deploys integration files for various AI +// tools. Copilot output uses [InfoCopilotCreated], +// [InfoCopilotMerged], [InfoCopilotSkipped], and +// [InfoCopilotSummary]. Copilot-CLI uses +// [InfoCopilotCLICreated], [InfoCopilotCLISkipped], +// and [InfoCopilotCLISummary]. AGENTS.md uses +// [InfoAgentsCreated], [InfoAgentsMerged], +// [InfoAgentsSkipped], and [InfoAgentsSummary]. +// +// Generic deploy functions handle file creation +// across tools: [DeployComplete], [DeployFileExists], +// [DeployFileCreated], [DeploySteeringSynced], +// [DeploySteeringSkipped], [DeployNoSteering]. +// +// Editor-specific integration instructions are +// printed by [InfoCursorIntegration], +// [InfoKiroIntegration], and [InfoClineIntegration]. +// +// # Hook Runtime +// +// [Nudge] emits a single-line relay directive. +// [NudgeBlock] emits a multi-line boxed message +// followed by a blank line. Both are consumed by +// the agent as VERBATIM relay directives. +// +// [Context] and [BlockResponse] print JSON hook +// response lines. [Content] prints raw hook +// content. [Separator] prints blank-line dividers. +// +// [InfoTool] prints a pre-formatted tool section. +// [InfoUnknownTool] reports an unrecognized tool. package setup diff --git a/internal/write/site/doc.go b/internal/write/site/doc.go index 174525b49..342cf44b4 100644 --- a/internal/write/site/doc.go +++ b/internal/write/site/doc.go @@ -4,9 +4,28 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package site provides output helpers for the site command. +// Package site provides terminal output for the site +// feed generation command (ctx site feed). // -// All functions take *cobra.Command for output routing. -// Exports: [PrintFeedReport]. -// Exports: [PrintFeedReport]. 
+// The site command generates RSS/Atom feeds from +// journal entries and blog posts in the context +// directory. Output renders the generation summary +// after the feed file is written. +// +// # Output +// +// [PrintFeedReport] outputs the complete feed +// generation summary. It prints the output path +// and number of entries included, then optionally +// lists skipped entries and warnings. +// +// Skipped entries appear when a journal entry +// lacks required frontmatter or fails validation. +// Warnings cover non-fatal issues like missing +// dates or truncated content. +// +// The function accepts a [scan.FeedReport] struct +// that carries pre-computed counts and message +// lists so the output function contains no +// business logic. package site diff --git a/internal/write/skill/doc.go b/internal/write/skill/doc.go index a08f79b10..4fe474fa1 100644 --- a/internal/write/skill/doc.go +++ b/internal/write/skill/doc.go @@ -4,9 +4,30 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package skill provides formatted output helpers for skill commands. +// Package skill provides terminal output for the skill +// management commands (ctx skill install, list, remove). // -// All functions take *cobra.Command for output routing. -// Exports: [Installed], [NoSkillsFound], [SkillEntryWithDesc], -// [SkillEntry], [SkillCount], [Removed]. +// Skills are reusable prompt templates that extend +// agent capabilities. The output functions cover +// the full management lifecycle. +// +// # Installation +// +// [Installed] prints a confirmation that includes +// the skill name and the directory where it was +// installed. +// +// # Listing +// +// [EntryWithDesc] prints a skill entry with its +// name and description. [Entry] prints a skill +// entry with name only, used when no description +// is available. [Count] prints the total number +// of installed skills. [NoSkillsFound] handles +// the empty-list case. 
+// +// # Removal +// +// [Removed] prints a confirmation that the named +// skill was removed from the skills directory. package skill diff --git a/internal/write/stat/doc.go b/internal/write/stat/doc.go index 89e7c4e69..7a97c6092 100644 --- a/internal/write/stat/doc.go +++ b/internal/write/stat/doc.go @@ -4,9 +4,27 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package stat provides formatted output helpers for the stats command. +// Package stat provides terminal output for the stats +// command (ctx stats). // -// All functions take *cobra.Command for output routing. -// Exports: [Table]. -// Exports: [Table]. +// The stats command displays context usage metrics +// in a tabular format. This package handles both +// cobra-routed output and direct writer output for +// streaming scenarios. +// +// # Table Output +// +// [Table] prints pre-formatted stats lines through +// cobra's output stream. It accepts a slice of +// strings containing the header, separator, and +// data rows, and delegates to [line.All] for +// sequential printing. A nil *cobra.Command is +// treated as a no-op. +// +// # Streaming Output +// +// [StreamLine] writes a single formatted stats line +// to an arbitrary io.Writer. This is used when +// stats are emitted outside a cobra command context, +// such as piped output or background reporting. package stat diff --git a/internal/write/status/doc.go b/internal/write/status/doc.go index 8ecafb7d3..c752f47cc 100644 --- a/internal/write/status/doc.go +++ b/internal/write/status/doc.go @@ -4,14 +4,41 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package status provides terminal output for the context status -// command (ctx status). +// Package status provides terminal output for the +// context status command (ctx status). // -// [Header] renders the context directory path with file count and -// total token estimate. 
[FileItem] renders one context file with -// its token count and age; verbose mode adds the full path. -// [Activity] renders recent session activity as a summary list. +// The status command displays a summary of the +// context directory: its path, file inventory, +// token estimates, and recent activity. // -// Types [FileInfo] and [ActivityInfo] carry pre-computed display -// data so the write functions contain no business logic. +// # Header +// +// [Header] renders the context directory path with +// the total file count and estimated token count. +// Token counts are formatted with thousand +// separators for readability. +// +// # File Listing +// +// [FileItem] renders a single context file entry. +// In compact mode it shows a status indicator, +// file name, and status text. In verbose mode it +// adds token count, byte size, and a content +// preview of the first few lines. +// +// # Activity +// +// [Activity] renders the recent session activity +// section with a header and a list of entries, +// each showing a file name and a human-readable +// time-ago string. +// +// # Data Types +// +// [FileInfo] carries pre-computed display data for +// a single file: indicator glyph, name, status +// text, token count, byte size, and preview lines. +// [ActivityInfo] carries a file name and its +// time-ago string. Both types keep business logic +// out of the output functions. package status diff --git a/internal/write/steering/doc.go b/internal/write/steering/doc.go index 695466c0f..20bbaa14b 100644 --- a/internal/write/steering/doc.go +++ b/internal/write/steering/doc.go @@ -1,15 +1,40 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\\ +// `.,'\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package steering provides formatted output helpers for steering commands. -// -// All functions take *cobra.Command for output routing. 
-// Exports: [Created], [Skipped], [InitSummary], -// [NoFilesFound], [FileEntry], [FileCount], -// [NoFilesMatch], [PreviewHeader], [PreviewEntry], -// [PreviewCount], [SyncWritten], [SyncSkipped], -// [SyncError], [SyncSummary]. +// Package steering provides the **terminal-output +// helpers** the `ctx steering` CLI subcommands use to +// narrate their `add`, `init`, `list`, `preview`, and +// `sync` operations. +// +// All exported functions take a `*cobra.Command` so +// they route through cobra's output stream (which +// tests can wire to a buffer for assertion). +// +// # Public Surface +// +// Output families: +// +// - **Init**: [Created], [Skipped], +// [InitSummary]. The `init` subcommand +// announces each foundation file it +// materializes (or skipped because it +// already exists), then summarizes counts. +// - **List / Preview**: [NoFilesFound], +// [FileEntry], [FileCount], [NoFilesMatch], +// [PreviewHeader], [PreviewEntry], +// [PreviewCount]. Render the available +// steering files and their inclusion-rule +// match results against a sample prompt. +// - **Sync**: [SyncWritten], [SyncSkipped], +// [SyncError], [SyncSummary]. Per-tool +// progress narration during +// `ctx steering sync`. +// +// # Concurrency +// +// Pure data → io.Writer. Concurrent calls +// serialize through cobra's output stream. package steering diff --git a/internal/write/sync/doc.go b/internal/write/sync/doc.go index 1ea4c0a76..154c8c553 100644 --- a/internal/write/sync/doc.go +++ b/internal/write/sync/doc.go @@ -4,9 +4,35 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package sync provides formatted output helpers for the sync command. +// Package sync provides terminal output for the context +// sync command (ctx sync). // -// All functions take *cobra.Command for output routing. -// Exports: [AllClear], [Header], [Action], [Summary], [DryRun], and 2 more. 
-// Exports: [AllClear], [Header], [Action], [Summary], [DryRun], [Result]. +// The sync command has two modes: dependency/config +// analysis and MEMORY.md mirror synchronization. +// Output functions cover both workflows. +// +// # Dependency Analysis +// +// [AllClear] prints the all-clear message when the +// context is fully in sync and no actions are +// needed. +// +// [Header] prints the analysis heading with an +// optional dry-run notice. [Action] prints a +// numbered sync action item with its type label, +// description, and optional suggestion text. +// [Summary] closes the analysis with the total +// action count, adjusting wording for dry-run. +// +// # Memory Mirror +// +// [DryRun] prints the dry-run plan block with the +// source path, mirror path, and drift status. +// [Result] prints the full sync result: optional +// archive notice, synced confirmation, source +// path, and line counts with new-content delta. +// +// [ErrAutoMemoryNotActive] prints an informational +// stderr message when automatic memory source +// discovery fails. package sync diff --git a/internal/write/trace/doc.go b/internal/write/trace/doc.go index c28258f69..9d1575eaa 100644 --- a/internal/write/trace/doc.go +++ b/internal/write/trace/doc.go @@ -1,14 +1,46 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package trace provides terminal output functions -// for trace commands. +// Package trace provides terminal output for the +// context trace commands (ctx trace, ctx trace tag, +// ctx trace enable/disable). // -// Formats git commit history, context refs, and tag -// annotations for human-readable display. -// Key exports: [FileEntry], [Tagged]. -// All functions take *cobra.Command for output routing. +// The trace system attaches context references to +// git commits and renders commit history with +// resolved context annotations. 
Output functions +// format commits, references, and hook status. +// +// # Commit Display +// +// [CommitHeader] prints the hash, subject, and date +// for a single commit. [CommitContext] prints the +// "Context:" label before resolved references. +// [CommitNoContext] prints when a commit has no +// attached context. +// +// # File Trace +// +// [FileEntry] prints a single-line trace entry with +// hash, date, subject, and formatted ref summary. +// [LastEntry] prints a compact entry for the +// last-N listing mode. +// +// # Reference Resolution +// +// [Resolved] prints a single resolved context +// reference with its type label, raw value, and +// optional title and detail. It formats the output +// differently depending on whether the reference +// was found and has metadata. +// +// # Tagging and Hooks +// +// [Tagged] confirms a commit was annotated with a +// context note. [HooksEnabled] and [HooksDisabled] +// report trace hook installation and removal. +// [Trailer] prints a collected context trailer +// line when non-empty. package trace diff --git a/internal/write/trigger/doc.go b/internal/write/trigger/doc.go index ee3d2db6e..a3f918bb7 100644 --- a/internal/write/trigger/doc.go +++ b/internal/write/trigger/doc.go @@ -4,11 +4,37 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package trigger provides formatted output helpers for trigger commands. +// Package trigger provides terminal output for the +// trigger hook commands (ctx trigger create, list, +// enable, disable, test). // -// All functions take *cobra.Command for output routing. -// Exports: [Created], [Disabled], [Enabled], [TypeHeader], -// [HookEntry], [BlankLine], [NoHooksFound], [HookCount], -// [TestingHeader], [TestInput], [Cancelled], [ContextOutput], -// [ErrorsHeader], [ErrorLine], [NoOutput]. +// Triggers are user-defined hook scripts that run +// at specific lifecycle points. 
The output functions +// cover management, listing, and testing workflows. +// +// # Management +// +// [Created] confirms a hook script was created at +// a given path. [Disabled] and [Enabled] confirm +// status changes, printing hook name and path. +// +// # Listing +// +// [TypeHeader] prints a section header for each +// hook type. [Entry] prints a single hook with +// its name, enabled/disabled status, and path. +// [Count] prints the total hook count. +// [NoHooksFound] handles the empty-list case. +// [BlankLine] separates sections visually. +// +// # Testing +// +// [TestingHeader] prints the header for a hook +// test run. [TestInput] prints the JSON input +// block sent to the hook. [ContextOutput] prints +// context output from hook execution. +// [Cancelled] prints a cancellation message. +// [ErrorsHeader] and [ErrorLine] render the +// errors section. [NoOutput] reports when hooks +// produced no output. package trigger diff --git a/internal/write/vscode/doc.go b/internal/write/vscode/doc.go index 84acfae74..1ea6de5df 100644 --- a/internal/write/vscode/doc.go +++ b/internal/write/vscode/doc.go @@ -1,18 +1,37 @@ // / ctx: https://ctx.ist // ,'`./ do you remember? -// `.,'\ +// `.,'\\ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package vscode provides terminal output for VS Code artifact generation -// during ctx init. +// Package vscode provides terminal output for VS Code +// artifact generation during ctx init. // -// [InfoCreated] and [InfoExistsSkipped] report file creation results. -// [InfoRecommendationExists] and [InfoAddManually] guide users through -// manual extension setup. [InfoWarnNonFatal] reports non-fatal errors -// without aborting the init flow. +// When ctx initializes a project, it optionally +// creates VS Code configuration files such as +// extensions.json recommendations and workspace +// settings. The output functions report the result +// of each file operation. 
// -// Key exports: [InfoCreated], [InfoExistsSkipped], -// [InfoRecommendationExists], [InfoAddManually], [InfoWarnNonFatal]. -// Used by the setup core packages when deploying VS Code integration. +// # File Creation +// +// [InfoCreated] confirms a VS Code configuration +// file was created at the given path. +// [InfoExistsSkipped] reports a file was skipped +// because it already exists. +// +// # Extension Recommendations +// +// [InfoRecommendationExists] reports the ctx +// extension recommendation already exists in +// extensions.json. [InfoAddManually] guides the +// user to add the extension ID manually when the +// file exists but lacks the ctx recommendation. +// +// # Error Handling +// +// [InfoWarnNonFatal] reports a non-fatal error +// during artifact creation without aborting the +// init flow. It prints a short description and +// the underlying error. package vscode diff --git a/internal/write/vscode/info.go b/internal/write/vscode/info.go index af4745e46..173281ad8 100644 --- a/internal/write/vscode/info.go +++ b/internal/write/vscode/info.go @@ -4,7 +4,6 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package vscode provides terminal output for VS Code artifact generation. package vscode import ( diff --git a/internal/write/watch/doc.go b/internal/write/watch/doc.go index 4743c214e..68c76e4ef 100644 --- a/internal/write/watch/doc.go +++ b/internal/write/watch/doc.go @@ -4,14 +4,36 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package watch provides terminal output for the stdin watch -// command (ctx watch). -// -// Watch monitors stdin for context-update tags and applies them -// to context files. 
Output functions cover the lifecycle: -// [Started] confirms the watch loop began, [DryRun] enables -// preview mode, [StopHint] shows how to exit, [DryRunPreview] -// shows what would be applied, [ApplySuccess]/[ApplyFailed] -// report per-update results, and [Separator] visually separates -// updates. [CloseLogError] reports log cleanup failures. +// Package watch provides terminal output for the stdin +// watch command (ctx watch). +// +// Watch monitors stdin for context-update tags and +// applies them to context files in real time. Output +// functions cover the full lifecycle from startup +// through per-update results. +// +// # Startup +// +// [Started] confirms the watch loop began and is +// reading from stdin. [DryRun] prints a notice +// that updates will be previewed but not applied. +// [StopHint] shows the Ctrl+C hint for exiting. +// +// # Per-Update Results +// +// [DryRunPreview] shows what would be applied, +// printing the update type and content. +// [ApplySuccess] confirms an update was applied +// successfully. [ApplyFailed] reports a failure +// with the update type and error. +// +// # Visual Structure +// +// [Separator] prints a blank line between updates +// for visual clarity in the output stream. +// +// # Cleanup +// +// [CloseLogError] reports a log file close error +// during shutdown, printing the underlying error. package watch diff --git a/internal/write/why/doc.go b/internal/write/why/doc.go index f1e643592..ff0a29a5c 100644 --- a/internal/write/why/doc.go +++ b/internal/write/why/doc.go @@ -4,12 +4,31 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package why provides terminal output for the philosophy command -// (ctx why). -// -// The why command presents embedded project philosophy documents -// through an interactive menu. 
[Banner] renders the header, -// [MenuItem] renders a numbered choice, [MenuPrompt] asks for -// selection, [Content] displays the chosen document, and -// [Separator] visually divides sections. +// Package why provides terminal output for the +// philosophy command (ctx why). +// +// The why command presents embedded project philosophy +// documents through an interactive numbered menu. +// Output functions handle each stage of the menu +// interaction and document display. +// +// # Menu Rendering +// +// [Banner] renders the ctx ASCII art header at the +// top of the menu. [MenuItem] prints a numbered +// choice with its display label. [MenuPrompt] +// prints the selection prompt and waits for input. +// +// # Document Display +// +// [Content] prints the chosen philosophy document +// body to stdout. The content is pre-processed +// before being passed to this function, so it +// contains no formatting logic. +// +// # Visual Structure +// +// [Separator] prints a blank line for visual +// separation between menu sections and between +// the menu and the document body. package why diff --git a/quarantine/deadcode/internal/assets/commands/text/errors.yaml.dead b/quarantine/deadcode/internal/assets/commands/text/errors.yaml.dead deleted file mode 100644 index 96ced8e1a..000000000 --- a/quarantine/deadcode/internal/assets/commands/text/errors.yaml.dead +++ /dev/null @@ -1,41 +0,0 @@ -# Orphan YAML entries removed from errors.yaml — no corresponding DescKey constant in Go code. 
-# Quarantined: 2026-04-02 - -err.add.missing-decision: - short: |- - decisions require complete ADR format - - Missing required flags: %s - - Usage: - ctx add decision "Decision title" \ - --context "What prompted this decision" \ - --rationale "Why this choice over alternatives" \ - --consequence "What changes as a result" - - Example: - ctx add decision "Use PostgreSQL for primary database" \ - --context "Need a reliable database for production workloads" \ - --rationale "PostgreSQL offers ACID compliance, JSON support, and team familiarity" \ - --consequence "Team needs PostgreSQL training; must set up replication" -err.add.missing-learning: - short: |- - learnings require complete format - - Missing required flags: %s - - Usage: - ctx add learning "Learning title" \ - --context "What prompted this learning" \ - --lesson "The key insight" \ - --application "How to apply this going forward" - - Example: - ctx add learning "Go embed requires files in same package" \ - --context "Tried to embed files from parent directory, got compile error" \ - --lesson "go:embed only works with files in same or child directories" \ - --application "Keep embedded files in internal/templates/, not project root" -err.backup.create-backup-generic: - short: 'failed to create backup: %w' -err.deps.cargo-not-found: - short: 'cargo not found in PATH: install Rust toolchain to analyze Cargo projects' diff --git a/quarantine/deadcode/internal/assets/commands/text/hooks.yaml.dead b/quarantine/deadcode/internal/assets/commands/text/hooks.yaml.dead deleted file mode 100644 index a86615799..000000000 --- a/quarantine/deadcode/internal/assets/commands/text/hooks.yaml.dead +++ /dev/null @@ -1,12 +0,0 @@ -# Orphan YAML entries removed from hooks.yaml — no corresponding DescKey constant in Go code. 
-# Quarantined: 2026-04-02 - -skill-discovery.relay-message: - short: Skill discovery nudge -context-load-gate.index-fallback: - short: (no index entries) -context-load-gate.index-header: - short: |+ - --- %s (index - read full entries by date when relevant) --- - %s - diff --git a/quarantine/deadcode/internal/assets/commands/text/ui.yaml.dead b/quarantine/deadcode/internal/assets/commands/text/ui.yaml.dead deleted file mode 100644 index 1f57542a2..000000000 --- a/quarantine/deadcode/internal/assets/commands/text/ui.yaml.dead +++ /dev/null @@ -1,23 +0,0 @@ -# Orphan YAML entries removed from ui.yaml — no corresponding DescKey constant in Go code. -# Quarantined: 2026-04-02 - -pad.key-created: - short: Scratchpad key created at %s -rc.parse_warning: - short: 'ctx: warning: failed to parse %s: %v (using defaults)' -time.commit: - short: commit -time.commits: - short: commits -time.day: - short: day -time.days: - short: days -time.hour: - short: hour -time.hours: - short: hours -time.minute: - short: minute -time.minutes: - short: minutes diff --git a/quarantine/deadcode/internal/assets/commands/text/write.yaml.dead b/quarantine/deadcode/internal/assets/commands/text/write.yaml.dead deleted file mode 100644 index eb104809d..000000000 --- a/quarantine/deadcode/internal/assets/commands/text/write.yaml.dead +++ /dev/null @@ -1,9 +0,0 @@ -# Orphan YAML entries removed from write.yaml — no corresponding DescKey constant in Go code. 
-# Quarantined: 2026-04-02 - -write.init-created-with: - short: ' ✓ %s%s' -write.journal-source-bullet-item: - short: '- %s' -write.notify-close-response: - short: "ctx: close response body: %v" diff --git a/quarantine/deadcode/internal/assets/hooks/messages/registry.go.dead b/quarantine/deadcode/internal/assets/hooks/messages/registry.go.dead deleted file mode 100644 index a46f4bc07..000000000 --- a/quarantine/deadcode/internal/assets/hooks/messages/registry.go.dead +++ /dev/null @@ -1,9 +0,0 @@ -// Dead exports quarantined from internal/assets/hooks/messages/registry.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package messages - -// CategoryCustomizable marks messages intended for -// project-specific customization. -const CategoryCustomizable = "customizable" diff --git a/quarantine/deadcode/internal/assets/read/hook/hook.go.dead b/quarantine/deadcode/internal/assets/read/hook/hook.go.dead deleted file mode 100644 index 4129e92cc..000000000 --- a/quarantine/deadcode/internal/assets/read/hook/hook.go.dead +++ /dev/null @@ -1,51 +0,0 @@ -// Dead exports quarantined from internal/assets/read/hook/hook.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package hook - -// MessageList returns available hook message directory names. -// -// Each hook is a directory under hooks/messages/ containing one or -// more variant .txt template files. -// -// Returns: -// - []string: List of hook directory names -// - error: Non-nil if directory read fails -func MessageList() ([]string, error) { - entries, readErr := assets.FS.ReadDir(asset.DirHooksMessages) - if readErr != nil { - return nil, readErr - } - - names := make([]string, 0, len(entries)) - for _, entry := range entries { - if entry.IsDir() { - names = append(names, entry.Name()) - } - } - return names, nil -} - -// VariantList returns available variant filenames for a hook. 
-// -// Parameters: -// - hook: Hook directory name (e.g., "qa-reminder") -// -// Returns: -// - []string: List of variant filenames (e.g., "gate.txt") -// - error: Non-nil if the hook directory is not found or read fails -func VariantList(hook string) ([]string, error) { - entries, readErr := assets.FS.ReadDir(path.Join(asset.DirHooksMessages, hook)) - if readErr != nil { - return nil, readErr - } - - names := make([]string, 0, len(entries)) - for _, entry := range entries { - if !entry.IsDir() { - names = append(names, entry.Name()) - } - } - return names, nil -} diff --git a/quarantine/deadcode/internal/assets/read/project/project.go.dead b/quarantine/deadcode/internal/assets/read/project/project.go.dead deleted file mode 100644 index 62243da48..000000000 --- a/quarantine/deadcode/internal/assets/read/project/project.go.dead +++ /dev/null @@ -1,20 +0,0 @@ -// Dead exports quarantined from internal/assets/read/project/project.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package project - -// File reads a project-root file by name from the embedded filesystem. -// -// These files are deployed to the project root (not .context/) by dedicated -// handlers during initialization. -// -// Parameters: -// - name: Filename (e.g., "Makefile.ctx") -// -// Returns: -// - []byte: File content -// - error: Non-nil if the file is not found or read fails -func File(name string) ([]byte, error) { - return assets.FS.ReadFile(path.Join(asset.DirProject, name)) -} diff --git a/quarantine/deadcode/internal/assets/tpl/tpl_recall.go.dead b/quarantine/deadcode/internal/assets/tpl/tpl_recall.go.dead deleted file mode 100644 index f9f7232e2..000000000 --- a/quarantine/deadcode/internal/assets/tpl/tpl_recall.go.dead +++ /dev/null @@ -1,14 +0,0 @@ -// Dead exports quarantined from internal/assets/tpl/tpl_recall.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package tpl - -// RecallTokens formats the token stats line. 
-// Args: total, in, out. -// -//nolint:gosec // G101: display template, not a credential -const RecallTokens = "**Tokens**: %s (in: %s, out: %s)" - -// RecallSummaryPlaceholder is the placeholder text in the summary section. -const RecallSummaryPlaceholder = "[Add your summary of this session]" diff --git a/quarantine/deadcode/internal/cli/system/core/journal/types.go.dead b/quarantine/deadcode/internal/cli/system/core/journal/types.go.dead deleted file mode 100644 index c9e41f912..000000000 --- a/quarantine/deadcode/internal/cli/system/core/journal/types.go.dead +++ /dev/null @@ -1,10 +0,0 @@ -// Dead exports quarantined from internal/cli/system/core/journal/types.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package journal - -// MarkResult holds the outcome of marking a stage. -type MarkResult struct { - Marked bool -} diff --git a/quarantine/deadcode/internal/config/asset/asset.go.dead b/quarantine/deadcode/internal/config/asset/asset.go.dead deleted file mode 100644 index f761bb2b1..000000000 --- a/quarantine/deadcode/internal/config/asset/asset.go.dead +++ /dev/null @@ -1,9 +0,0 @@ -// Dead exports quarantined from internal/config/asset/asset.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package asset - -// FileInstructionsCtxMd is the embedded asset filename for -// instructions-context.md. -const FileInstructionsCtxMd = "instructions-context.md" diff --git a/quarantine/deadcode/internal/config/embed/text/check_skill_discovery.go.dead b/quarantine/deadcode/internal/config/embed/text/check_skill_discovery.go.dead deleted file mode 100644 index 0edfbd6c5..000000000 --- a/quarantine/deadcode/internal/config/embed/text/check_skill_discovery.go.dead +++ /dev/null @@ -1,8 +0,0 @@ -// Dead exports quarantined from internal/config/embed/text/check_skill_discovery.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. 
- -package text - -// DescKeySkillDiscoveryRelay is the relay message text key. -const DescKeySkillDiscoveryRelay = "skill-discovery.relay-message" diff --git a/quarantine/deadcode/internal/config/embed/text/context.go.dead b/quarantine/deadcode/internal/config/embed/text/context.go.dead deleted file mode 100644 index 2aaa8981d..000000000 --- a/quarantine/deadcode/internal/config/embed/text/context.go.dead +++ /dev/null @@ -1,12 +0,0 @@ -// Dead exports quarantined from internal/config/embed/text/context.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package text - -const ( - // DescKeyContextLoadGateIndexFallback is the index fallback text key. - DescKeyContextLoadGateIndexFallback = "context-load-gate.index-fallback" - // DescKeyContextLoadGateIndexHeader is the index header text key. - DescKeyContextLoadGateIndexHeader = "context-load-gate.index-header" -) diff --git a/quarantine/deadcode/internal/config/embed/text/hook.go.dead b/quarantine/deadcode/internal/config/embed/text/hook.go.dead deleted file mode 100644 index b8253884c..000000000 --- a/quarantine/deadcode/internal/config/embed/text/hook.go.dead +++ /dev/null @@ -1,8 +0,0 @@ -// Dead exports quarantined from internal/config/embed/text/hook.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package text - -// DescKeyWriteHookCopilotCLISkills is the hook write output text key. -const DescKeyWriteHookCopilotCLISkills = "write.hook-copilot-cli-skills" diff --git a/quarantine/deadcode/internal/config/embed/text/initialize.go.dead b/quarantine/deadcode/internal/config/embed/text/initialize.go.dead deleted file mode 100644 index c397d7016..000000000 --- a/quarantine/deadcode/internal/config/embed/text/initialize.go.dead +++ /dev/null @@ -1,8 +0,0 @@ -// Dead exports quarantined from internal/config/embed/text/initialize.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. 
- -package text - -// DescKeyRcParseWarning is the rc parse warning text key. -const DescKeyRcParseWarning = "rc.parse_warning" diff --git a/quarantine/deadcode/internal/config/embed/text/journal_source.go.dead b/quarantine/deadcode/internal/config/embed/text/journal_source.go.dead deleted file mode 100644 index 59ceb3228..000000000 --- a/quarantine/deadcode/internal/config/embed/text/journal_source.go.dead +++ /dev/null @@ -1,8 +0,0 @@ -// Dead exports quarantined from internal/config/embed/text/journal_source.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package text - -// DescKeyWriteJournalSourceBulletItem is the bullet item text key. -const DescKeyWriteJournalSourceBulletItem = "write.journal-source-bullet-item" diff --git a/quarantine/deadcode/internal/config/embed/text/notify.go.dead b/quarantine/deadcode/internal/config/embed/text/notify.go.dead deleted file mode 100644 index c3dd5f6f4..000000000 --- a/quarantine/deadcode/internal/config/embed/text/notify.go.dead +++ /dev/null @@ -1,8 +0,0 @@ -// Dead exports quarantined from internal/config/embed/text/notify.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package text - -// DescKeyWriteNotifyCloseResponse is the notify output text key. -const DescKeyWriteNotifyCloseResponse = "write.notify-close-response" diff --git a/quarantine/deadcode/internal/config/embed/text/pad.go.dead b/quarantine/deadcode/internal/config/embed/text/pad.go.dead deleted file mode 100644 index 6f3899feb..000000000 --- a/quarantine/deadcode/internal/config/embed/text/pad.go.dead +++ /dev/null @@ -1,8 +0,0 @@ -// Dead exports quarantined from internal/config/embed/text/pad.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package text - -// DescKeyPadKeyCreated is the pad key created text key. 
-const DescKeyPadKeyCreated = "pad.key-created" diff --git a/quarantine/deadcode/internal/config/embed/text/time.go.dead b/quarantine/deadcode/internal/config/embed/text/time.go.dead deleted file mode 100644 index e5eb03bb4..000000000 --- a/quarantine/deadcode/internal/config/embed/text/time.go.dead +++ /dev/null @@ -1,16 +0,0 @@ -// Dead exports quarantined from internal/config/embed/text/time.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package text - -const ( - DescKeyTimeCommit = "time.commit" - DescKeyTimeCommits = "time.commits" - DescKeyTimeDay = "time.day" - DescKeyTimeDays = "time.days" - DescKeyTimeHour = "time.hour" - DescKeyTimeHours = "time.hours" - DescKeyTimeMinute = "time.minute" - DescKeyTimeMinutes = "time.minutes" -) diff --git a/quarantine/deadcode/internal/config/entry/entry.go.dead b/quarantine/deadcode/internal/config/entry/entry.go.dead deleted file mode 100644 index 015a5c4d1..000000000 --- a/quarantine/deadcode/internal/config/entry/entry.go.dead +++ /dev/null @@ -1,11 +0,0 @@ -// Dead exports quarantined from internal/config/entry/entry.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package entry - -// Conventions is the plural form used as a label and resource identifier. -const Conventions = "conventions" - -// Tasks is the plural form used as a label and resource identifier. -const Tasks = "tasks" diff --git a/quarantine/deadcode/internal/config/entry/field.go.dead b/quarantine/deadcode/internal/config/entry/field.go.dead deleted file mode 100644 index 5977201a8..000000000 --- a/quarantine/deadcode/internal/config/entry/field.go.dead +++ /dev/null @@ -1,19 +0,0 @@ -// Dead exports quarantined from internal/config/entry/field.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package entry - -const ( - // FieldContext is the background/situation field for decisions and learnings. 
- FieldContext = "context" - // FieldRationale is the reasoning field for decisions (why this choice). - FieldRationale = "rationale" - // FieldConsequence is the outcomes field for decisions (what changes). - FieldConsequence = "consequence" - // FieldApplication is the usage field for learnings - // (how to apply going forward). - FieldApplication = "application" - // FieldLesson is the insight field for learnings (the key takeaway). - FieldLesson = "lesson" -) diff --git a/quarantine/deadcode/internal/config/file/ext.go.dead b/quarantine/deadcode/internal/config/file/ext.go.dead deleted file mode 100644 index bd99ca6bf..000000000 --- a/quarantine/deadcode/internal/config/file/ext.go.dead +++ /dev/null @@ -1,12 +0,0 @@ -// Dead exports quarantined from internal/config/file/ext.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package file - -const ( - // ExtJSON is the JSON file extension. - ExtJSON = ".json" - // ExtEnc is the encrypted file extension. - ExtEnc = ".enc" -) diff --git a/quarantine/deadcode/internal/config/flag/flag.go.dead b/quarantine/deadcode/internal/config/flag/flag.go.dead deleted file mode 100644 index c82dd8fa1..000000000 --- a/quarantine/deadcode/internal/config/flag/flag.go.dead +++ /dev/null @@ -1,8 +0,0 @@ -// Dead exports quarantined from internal/config/flag/flag.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package flag - -// Stdin is a shared flag name used across commands. -const Stdin = "stdin" diff --git a/quarantine/deadcode/internal/config/hook/hook.go.dead b/quarantine/deadcode/internal/config/hook/hook.go.dead deleted file mode 100644 index 157bc4e37..000000000 --- a/quarantine/deadcode/internal/config/hook/hook.go.dead +++ /dev/null @@ -1,13 +0,0 @@ -// Dead exports quarantined from internal/config/hook/hook.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package hook - -// Copilot CLI hook event names (GitHub Copilot CLI lifecycle stages). 
-const ( - CLIEventSessionStart = "sessionStart" - CLIEventSessionEnd = "sessionEnd" - CLIEventPreToolUse = "preToolUse" - CLIEventPostToolUse = "postToolUse" -) diff --git a/quarantine/deadcode/internal/config/hook/notify.go.dead b/quarantine/deadcode/internal/config/hook/notify.go.dead deleted file mode 100644 index 96009ab27..000000000 --- a/quarantine/deadcode/internal/config/hook/notify.go.dead +++ /dev/null @@ -1,8 +0,0 @@ -// Dead exports quarantined from internal/config/hook/notify.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package hook - -// NotifyChannelSession is the notification channel for session events. -const NotifyChannelSession = "session" diff --git a/quarantine/deadcode/internal/config/load_gate/load_gate.go.dead b/quarantine/deadcode/internal/config/load_gate/load_gate.go.dead deleted file mode 100644 index 72902682c..000000000 --- a/quarantine/deadcode/internal/config/load_gate/load_gate.go.dead +++ /dev/null @@ -1,9 +0,0 @@ -// Dead exports quarantined from internal/config/load_gate/load_gate.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package load_gate - -// ContextLoadIndexSuffix is the suffix appended to filenames -// for index entries. -const ContextLoadIndexSuffix = " (idx)" diff --git a/quarantine/deadcode/internal/config/marker/marker.go.dead b/quarantine/deadcode/internal/config/marker/marker.go.dead deleted file mode 100644 index bbefc40da..000000000 --- a/quarantine/deadcode/internal/config/marker/marker.go.dead +++ /dev/null @@ -1,20 +0,0 @@ -// Dead exports quarantined from internal/config/marker/marker.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package marker - -const ( - // CopilotEnd marks the end of ctx-managed Copilot content. - CopilotEnd = "" - // AgentsEnd marks the end of ctx-managed AGENTS.md content. - AgentsEnd = "" - // TablePipePad is the padded cell delimiter. - TablePipePad = " | " - // TableRowOpen opens a table row. 
- TableRowOpen = "| " - // TableRowClose closes a table row. - TableRowClose = " |" - // TableSepCell is a header separator cell. - TableSepCell = "------" -) diff --git a/quarantine/deadcode/internal/config/obsidian/obsidian.go.dead b/quarantine/deadcode/internal/config/obsidian/obsidian.go.dead deleted file mode 100644 index 4bf52835d..000000000 --- a/quarantine/deadcode/internal/config/obsidian/obsidian.go.dead +++ /dev/null @@ -1,9 +0,0 @@ -// Dead exports quarantined from internal/config/obsidian/obsidian.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package obsidian - -// MOCPrefix is prepended to MOC filenames so they sort first -// in the Obsidian file explorer. -const MOCPrefix = "_" diff --git a/quarantine/deadcode/internal/config/session/session.go.dead b/quarantine/deadcode/internal/config/session/session.go.dead deleted file mode 100644 index 9ef508818..000000000 --- a/quarantine/deadcode/internal/config/session/session.go.dead +++ /dev/null @@ -1,11 +0,0 @@ -// Dead exports quarantined from internal/config/session/session.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package session - -// EventStart marks the beginning of a workspace session. -const EventStart = "start" - -// EventEnd marks the end of a workspace session. -const EventEnd = "end" diff --git a/quarantine/deadcode/internal/config/sync/pattern.go.dead b/quarantine/deadcode/internal/config/sync/pattern.go.dead deleted file mode 100644 index 3ea259573..000000000 --- a/quarantine/deadcode/internal/config/sync/pattern.go.dead +++ /dev/null @@ -1,20 +0,0 @@ -// Dead exports quarantined from internal/config/sync/pattern.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package sync - -// Patterns returns all config file glob patterns in detection order. 
-// -// Returns: -// - []string: Glob patterns for all supported config file types -func Patterns() []string { - return []string{ - PatternEslint, - PatternPrettier, - PatternTSConfig, - PatternEditorConf, - PatternMakefile, - PatternDockerfile, - } -} diff --git a/quarantine/deadcode/internal/config/token/prefix.go.dead b/quarantine/deadcode/internal/config/token/prefix.go.dead deleted file mode 100644 index 884f9cb08..000000000 --- a/quarantine/deadcode/internal/config/token/prefix.go.dead +++ /dev/null @@ -1,8 +0,0 @@ -// Dead exports quarantined from internal/config/token/prefix.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package token - -// PrefixBracket is the opening bracket used for placeholder checks. -const PrefixBracket = "[" diff --git a/quarantine/deadcode/internal/config/trace/trace.go.dead b/quarantine/deadcode/internal/config/trace/trace.go.dead deleted file mode 100644 index f5bf600f5..000000000 --- a/quarantine/deadcode/internal/config/trace/trace.go.dead +++ /dev/null @@ -1,8 +0,0 @@ -// Dead exports quarantined from internal/config/trace/trace.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package trace - -// TaskCompletedMarker is the checkbox state for completed tasks in diffs. -const TaskCompletedMarker = "x" diff --git a/quarantine/deadcode/internal/config/vscode/vscode.go.dead b/quarantine/deadcode/internal/config/vscode/vscode.go.dead deleted file mode 100644 index b2fd0a151..000000000 --- a/quarantine/deadcode/internal/config/vscode/vscode.go.dead +++ /dev/null @@ -1,17 +0,0 @@ -// Dead exports quarantined from internal/config/vscode/vscode.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. 
- -package vscode - -const ( - KeyVersion = "version" - KeyTasks = "tasks" - KeyLabel = "label" - KeyType = "type" - KeyGroup = "group" - KeyPresentation = "presentation" - KeyReveal = "reveal" - KeyPanel = "panel" - KeyProblemMatcher = "problemMatcher" -) diff --git a/quarantine/deadcode/internal/config/watch/watch.go.dead b/quarantine/deadcode/internal/config/watch/watch.go.dead deleted file mode 100644 index 9341eede5..000000000 --- a/quarantine/deadcode/internal/config/watch/watch.go.dead +++ /dev/null @@ -1,9 +0,0 @@ -// Dead exports quarantined from internal/config/watch/watch.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package watch - -// AttrExtractFormat is the regex format for extracting an XML -// attribute value by name. -const AttrExtractFormat = `%s="([^"]*)"` diff --git a/quarantine/deadcode/internal/err/add/add.go.dead b/quarantine/deadcode/internal/err/add/add.go.dead deleted file mode 100644 index 46bb12c21..000000000 --- a/quarantine/deadcode/internal/err/add/add.go.dead +++ /dev/null @@ -1,33 +0,0 @@ -// Dead exports quarantined from internal/err/add/add.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package add - -// MissingDecision returns an error with usage help for incomplete decisions. -// -// Parameters: -// - missing: List of missing required flag names (e.g., "--context") -// -// Returns: -// - error: Formatted error with ADR format requirements and example -func MissingDecision(missing []string) error { - return fmt.Errorf( - desc.Text(text.DescKeyErrAddMissingDecision), - strings.Join(missing, token.CommaSpace), - ) -} - -// MissingLearning returns an error with usage help for incomplete learnings. 
-// -// Parameters: -// - missing: List of missing required flag names (e.g., "--lesson") -// -// Returns: -// - error: Formatted error with learning format requirements and example -func MissingLearning(missing []string) error { - return fmt.Errorf( - desc.Text(text.DescKeyErrAddMissingLearning), - strings.Join(missing, token.CommaSpace), - ) -} diff --git a/quarantine/deadcode/internal/err/backup/backup.go.dead b/quarantine/deadcode/internal/err/backup/backup.go.dead deleted file mode 100644 index fa3e0ea13..000000000 --- a/quarantine/deadcode/internal/err/backup/backup.go.dead +++ /dev/null @@ -1,19 +0,0 @@ -// Dead exports quarantined from internal/err/backup/backup.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package backup - -// CreateGeneric wraps a generic backup creation failure. -// -// Parameters: -// - cause: the underlying OS error -// -// Returns: -// - error: "failed to create backup: " -func CreateGeneric(cause error) error { - return fmt.Errorf( - desc.Text(text.DescKeyErrBackupCreateBackupGeneric), - cause, - ) -} diff --git a/quarantine/deadcode/internal/err/dep/dep.go.dead b/quarantine/deadcode/internal/err/dep/dep.go.dead deleted file mode 100644 index a9444f760..000000000 --- a/quarantine/deadcode/internal/err/dep/dep.go.dead +++ /dev/null @@ -1,13 +0,0 @@ -// Dead exports quarantined from internal/err/dep/dep.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package dep - -// CargoNotFound returns an error when cargo is not in PATH. 
-// -// Returns: -// - error: advises installing the Rust toolchain -func CargoNotFound() error { - return errors.New(desc.Text(text.DescKeyErrDepsCargoNotFound)) -} diff --git a/quarantine/deadcode/internal/err/prompt/prompt.go.dead b/quarantine/deadcode/internal/err/prompt/prompt.go.dead deleted file mode 100644 index cee90c541..000000000 --- a/quarantine/deadcode/internal/err/prompt/prompt.go.dead +++ /dev/null @@ -1,32 +0,0 @@ -// Dead exports quarantined from internal/err/prompt/prompt.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package prompt - -// ListEntryTemplates wraps a failure to list entry templates. -// -// Parameters: -// - cause: the underlying error -// -// Returns: -// - error: "failed to list entry templates: " -func ListEntryTemplates(cause error) error { - return fmt.Errorf( - desc.Text(text.DescKeyErrPromptListEntryTemplates), cause, - ) -} - -// ReadEntryTemplate wraps a failure to read an entry template. -// -// Parameters: -// - name: template name that failed to read -// - cause: the underlying error -// -// Returns: -// - error: "failed to read entry template : " -func ReadEntryTemplate(name string, cause error) error { - return fmt.Errorf( - desc.Text(text.DescKeyErrPromptReadEntryTemplate), name, cause, - ) -} diff --git a/quarantine/deadcode/internal/write/initialize/init.go.dead b/quarantine/deadcode/internal/write/initialize/init.go.dead deleted file mode 100644 index 2b10c1a5d..000000000 --- a/quarantine/deadcode/internal/write/initialize/init.go.dead +++ /dev/null @@ -1,17 +0,0 @@ -// Dead exports quarantined from internal/write/initialize/init.go -// Quarantined: 2026-04-02 -// Restore from git history if needed. - -package initialize - -// CreatedWith reports a file created with a qualifier (e.g. " (ralph mode)"). 
-// -// Parameters: -// - cmd: Cobra command for output -// - path: created file path -// - qualifier: additional info appended after the path -func CreatedWith(cmd *cobra.Command, path, qualifier string) { - cmd.Println(fmt.Sprintf( - desc.Text(text.DescKeyWriteInitCreatedWith), - path, qualifier)) -} diff --git a/site/404.html b/site/404.html index fdcca82a8..948fd4102 100644 --- a/site/404.html +++ b/site/404.html @@ -124,9 +124,9 @@ - + -

A Meta-Experiment in AI-As to a disciplined system for persistent AI context, and what I have learned along the way.

-

Context is a Record

+

Context Is a Record

Context is a persistent record.

By "context", I don't mean model memory or stored thoughts:

I mean the durable record of decisions, learnings, and intent @@ -1322,7 +1294,7 @@

The Rename87dcfa1 README. 4f0e195 feat: separate orchestrator directive from agent tasks

-

YOLO Mode: Fast, But Dangerous

+

YOLO Mode: Fast, but Dangerous

The Ralph Loop made feature development incredibly fast.

But it created technical debt that I didn't notice until later.

A comparison session on January 25th revealed the patterns:

@@ -1425,11 +1397,11 @@

The Constitution versus Conventions conventions...) should go in to CONVENTIONS.md.

Here's how ctx explained why the distinction was important:

-

Decision record, 2026-01-25

+

Decision Record, 2026-01-25

Overly strict constitution creates friction and gets ignored.

Conventions can be bent; constitution cannot.

-

Hooks: Harder Than They Look

+

Hooks: Harder than They Look

Claude Code hooks seemed simple: Run a script before/after certain events.

But I hit multiple gotchas:

1. Key names matter

@@ -1466,7 +1438,7 @@

The Session Files -

Middle Ground: the Scratchpad

+

Middle Ground: The Scratchpad

For sensitive notes that do need to travel with the project, ctx pad stores encrypted one-liners in git, and ctx pad add "label" --file PATH can ingest small files.

@@ -1562,7 +1534,7 @@

Task Archives: The Completed WorkPhase 13: Rich Context Entries

That's an impressive **173 commits** across 8 days of development.

-

What I Learned About AI-Assisted Development

+

What I Learned about AI-Assisted Development

1. Memory changes everything

When the AI remembers decisions, it doesn't repeat mistakes.

When the AI knows your conventions, it follows them.

@@ -1616,7 +1588,7 @@

Conclusionctx.ist.
-

Session Records are a Gold Mine

+

Session Records Are a Gold Mine

By the time of this writing, I have more than 70 megabytes of text-only session capture, spread across >100 Markdown and JSONL files.

diff --git a/site/blog/2026-02-01-ctx-v0.2.0-the-archaeology-release/index.html b/site/blog/2026-02-01-ctx-v0.2.0-the-archaeology-release/index.html index c9ea3c9aa..7f46e67fa 100644 --- a/site/blog/2026-02-01-ctx-v0.2.0-the-archaeology-release/index.html +++ b/site/blog/2026-02-01-ctx-v0.2.0-the-archaeology-release/index.html @@ -131,9 +131,9 @@ - + -

ctx

-

Digging Through the Past to Build the Future

+

Digging through the Past to Build the Future

Jose Alekhinne / 2026-02-01

-

What if Your AI Could Remember Everything?

+

What If Your AI Could Remember Everything?

Not just the current session, but every session:

  • Every decision made,
  • @@ -1305,7 +1277,7 @@

    The Problem: Amnesia Isn't
  • "How did the embed.go split actually happen?"
-

Fate is Whimsical

+

Fate Is Whimsical

The irony was painful:

I built a tool to prevent AI amnesia, but I was suffering from human amnesia about what happened in AI sessions.

@@ -1360,7 +1332,7 @@

ctx recall: Browse Your PastSlugs are auto-generated from session IDs (memorable names instead of UUIDs). The goal (as the name implies) is recall, not archival accuracy.

-

2,121 lines of new code

+

2,121 Lines of New Code

The ctx recall feature was the largest single addition:

parser library, CLI commands, test suite, and slash command.

@@ -1448,7 +1420,7 @@

The Structure: Decision --application "Added to Makefile and CI config"

-

Structured entries are prompts to the AI

+

Structured Entries Are Prompts to the AI

When the AI reads a decision with full context, rationale, and consequences, it understands the why, not just the what.

@@ -1474,7 +1446,7 @@

The Index: Quick Reference Tables

The same structure serves two very different readers.

-

Reindex after manual edits

+

Reindex After Manual Edits

If you edit entries by hand, rebuild the index with:

ctx decisions reindex
 ctx learnings reindex
@@ -1506,7 +1478,7 @@ 

1. Raw Data Isn't Knowledge2. Enforcement > Documentation

-

The Prompt is a Guideline

+

The Prompt Is a Guideline

The code is more what you'd call 'guidelines' than actual rules.

-Hector Barbossa

@@ -1519,7 +1491,7 @@

4. Meta-Tools CompoundTools that analyze their own development tend to generalize well.

The journal system started as a way to understand ctx itself.

It immediately became useful for everything else.

-

v0.2.0 in The Numbers

+

v0.2.0 in the Numbers

This was a heavy release. The numbers reflect that:

diff --git a/site/blog/2026-02-01-refactoring-with-intent/index.html b/site/blog/2026-02-01-refactoring-with-intent/index.html index 5518e3a5b..7022e3d3b 100644 --- a/site/blog/2026-02-01-refactoring-with-intent/index.html +++ b/site/blog/2026-02-01-refactoring-with-intent/index.html @@ -131,9 +131,9 @@ - + -

Summaries first. Details: on demand.

-

Quality Over Quantity

+

Quality over Quantity

Here is the counterintuitive part: more context can make AI worse.

Extra tokens add noise, not clarity:

-

Judgment Suppression is Dangerous

+

Judgment Suppression Is Dangerous

The attack vector is structurally identical to prompt injection.

It teaches the AI that its own judgment is wrong.

It weakens or disables safeguard mechanisms, and it is @@ -1409,7 +1381,7 @@

Conflict Pattern 5: Universal Tri

Universal triggers override the platform's relevance matching: The AI spends tokens on process overhead instead of the actual task.

-

ctx preserves relevance

+

ctx Preserves Relevance

This is exactly the failure mode ctx exists to mitigate:

Wasting attention budget on irrelevant process instead of task-specific state.

diff --git a/site/blog/2026-02-05-you-cant-import-expertise/index.html b/site/blog/2026-02-05-you-cant-import-expertise/index.html index 724d30bd3..a5adc7a9a 100644 --- a/site/blog/2026-02-05-you-cant-import-expertise/index.html +++ b/site/blog/2026-02-05-you-cant-import-expertise/index.html @@ -131,9 +131,9 @@ - + -
-

What Changes Between Versions

+

What Changes between Versions

ctx init generates two categories of files:

diff --git a/site/recipes/activating-context/index.html b/site/recipes/activating-context/index.html new file mode 100644 index 000000000..3e146ccc1 --- /dev/null +++ b/site/recipes/activating-context/index.html @@ -0,0 +1,2416 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Activating a Context Directory - ctx: do you remember? + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + + + + + + +
+ +
+ + + + +
+
+ + + +
+
+
+ + + + + + + +
+
+
+ + + + + + + +
+ + + + + + + + + + + +
+ + + + + + +

Activating a Context Directory

+ +

ctx

+

The Problem

+

You ran a ctx command and got:

+
Error: no context directory specified for this project
+
+

This means ctx doesn't know which .context/ directory to operate +on. It will not guess, and it will not walk up from your current +working directory looking for one; that behavior was removed +deliberately, because silent inference was the source of several +bugs (stray agent-created directories, cross-project bleed-through, +webhook-route misrouting, sub-agent fragmentation). Every ctx +command requires you to declare the target directory explicitly.

+

This page shows you the three ways to do that and when to use each.

+

TL;DR

+

If the project has already been initialized and you just need to +bind it for your shell:

+
eval "$(ctx activate)"
+
+

That's 95% of the time. Add it to .zshrc / .bashrc per project +with direnv, or run it once per terminal.

+

When You See the Error

+

The exact error message depends on how many .context/ directories +are visible from the current directory:

+

Zero Candidates

+
Error: no context directory specified for this project
+
+

Either you haven't initialized this project yet (run ctx init) +or you're in a directory that doesn't belong to a ctx-tracked +project. If you know the project lives elsewhere, use one of the +declaration methods below with its absolute path.

+

One Candidate

+
Error: no context directory specified; a likely candidate is at
+    /Users/you/repos/myproject/.context
+
+

ctx found a single .context/ on the way up from here but won't +bind to it automatically. Run eval "$(ctx activate)" and ctx +will emit the export for the candidate. Or set CTX_DIR by hand.

+

Multiple Candidates

+
Error: no context directory specified; multiple candidates visible:
+  /Users/you/repos/myproject/.context
+  /Users/you/repos/myproject/packages/web/.context
+
+

You're inside nested projects. Pick the one you mean:

+
ctx activate /Users/you/repos/myproject/.context
+# …copy and paste the `export` line it prints, or wrap in eval:
+eval "$(ctx activate /Users/you/repos/myproject/.context)"
+
+

Three Ways to Declare

+ +

ctx activate emits a shell-native export CTX_DIR=... line to +stdout. Wrap it in eval and the binding takes effect for the +current shell:

+
# Walk up from current dir and bind the single visible candidate:
+eval "$(ctx activate)"
+
+# Bind a specific path explicitly:
+eval "$(ctx activate /abs/path/to/.context)"
+
+# Clear the binding:
+eval "$(ctx deactivate)"
+
+

ctx activate validates paths strictly: the target must exist, be +a directory, and contain at least one canonical context file +(CONSTITUTION.md or TASKS.md). It refuses to emit for multiple +upward candidates; pick one explicitly in that case.

+

Under the hood, the emitted line is just:

+
export CTX_DIR='/abs/path/to/.context'
+
+

So you can copy it into your .zshrc / .bashrc if you want the +binding permanent for a given shell setup. Better: use +direnv with a per-project .envrc.

+

2. CTX_DIR Env Var

+

If you already know the path, export it directly:

+
export CTX_DIR=/abs/path/to/.context
+ctx status
+
+

CTX_DIR is the same variable ctx activate writes; activate +is just a convenience that figures out the path for you.

+

3. Inline One-Shot

+

For one-shot commands (CI jobs, scripts, debugging a specific +project without changing your shell state), prefix the binding +inline:

+
CTX_DIR=/abs/path/to/.context ctx status
+
+

This binds CTX_DIR for that invocation only.

+

CTX_DIR must be an absolute path with .context as its basename. +Relative paths and other names are rejected on first use; the +basename guard catches the common footgun +(export CTX_DIR=$(pwd)) before stray writes can leak to the +project root.

+

For CI and Scripts

+

Do not rely on shell activation in automated flows. Set CTX_DIR +explicitly at the top of the script:

+
#!/usr/bin/env bash
+set -euo pipefail
+
+export CTX_DIR="$GITHUB_WORKSPACE/.context"
+ctx status
+ctx drift
+
+

For Claude Code Users

+

The ctx plugin's hooks are generated with +CTX_DIR="$CLAUDE_PROJECT_DIR/.context" prefixed to each command, +so hook-driven ctx invocations resolve correctly without any +per-session setup. You only need to activate manually when running +ctx yourself in a terminal.

+

One Project, One .context/

+

The context directory is not a free-floating bag of files. It is +pinned to a project by contract: filepath.Dir(ContextDir()) is +the project root. That parent directory is what ctx sync, +ctx drift, and the memory-drift hook scan for code, secret files, +and MEMORY.md respectively.

+

The practical consequences:

+
    +
  • Don't share one .context/ across multiple projects. It holds + per-project journals, per-session state, and per-project secrets. + Pointing two codebases at the same directory corrupts all three.
  • +
  • If you want to share knowledge (CONSTITUTION, CONVENTIONS, + ARCHITECTURE) across projects, use ctx hub. It cherry-picks + entries at the right granularity and keeps the per-project bits + where they belong.
  • +
  • The CTX_DIR you activate is implicitly a project-root + declaration. Setting CTX_DIR=/weird/place/.context means + you're telling ctx the project root is /weird/place/. That's + your call to make; ctx does not police it.
  • +
+ +
~/WORKSPACE/my-to-do-list
+  ├── .git
+  ├── .context          ← owned by this project; do not share
+  ├── ideas
+  │   └── ...
+  ├── Makefile
+  ├── Makefile.ctx
+  └── specs
+      └── ...
+
+

.context/ sits at the project root, next to .git. ctx activate +binds to it; every ctx subsystem reads the project from its parent.

+

Why Not Walk Up Automatically?

+

Nested projects, submodules, rogue agent-created .context/ +directories, and sub-agent sessions all produced silent misrouting +under the old walk-up model. See the +explicit-context-dir spec +and the analysis doc +for the full reasoning.

+

The short version: ctx decided to stop guessing and require the +caller to declare. Every other decision flows from there.

+ + + + + + + + + + + + + + + + + + +
+
+ + + + + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + + + + + + + \ No newline at end of file diff --git a/site/recipes/architecture-deep-dive/index.html b/site/recipes/architecture-deep-dive/index.html index 188f618ac..354572179 100644 --- a/site/recipes/architecture-deep-dive/index.html +++ b/site/recipes/architecture-deep-dive/index.html @@ -131,9 +131,9 @@ - + -
@@ -1200,20 +1172,20 @@

Commands and Skills Used

The Workflow

-

Pass 1: Map what exists

+

Pass 1: Map What Exists

/ctx-architecture
 

Produces:

    -
  • ARCHITECTURE.md — succinct project map (< 4000 tokens), +
  • ARCHITECTURE.md: succinct project map (< 4000 tokens), loaded at every session start
  • -
  • DETAILED_DESIGN*.md — deep per-module reference with +
  • DETAILED_DESIGN*.md: deep per-module reference with exported API, data flow, danger zones, extension points
  • -
  • CHEAT-SHEETS.md — lifecycle flow diagrams
  • -
  • map-tracking.json — coverage state with confidence scores
  • +
  • CHEAT-SHEETS.md: lifecycle flow diagrams
  • +
  • map-tracking.json: coverage state with confidence scores

This pass forces deep code reading. No shortcuts, no code -intelligence tools — the agent reads every module it analyzes. +intelligence tools; the agent reads every module it analyzes. That forced reading is what makes the subsequent passes useful.

When to run: First time on a codebase, or after significant structural changes (new packages, moved files, changed @@ -1222,7 +1194,7 @@

Pass 1: Map what exists
/ctx-architecture principal
 
-

Pass 2: Enrich with code intelligence

+

Pass 2: Enrich with Code Intelligence

/ctx-architecture-enrich
 

Takes the Pass 1 artifacts as baseline and layers on verified, @@ -1233,14 +1205,14 @@

Pass 2: Enrich with code intellige
  • Domain clustering validation
  • Registration site discovery
  • -

    This pass does not replace reading — it quantifies what reading +

    This pass does not replace reading; it quantifies what reading found. If Pass 1 says "module X depends on module Y," Pass 2 says "module X has 47 callers in module Y, and changing function Z would affect 12 downstream consumers."

    When to run: After Pass 1, when you need quantified confidence for refactoring decisions or risk assessment.

    Requires: GitNexus MCP server connected.

    -

    Pass 3: Hunt for failure modes

    +

    Pass 3: Hunt for Failure Modes

    /ctx-architecture-failure-analysis
     

    The adversarial pass. Reads all prior artifacts, then @@ -1262,7 +1234,7 @@

    Pass 3: Hunt for failure modesWhat You GetTips

    • Run Pass 1 with focus areas if the codebase is large. - The skill asks what to go deep on — name the modules you're + The skill asks what to go deep on, so name the modules you're about to change.
    • You don't need all three passes every time. Pass 1 is the foundation. Pass 2 and 3 are for when you need @@ -1328,9 +1300,9 @@

      Tips&par

    See Also

    See also: Detecting and Fixing Context Drift -— keep architecture artifacts fresh between deep-dive sessions.

    +to keep architecture artifacts fresh between deep-dive sessions.

    See also: Detecting and Fixing Context Drift -— structural checks that complement architecture analysis.

    +for structural checks that complement architecture analysis.

    diff --git a/site/recipes/autonomous-loops/index.html b/site/recipes/autonomous-loops/index.html index 111bdf003..5ff23b67f 100644 --- a/site/recipes/autonomous-loops/index.html +++ b/site/recipes/autonomous-loops/index.html @@ -135,9 +135,9 @@ - + -