diff --git a/agents/hooks/session-start-memory.sh b/agents/hooks/session-start-memory.sh new file mode 100755 index 00000000..66273c13 --- /dev/null +++ b/agents/hooks/session-start-memory.sh @@ -0,0 +1,81 @@ +#!/usr/bin/env bash +# Hook: SessionStart — injects Argus KB user prefs, feedback, and an index +# listing into every Claude Code session via hookSpecificOutput.additionalContext. +# +# Reads from the Obsidian vault on disk (cheap), so it works even when the +# argus daemon is down. Fails silent on any error so a missing KB never +# blocks a session. +set -euo pipefail + +# Emit a valid empty SessionStart envelope without depending on jq, since +# this is the fail-soft path and jq may be missing. +emit_empty() { + printf '{"hookSpecificOutput":{"hookEventName":"SessionStart","additionalContext":""}}\n' + exit 0 +} + +# Drain stdin (Claude Code sends JSON we don't need to parse here) +cat >/dev/null + +command -v argus >/dev/null 2>&1 || emit_empty +command -v jq >/dev/null 2>&1 || emit_empty + +VAULT=$(argus kb status 2>/dev/null | awk -F': *' '/^Vault/ {print $2; exit}') +[ -n "${VAULT:-}" ] || emit_empty +[ -d "$VAULT" ] || emit_empty + +dump_folder() { + local folder="$1" + local label="$2" + local dir="$VAULT/$folder" + [ -d "$dir" ] || return 0 + + local files + files=$(find "$dir" -maxdepth 1 -name '*.md' -type f 2>/dev/null | sort) + [ -n "$files" ] || return 0 + + printf '\n## %s\n\n' "$label" + while IFS= read -r f; do + [ -n "$f" ] || continue + local rel + rel="${f#$VAULT/}" + printf '### %s\n\n' "$rel" + cat "$f" + printf '\n' + done <<< "$files" +} + +CONTEXT_FILE=$(mktemp) +trap 'rm -f "$CONTEXT_FILE"' EXIT + +{ + printf '# Argus KB — Auto-Loaded Memory\n\n' + printf 'Your preferences and corrections are loaded below. 
The full KB is searchable via `mcp__argus__kb_search` / `mcp__argus__kb_read` / `mcp__argus__kb_list`.\n' + + dump_folder "memory/user" "User Preferences (memory/user/)" + dump_folder "memory/feedback" "Corrections & Feedback (memory/feedback/)" + + printf '\n## KB Index (search for details)\n\n```\n' + # 200 paths fits within typical context budget even with long folder + # nesting; agents can still reach more via kb_list with a prefix. + argus kb list 2>/dev/null | head -200 + printf '```\n' +} > "$CONTEXT_FILE" + +# 50 KB cap protects the context budget. SessionStart fires every session, +# so a runaway dump (large vault, deep memory tree) would otherwise consume +# input tokens before any user prompt. +MAX_BYTES=51200 +if [ "$(wc -c < "$CONTEXT_FILE")" -gt "$MAX_BYTES" ]; then + # Truncate at the last newline before the byte cap so we never split a + # multi-byte UTF-8 character in half (Obsidian notes routinely contain + # em-dashes, smart quotes, emoji, CJK). LC_ALL=C makes length() count bytes. + LC_ALL=C awk -v max="$MAX_BYTES" ' + { len += length($0) + 1; if (len > max) exit; print } + ' "$CONTEXT_FILE" > "${CONTEXT_FILE}.trim" + printf '\n\n[truncated — KB exceeds 50KB budget; use kb_search for the rest]\n' >> "${CONTEXT_FILE}.trim" + mv "${CONTEXT_FILE}.trim" "$CONTEXT_FILE" +fi + +jq -nc --rawfile ctx "$CONTEXT_FILE" \ + '{hookSpecificOutput:{hookEventName:"SessionStart",additionalContext:$ctx}}' diff --git a/agents/hooks/track-kb-change.sh b/agents/hooks/track-kb-change.sh new file mode 100755 index 00000000..bc8f22ee --- /dev/null +++ b/agents/hooks/track-kb-change.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash +# Hook: PostToolUse — logs Argus kb_ingest writes to JSONL so /dream can +# triage incrementally instead of re-scanning the whole vault. +# Receives JSON on stdin from Claude Code with tool_name and tool_input.path. +# Requires: jq (system dep, brew install jq). +set -euo pipefail + +# Fail soft if jq is missing — never block a tool call. 
+command -v jq >/dev/null 2>&1 || exit 0 + +INPUT=$(cat) +TOOL=$(echo "$INPUT" | jq -r '.tool_name // empty') + +# Match both current (mcp__argus__) and legacy (mcp__argus-kb__) names. +case "$TOOL" in + mcp__argus__kb_ingest|mcp__argus-kb__kb_ingest) ;; + *) exit 0 ;; +esac + +KB_PATH=$(echo "$INPUT" | jq -r '.tool_input.path // empty') +[ -n "$KB_PATH" ] || exit 0 + +SESSION=$(echo "$INPUT" | jq -r '.session_id // empty') +CWD=$(echo "$INPUT" | jq -r '.cwd // empty') +TS=$(date -u +"%Y-%m-%dT%H:%M:%SZ") + +mkdir -p ~/.dots/sys/kb-changes ~/.dots/sys/dream-runs +jq -nc --arg ts "$TS" --arg path "$KB_PATH" --arg session_id "$SESSION" --arg cwd "$CWD" \ + '{ts:$ts,path:$path,session_id:$session_id,cwd:$cwd}' \ + >> ~/.dots/sys/kb-changes/changes.jsonl diff --git a/agents/skills/dream/SKILL.md b/agents/skills/dream/SKILL.md index 0e9ce6e9..297c8b60 100644 --- a/agents/skills/dream/SKILL.md +++ b/agents/skills/dream/SKILL.md @@ -1,28 +1,46 @@ --- name: dream -description: Audit and fix knowledge base hygiene — missing frontmatter, oversized docs, naming violations, stale redirects. Use for KB maintenance, knowledge base cleanup, dream consolidation, or memory hygiene. -allowed-tools: mcp__argus-kb__kb_list, mcp__argus-kb__kb_read, mcp__argus-kb__kb_ingest +description: Audit and fix knowledge base hygiene — triage inbox captures, resolve conflicts, age out stale entries, fix frontmatter and links. Use for KB maintenance, knowledge base cleanup, dream consolidation, memory hygiene, or as a scheduled daily KB pass. +allowed-tools: mcp__argus__kb_list, mcp__argus__kb_read, mcp__argus__kb_ingest, mcp__argus__kb_delete, mcp__argus-kb__kb_list, mcp__argus-kb__kb_read, mcp__argus-kb__kb_ingest, mcp__argus-kb__kb_delete --- -# Dream — Knowledge Base Hygiene +# Dream — Knowledge Base Hygiene & Consolidation -Audit all documents in the argus-kb knowledge base against the documented schema, identify violations, auto-fix what is safe, and report what needs manual attention. 
+Audit the argus-kb knowledge base: triage new captures from `memory/inbox/`, resolve conflicting facts in favor of the most recent, archive entries that have aged out, and fix schema/link/naming violations. Designed to run unattended as a scheduled daily task. ## Arguments -- `$ARGUMENTS` - Optional: `--dry-run` to report violations without fixing, or a path prefix to scope the audit (e.g. `thanx/`) +- `$ARGUMENTS` — Optional flags and scoping: + - `--dry-run` — report all proposed actions without applying them + - `--auto` — skip interactive confirmation prompts and apply all safe fixes (designed for scheduled runs) + - A path prefix (no leading `--`) to scope the audit (e.g. `work/`) + +## Context + +- Argus KB available: !`command -v argus 2>/dev/null | head -1` +- Recent changes since last dream run: !`tail -100 ~/.dots/sys/kb-changes/changes.jsonl 2>/dev/null | head -100` +- Last dream run: !`ls -t ~/.dots/sys/dream-runs 2>/dev/null | head -1` +- Today's date: !`date +%Y-%m-%d` + +## MCP tool naming + +The Argus KB MCP server is registered as `argus` (current) or `argus-kb` (legacy). Use whichever tool name the harness exposes — try `mcp__argus__*` first, fall back to `mcp__argus-kb__*` if the first returns tool-not-found. ## Instructions -Run the four phases below in order. If `$ARGUMENTS` contains `--dry-run`, skip all fix steps and only report violations. +Run the seven phases below in order. -If `$ARGUMENTS` contains a path prefix (no leading `--`), pass it to kb_list as the prefix filter to scope the audit. +- If `$ARGUMENTS` contains `--dry-run`, replace every "apply" step with "report what would change." +- If `$ARGUMENTS` contains `--auto`, skip all interactive confirmation prompts; apply all safe fixes. At the end, write a summary to `memory/dream/-report.md` instead of printing it interactively. This mode is designed for scheduled runs (e.g. via Argus scheduled tasks). 
+- If `$ARGUMENTS` contains `--auto` AND the change log (`~/.dots/sys/kb-changes/changes.jsonl`) shows no writes since the timestamp of the last successful dream run (latest file under `~/.dots/sys/dream-runs/`), exit immediately with an empty report — saves work when the KB is quiet. +- If `$ARGUMENTS` contains a bare path prefix, pass it to `kb_list` as the prefix filter to scope the audit. The triage and decay phases still scan their respective folders (`memory/inbox/`, full vault) regardless. ### Phase 1: Orient -1. Call `kb_list` (with prefix filter if provided) to get all document paths -2. Group paths by top-level folder -3. Note the total document count for the summary +1. Call `kb_list` (with prefix filter if provided) to get all document paths. +2. Group paths by top-level folder. +3. Note the total document count for the summary. +4. Collect every filename (basename without `.md`) into a map for the duplicate-filename check used in Phase 2. ### Phase 2: Gather Signal @@ -70,9 +88,70 @@ Rules from the kb_ingest schema: Record each violation with: document path, rule violated, current value, and suggested fix. -### Phase 3: Consolidate (Auto-Fix) +### Phase 3: Triage Inbox + +The inbox holds raw captures from `/improve` (and other capture flows) that haven't been classified yet. Goal: route every inbox doc to its proper folder OR merge it into an existing entry. + +1. Filter the doc list from Phase 1 down to paths under `memory/inbox/`. +2. For each inbox doc: + - The full content is already in memory from Phase 2 — re-read via `kb_read` only if truncated. + - Run a `kb_search` using the doc's title + key entities to find existing related entries. + - Decide one of: + - **Merge** — content overlaps an existing doc. Append/integrate into that doc's body, preserve frontmatter, write back via `kb_ingest` with the existing path. Then delete the inbox source via `kb_delete`. + - **Re-file** — content is genuinely new. 
Determine the correct destination folder using the routing rules below, write via `kb_ingest` to the new path, then `kb_delete` the inbox copy. + - **Hold** — too ambiguous to classify (rare). Leave in inbox and flag in the report. + +**Routing rules** (apply in order, first match wins): +1. Frontmatter `tags` contain a clear domain tag matching an existing top-level folder (e.g. `homelab`, `tools`, `patterns`, `health`, `home`, `personal`, or any user-defined domain folder) → match that folder. +2. Tags include `user` / `preference` → `memory/user/.md`. +3. Tags include `feedback` / `correction` → `memory/feedback/.md`. +4. Tags include `project` or title references a project name → `memory/project/.md`. +5. Tags include `reference` / `lookup` → `memory/reference/.md`. +6. Otherwise: pick the topical folder whose existing docs best match (by tag overlap or kb_search neighborhood) — when in doubt, default to `memory/reference/`. + +When choosing a filename, follow the existing schema (kebab-case, 2-3 words, topic noun). Strip the date prefix from inbox filenames before re-filing. + +In `--auto` mode, apply the merge/re-file decisions without confirmation. In interactive mode, batch the proposals and confirm before applying. + +### Phase 4: Conflict Detection & Supersession + +Find docs that contradict each other and reconcile in favor of the most recently modified entry. Uses the link graph and clusters from Phase 2. + +1. Build clusters of related docs: + - Group by tag overlap (≥2 shared tags AND same top-level folder). + - Within each cluster, scan bodies for **contradicting facts** — same entity/topic with different values (e.g. one doc says "X uses Postgres", another says "X uses MySQL"; one lists a role as "engineer", another as "manager"). +2. For each conflict: + - Identify the **canonical** doc — the one most recently modified (use the `Modified` line in the doc body or YAML, fall back to `kb_list` modification timestamp). 
+ - Identify the **superseded** doc(s). +3. Reconciliation strategy: + - If the conflict is a **fact update** (e.g. role changed, version changed, vendor switched): update the canonical doc to mention the prior value as historical context (`Previously: — superseded `), then mark the superseded doc with a `superseded_by: [[canonical-doc]]` field in frontmatter and add the `redirect` tag (existing dream rules already handle redirects). + - If the conflict is a **near-duplicate** (same topic, slightly different framing): merge content into the canonical doc, mark the other as redirect. + - If unsure whether two docs actually conflict (different scopes, complementary not contradictory): **do not merge** — flag in the report for manual review. + +In `--auto` mode, only auto-apply the fact-update and near-duplicate strategies when the contradiction is unambiguous (exact same key, different value). Flag everything else for the report. + +### Phase 5: Decay & Archive -Before applying any fixes, print a summary of all planned changes and ask the user for confirmation. If the user declines, treat the run as `--dry-run` for the remainder. +Age out entries that are stale and add little ongoing value. Uses the link graph from Phase 2 and the supersession data from Phase 4. + +1. For every doc not in `memory/archive/` and not tagged `redirect`: + - Compute age = today − Modified date. + - Look up incoming wikilinks from the Phase 2 link graph. + - Look up outgoing wikilinks from the Phase 2 link graph. +2. Decay decision tree (apply first match): + - Age > 365 days AND zero incoming wikilinks AND no entry in `~/.dots/sys/kb-changes/changes.jsonl` for this path in the last 90 days (i.e. not written to recently — read activity is not tracked) → **archive**. + - Age > 180 days AND superseded by a newer doc (from Phase 4) → **archive** (the supersession redirect remains as a pointer). 
+ - Age > 180 days AND tagged with a project that has been closed/migrated (heuristic: project name appears in `memory/archive/` already) → **archive**. + - Otherwise → keep. +3. To archive: move the doc to `memory/archive/` via `kb_ingest` at the new path + `kb_delete` at the old path. Preserve all frontmatter and content; add an `archived: ` line to the body. +4. **Never delete outright** — archive only. Archive is recoverable; deletion is not. +5. If Phase 4 was skipped (e.g. due to a scoped audit), skip the supersession-based decay rule and apply only the link-graph and closed-project rules. + +In `--auto` mode, apply archive decisions automatically for entries that match decay rules. In interactive mode, confirm each batch. + +### Phase 6: Consolidate (Auto-Fix) + +Before applying any fixes, print a summary of all planned changes and ask the user for confirmation. If the user declines, treat the run as `--dry-run` for the remainder. In `--auto` mode, skip confirmation and proceed. For each approved violation, apply the fix if it is safe. Safe fixes: @@ -103,9 +182,11 @@ For each fix applied, call `kb_ingest` with the corrected document. Preserve all - Duplicate filenames (requires deciding which doc to rename) - Orphan notes (requires understanding the intended link structure) -### Phase 4: Report +### Phase 7: Report -Print a structured summary: +In interactive mode, print the summary directly. In `--auto` mode, write it to `memory/dream/-report.md` (via `kb_ingest`) and also append a one-line summary to `~/.dots/sys/dream-runs/.log` so the next run can find the timestamp of the previous run. + +Use this structure: ``` ## KB Hygiene Report @@ -157,6 +238,38 @@ Print a structured summary: If `--dry-run` was specified, label the report "KB Hygiene Report (Dry Run)" and note that no changes were made. 
+#### Extra report sections (Phases 3-5) + +Add these sections to the report — they cover triage, conflicts, and decay: + +``` +### Inbox Triage (Phase 3) +| Inbox Doc | Action | Destination | +|-----------|--------|------------| +| memory/inbox/<file> | merge / re-file / hold | <destination> | + +### Conflicts Resolved (Phase 4) +| Topic | Canonical | Superseded | Strategy | +|-------|-----------|------------|----------| +| <topic> | path | path | fact-update / dedupe / flagged | + +### Aged Out (Phase 5) +| Doc | Age (days) | Reason | Action | +|-----|-----------|--------|--------| +| path | N | no incoming links / superseded / closed project | archived | +``` + +## Scheduling + +`/dream --auto` is designed to run unattended on a schedule. Use Argus scheduled tasks to run it daily — for example, set the daemon to invoke `/dream --auto` at a low-activity hour. The `--auto` flag: + +- Skips all interactive confirmations +- Applies safe fixes (frontmatter, link conversion, tag normalization, inbox triage, unambiguous conflict resolution, aging-out under decay rules) +- Writes the report to `memory/dream/<date>-report.md` instead of stdout +- Logs run completion to `~/.dots/sys/dream-runs/<timestamp>.log` + +The "skip if no writes since last run" guard is enforced by the Instructions preamble — see the bullet under `## Instructions`. + ### Obsidian Internal Link Reference The KB is an Obsidian vault. All cross-references between docs MUST use Obsidian internal links (wikilinks). When auditing or fixing docs, apply these rules: diff --git a/agents/skills/improve/SKILL.md b/agents/skills/improve/SKILL.md index 0d00eda2..a47b5a94 100644 --- a/agents/skills/improve/SKILL.md +++ b/agents/skills/improve/SKILL.md @@ -1,6 +1,7 @@ --- name: improve description: Improve skills, capture context & knowledge. Use for skill iteration, capturing learnings, or upgrading agent context. 
+allowed-tools: mcp__argus__kb_list, mcp__argus__kb_read, mcp__argus__kb_search, mcp__argus__kb_ingest, mcp__argus-kb__kb_list, mcp__argus-kb__kb_read, mcp__argus-kb__kb_search, mcp__argus-kb__kb_ingest --- # Improve Skills, Capture Context & Knowledge @@ -25,12 +26,23 @@ Run `/improve` at the end of any session where: - Knowledge base index: !`cat context/knowledge/index.md 2>/dev/null | head -30` - Context directory structure: !`find context -type f 2>/dev/null | head -20` - Voice profile: !`find . -maxdepth 3 -name 'voice-profile.md' -o -name 'VOICE.md' -o -name 'voice.md' 2>/dev/null | head -5` +- Argus KB available: !`command -v argus 2>/dev/null | head -1` +- Argus KB index: !`argus kb list 2>/dev/null | head -200` +- Argus KB recent changes: !`tail -30 ~/.dots/sys/kb-changes/changes.jsonl 2>/dev/null | head -30` ## Instructions When `/improve` is invoked: -### Step 0: Ensure Context Directory Exists +### Step 0a: Load Argus KB Context (if available) + +If the dynamic context above shows `argus` is on `PATH` and the KB index is non-empty, the Argus KB is the **primary durable store**. Load it before doing anything else. + +See `references/argus-kb.md` for the full load procedure (which docs to read, MCP tool name fallbacks, search heuristics). Summary: always-load `memory/user/` + `memory/feedback/` (or rely on the SessionStart hook), then `kb_search` on session-relevant keywords and read top matches. + +If `argus` is NOT available, skip this step and continue with project-local `context/` only. + +### Step 0b: Ensure Context Directory Exists Check whether the current repo has a `context/` directory at the repo root. @@ -252,6 +264,10 @@ For each agent guidance update: ### Step 8: Capture Context & Knowledge +**Part 0: Argus KB Capture (inbox-first)** + +If `argus` is available (see dynamic context), the Argus KB is the **primary** durable store. Capture there before falling back to project-local `context/`. 
Read `references/argus-kb.md` for the detailed capture procedure (search-first, frontmatter schema, routing rules). Summary: search the KB for an existing entry; if found, merge into it at the same path; otherwise write a raw capture to `memory/inbox/-.md` and let `/dream` triage it. + **Part A: Operational Context** Review the session for extractable operational context: @@ -261,17 +277,17 @@ Review the session for extractable operational context: - Policies and requirements (compliance, partnerships) - Decisions made and their rationale -Update existing context files in `context/` directory as appropriate (requires `context/` to exist from Step 0): +Update existing context files in `context/` directory as appropriate (requires `context/` to exist from Step 0b): - Create new files as needed for distinct topics (e.g., `context/research/`, `context/plans/`) - Update CLAUDE.md if the context applies broadly across tasks -- **Never** write to local `memory/` directories or `memory/memory.md` — all local context goes in `context/`. (argus-kb `memory/` paths are separate; see global CLAUDE.md Memory section.) +- **Never** write to local *filesystem* `memory/` directories or `memory/memory.md` — local project context goes in `context/`. The Argus KB's `memory/` namespace (Part 0 above) is separate and handled via `mcp__argus__kb_ingest`, not filesystem writes. - **Worktree safety:** Resolve all file paths against `git rev-parse --show-toplevel` (shown in the Context section as "Current repo"). Never hardcode absolute paths like `~/Development/repo/`. In a worktree session, the worktree root is the correct write target — hardcoded paths write to a different branch. **Part B: Knowledge Graph** Check whether the current project has a knowledge base by looking for `context/knowledge/index.md` (shown in the Context section above). 
-**If no knowledge base exists** (and the user declined to create one in Step 0), **skip Part B entirely.** Do not fall back to auto memory or any alternative. Just move on. +**If no knowledge base exists** (and the user declined to create one in Step 0b), **skip Part B entirely.** Do not fall back to auto memory or any alternative. Just move on. **If a knowledge base exists**, review the session for durable knowledge worth preserving: - Architectural decisions or constraints discovered during this session @@ -372,8 +388,8 @@ Do not overwrite existing voice profile entries — add to or refine them. If a - Do not bloat skills with edge cases that will not recur - Do not change the fundamental purpose or structure of a skill - Do not add improvements based on speculation — only from actual session experience -- Do not create a knowledge base outside the `context/` directory pattern — use Step 0 to initialize -- **Never** save context or knowledge to local `memory/` directories or `memory/memory.md` — always use `context/` directory. (argus-kb `memory/` paths are separate; see global CLAUDE.md Memory section.) +- Do not create a project-local knowledge base outside the `context/` directory pattern — use Step 0b to initialize +- **Never** save knowledge to filesystem `memory/` directories or `memory/memory.md`. Argus KB captures (Step 8 Part 0) go through `mcp__argus__kb_ingest` to the KB's `memory/` namespace; project-local context goes in `context/` ## Example Output diff --git a/agents/skills/improve/references/argus-kb.md b/agents/skills/improve/references/argus-kb.md new file mode 100644 index 00000000..ce79d90d --- /dev/null +++ b/agents/skills/improve/references/argus-kb.md @@ -0,0 +1,46 @@ +# Argus KB Capture Reference + +Detailed instructions for `/improve` Step 0a (load) and Step 8 Part 0 (capture). The main SKILL.md keeps a brief pointer; everything in this file is loaded on demand when Argus KB is available. 
+ +## MCP tool naming + +The Argus KB MCP server is registered as `argus` (current) or `argus-kb` (legacy). Try `mcp__argus__*` first; fall back to `mcp__argus-kb__*` if the harness exposes the legacy name. + +## Step 0a: Loading KB Context + +When `argus` is on `PATH` and the KB index is non-empty: + +1. **Always-on docs** (small, high-signal): the SessionStart hook already injects `memory/user/` and `memory/feedback/` into context. If those sections are missing from the system context (older session, hook disabled), call `kb_read` for every path under `memory/user/` and `memory/feedback/` listed in the KB index. +2. **Session-relevant docs**: derive 1-3 search queries from the current session topic (e.g. project name, repo name, key entities discussed) and call `kb_search` to find related entries. Read top matches with `kb_read`. +3. **Recently-changed docs** (from the dynamic context "recent changes" log): if any of those paths are relevant, read them too. + +Use the KB content to decide whether something captured later is *new* knowledge or a *conflict/update* of an existing entry. + +## Step 8 Part 0: Capturing Knowledge (inbox-first) + +For each piece of durable knowledge worth preserving (people, decisions, conventions, debugging insights, non-obvious tool behavior, project context, user prefs, corrections): + +1. **Search first.** Call `kb_search` with relevant keywords to find existing entries. If a match exists, update it via `kb_ingest` at the same path with merged content. Same-path overwrites are fine; near-duplicates at different paths get reconciled later by `/dream`. + +2. **No match? Write to inbox.** New captures go into `memory/inbox/-.md`. The inbox is intentionally raw — don't agonize over the perfect destination. `/dream` will triage and re-file each entry into the right folder. + +3. 
**Frontmatter** (required by Argus KB schema): + ```yaml + --- + title: "" + tags: [] + --- + ``` + Add a `source: improve-` tag and a `captured: ` line in the body so `/dream` can reason about provenance and recency. + +4. **Body**: lead with the key insight, then supporting detail. 50-500 words. Use Obsidian wikilinks `[[topic]]` to cross-reference existing entries. + +## Routing Rules + +Only used when you're confident — otherwise default to inbox: + +- User stated a personal preference / "I prefer..." → `memory/user/.md` +- User corrected your behavior / "don't do X" → `memory/feedback/.md` +- Project convention or architecture detail → `memory/project/-.md` +- Reusable reference data (lookup tables, env IDs) → `memory/reference/.md` +- Topical knowledge already covered by an existing top-level folder in the KB (check `kb_list` output for the active folder set) → that folder diff --git a/cli/commands/install/agents.go b/cli/commands/install/agents.go index 462d81c1..56b53958 100644 --- a/cli/commands/install/agents.go +++ b/cli/commands/install/agents.go @@ -35,6 +35,12 @@ func Agents() { // Register skill usage tracking hook registerSkillTrackingHook() + // Register Argus KB memory injection on SessionStart + registerSessionStartMemoryHook() + + // Register Argus KB write logging on PostToolUse + registerKBChangeTrackingHook() + // Register status line registerStatusLine() } @@ -126,6 +132,102 @@ func registerSkillTrackingHook() { } } +// registerSessionStartMemoryHook adds a SessionStart hook that injects Argus +// KB user prefs and feedback into every Claude Code session via +// hookSpecificOutput.additionalContext. 
+func registerSessionStartMemoryHook() { + changed := mutateSettings(func(settings map[string]any) bool { + hookCmd := "bash \"" + path.FromDots("agents/hooks/session-start-memory.sh") + "\"" + hookEntry := map[string]any{ + "hooks": []any{ + map[string]any{ + "type": "command", + "command": hookCmd, + }, + }, + } + + hooks, _ := settings["hooks"].(map[string]any) + if hooks == nil { + hooks = make(map[string]any) + } + + sessionStart, _ := hooks["SessionStart"].([]any) + + // SessionStart entries have no `matcher` field (per Claude Code spec), + // so we can't dedupe by matcher like the PreToolUse/PostToolUse hooks + // do. Walk into each entry's inner `hooks` array and compare command + // strings instead. + for _, existing := range sessionStart { + entry, ok := existing.(map[string]any) + if !ok { + continue + } + inner, ok := entry["hooks"].([]any) + if !ok { + continue + } + for _, h := range inner { + cmd, _ := h.(map[string]any) + if cmd != nil && cmd["command"] == hookCmd { + return false // already registered + } + } + } + + sessionStart = append(sessionStart, hookEntry) + hooks["SessionStart"] = sessionStart + settings["hooks"] = hooks + return true + }) + + if changed { + log.Success("Registered Argus KB memory injection hook (SessionStart)") + } +} + +// registerKBChangeTrackingHook adds a PostToolUse hook that appends every +// kb_ingest call to a JSONL change log so /dream can triage incrementally. +func registerKBChangeTrackingHook() { + changed := mutateSettings(func(settings map[string]any) bool { + hookCmd := "bash \"" + path.FromDots("agents/hooks/track-kb-change.sh") + "\"" + // Match both legacy and current Argus MCP server names. 
+ hookEntry := map[string]any{ + "matcher": "mcp__argus.*__kb_ingest", + "hooks": []any{ + map[string]any{ + "type": "command", + "command": hookCmd, + }, + }, + } + + hooks, _ := settings["hooks"].(map[string]any) + if hooks == nil { + hooks = make(map[string]any) + } + + postToolUse, _ := hooks["PostToolUse"].([]any) + + for _, existing := range postToolUse { + if entry, ok := existing.(map[string]any); ok { + if entry["matcher"] == "mcp__argus.*__kb_ingest" { + return false // already registered + } + } + } + + postToolUse = append(postToolUse, hookEntry) + hooks["PostToolUse"] = postToolUse + settings["hooks"] = hooks + return true + }) + + if changed { + log.Success("Registered Argus KB change tracking hook (PostToolUse)") + } +} + // registerStatusLine configures the Claude Code status line to show context // window usage and compaction proximity via ~/.claude/settings.json. func registerStatusLine() {