From 1d05145e8f3cbcffcba6b152394b005e417eb6c3 Mon Sep 17 00:00:00 2001 From: Murat Parlakisik Date: Fri, 3 Apr 2026 07:47:36 -0700 Subject: [PATCH] feat: Add hooks, steering, and skills system with multi-tool integration Add lifecycle hooks, steering files, and skills packages with full convention alignment to main's audit and compliance requirements. Key additions: - Hook discovery, runner, and security validation (internal/hook/) - Steering file parser, sync engine, and filter (internal/steering/) - Skill install, load, and remove (internal/skill/) - CLI commands: ctx hook, ctx steering, ctx skill - Setup support for Cursor, Kiro, and Cline - MCP steering search tool - Drift checks for steering sync staleness Convention alignment: - All fmt.Errorf moved to internal/err/ helpers - All cmd.Println/Printf moved to internal/write/ helpers - All raw file I/O replaced with internal/io/Safe* wrappers - All bare err := renamed to descriptive names - All flag bindings use flagbind.* helpers - exec.CommandContext moved to internal/exec/hook/ - Magic strings/numbers replaced with config constants - Line length, doc comments, mixed visibility fixes Signed-off-by: Murat Parlakisik --- .kiro/specs/hooks-and-steering/.config.kiro | 1 + .kiro/specs/hooks-and-steering/design.md | 709 ++++ .../specs/hooks-and-steering/requirements.md | 274 ++ .kiro/specs/hooks-and-steering/tasks.md | 328 ++ docs/cli/index.md | 20 + docs/cli/mcp.md | 38 + docs/cli/tools.md | 217 + docs/home/configuration.md | 20 + docs/home/context-files.md | 3 + docs/home/getting-started.md | 30 + .../2026-03-31-commit-context-tracing.md | 3508 +++++++++++++++++ internal/assets/commands/commands.yaml | 170 + internal/assets/commands/flags.yaml | 10 + internal/assets/commands/text/errors.yaml | 114 + internal/assets/commands/text/hooks.yaml | 19 - internal/assets/commands/text/mcp.yaml | 17 + internal/assets/commands/text/ui.yaml | 10 + internal/audit/cross_package_types_test.go | 12 +- 
internal/audit/dead_exports_test.go | 25 + internal/bootstrap/bootstrap_test.go | 95 + internal/bootstrap/cmd.go | 5 + internal/bootstrap/group.go | 6 + internal/cli/agent/cmd/root/cmd.go | 36 +- internal/cli/agent/cmd/root/run.go | 14 +- internal/cli/agent/core/budget/assemble.go | 33 +- internal/cli/agent/core/budget/budget_test.go | 129 + internal/cli/agent/core/budget/doc.go | 4 +- internal/cli/agent/core/budget/out.go | 25 +- internal/cli/agent/core/budget/render.go | 14 + internal/cli/agent/core/budget/types.go | 6 + internal/cli/agent/core/steering/doc.go | 12 + internal/cli/agent/core/steering/steering.go | 61 + internal/cli/initialize/cmd/root/run.go | 10 + internal/cli/initialize/init_test.go | 81 + internal/cli/resolve/doc.go | 13 + internal/cli/resolve/tool.go | 45 + internal/cli/setup/cmd/root/run.go | 20 +- internal/cli/setup/core/cline/cline.go | 38 + internal/cli/setup/core/cline/deploy.go | 98 + internal/cli/setup/core/cline/doc.go | 12 + internal/cli/setup/core/cline/types.go | 18 + internal/cli/setup/core/cursor/cursor.go | 42 + internal/cli/setup/core/cursor/deploy.go | 95 + internal/cli/setup/core/cursor/doc.go | 12 + internal/cli/setup/core/cursor/types.go | 18 + internal/cli/setup/core/kiro/deploy.go | 111 + internal/cli/setup/core/kiro/doc.go | 12 + internal/cli/setup/core/kiro/kiro.go | 58 + internal/cli/setup/core/kiro/types.go | 20 + internal/cli/skill/cmd/install/cmd.go | 55 + internal/cli/skill/cmd/install/doc.go | 12 + internal/cli/skill/cmd/list/cmd.go | 66 + internal/cli/skill/cmd/list/doc.go | 12 + internal/cli/skill/cmd/remove/cmd.go | 54 + internal/cli/skill/cmd/remove/doc.go | 12 + internal/cli/skill/doc.go | 13 + internal/cli/skill/skill.go | 29 + internal/cli/steering/cmd/add/cmd.go | 93 + internal/cli/steering/cmd/add/doc.go | 12 + internal/cli/steering/cmd/initcmd/cmd.go | 106 + internal/cli/steering/cmd/initcmd/doc.go | 12 + internal/cli/steering/cmd/list/cmd.go | 74 + internal/cli/steering/cmd/list/doc.go | 12 + 
internal/cli/steering/cmd/preview/cmd.go | 80 + internal/cli/steering/cmd/preview/doc.go | 12 + internal/cli/steering/cmd/synccmd/cmd.go | 87 + internal/cli/steering/cmd/synccmd/doc.go | 12 + internal/cli/steering/core/sync/doc.go | 12 + internal/cli/steering/core/sync/report.go | 36 + internal/cli/steering/doc.go | 13 + internal/cli/steering/steering.go | 33 + .../cli/system/cmd/message/cmd/edit/run.go | 10 +- .../cli/system/cmd/message/cmd/reset/run.go | 6 +- .../cli/system/cmd/message/cmd/show/run.go | 6 +- internal/cli/trigger/cmd/add/cmd.go | 120 + internal/cli/trigger/cmd/add/doc.go | 12 + internal/cli/trigger/cmd/disable/cmd.go | 70 + internal/cli/trigger/cmd/disable/doc.go | 12 + internal/cli/trigger/cmd/enable/cmd.go | 70 + internal/cli/trigger/cmd/enable/doc.go | 12 + internal/cli/trigger/cmd/list/cmd.go | 83 + internal/cli/trigger/cmd/list/doc.go | 12 + internal/cli/trigger/cmd/test/cmd.go | 146 + internal/cli/trigger/cmd/test/doc.go | 12 + internal/cli/trigger/doc.go | 12 + internal/cli/trigger/trigger.go | 33 + internal/compat/compat_test.go | 195 + internal/compat/doc.go | 14 + internal/compat/testmain_test.go | 19 + internal/config/dir/dir.go | 6 + internal/config/embed/cmd/skill.go | 23 + internal/config/embed/cmd/steering.go | 27 + internal/config/embed/cmd/trigger.go | 27 + internal/config/embed/flag/agent.go | 1 + internal/config/embed/flag/flag.go | 4 + internal/config/embed/text/agent.go | 3 + internal/config/embed/text/drift.go | 3 + internal/config/embed/text/err_cli.go | 12 + internal/config/embed/text/err_hook.go | 15 + .../config/embed/text/err_lifecycle_hook.go | 14 + internal/config/embed/text/err_setup.go | 15 + internal/config/embed/text/err_skill.go | 18 +- internal/config/embed/text/err_steering.go | 31 + internal/config/embed/text/hook.go | 1 - internal/config/embed/text/mcp_err.go | 1 + internal/config/embed/text/mcp_tool.go | 7 + internal/config/flag/flag.go | 2 + internal/config/fs/perm.go | 7 + internal/config/hook/hook.go | 2 + 
internal/config/mcp/field/field.go | 4 + internal/config/mcp/tool/tool.go | 8 + internal/config/sysinfo/proc.go | 8 +- internal/drift/check_ext.go | 183 + internal/drift/check_ext_test.go | 427 ++ internal/drift/detector.go | 12 + internal/drift/types.go | 18 + internal/entity/trigger.go | 49 + internal/err/cli/cli.go | 12 + internal/err/hook/hook.go | 119 - internal/err/mcp/doc.go | 3 +- internal/err/mcp/mcp.go | 26 + internal/err/{hook => setup}/doc.go | 8 +- internal/err/setup/setup.go | 68 + internal/err/skill/doc.go | 8 +- internal/err/skill/skill.go | 188 + internal/err/steering/doc.go | 17 + internal/err/steering/steering.go | 280 ++ internal/err/trigger/doc.go | 17 + internal/err/trigger/trigger.go | 364 ++ internal/exec/sysinfo/sysinfo_darwin.go | 12 +- internal/exec/trigger/doc.go | 13 + internal/exec/trigger/trigger.go | 28 + internal/mcp/handler/session_hooks.go | 120 + internal/mcp/handler/steering.go | 119 + internal/mcp/server/def/tool/tool.go | 377 +- internal/mcp/server/route/tool/dispatch.go | 10 +- internal/mcp/server/route/tool/steering.go | 80 + internal/mcp/server/server_test.go | 198 +- internal/rc/default.go | 10 + internal/rc/rc.go | 72 + internal/rc/rc_test.go | 166 + internal/rc/types.go | 38 + internal/skill/copy.go | 70 + internal/skill/doc.go | 12 + internal/skill/install.go | 53 + internal/skill/install_test.go | 154 + internal/skill/load.go | 70 + internal/skill/load_test.go | 187 + internal/skill/manifest.go | 65 + internal/skill/remove.go | 39 + internal/skill/remove_test.go | 54 + internal/skill/testmain_test.go | 19 + internal/skill/types.go | 22 + internal/steering/doc.go | 12 + internal/steering/filter.go | 87 + internal/steering/filter_test.go | 152 + internal/steering/format.go | 188 + internal/steering/frontmatter.go | 58 + internal/steering/match.go | 46 + internal/steering/parse.go | 86 + internal/steering/parse_prop_test.go | 196 + internal/steering/parse_test.go | 231 ++ internal/steering/sync.go | 162 + 
internal/steering/sync_prop_test.go | 138 + internal/steering/sync_test.go | 332 ++ internal/steering/testmain_test.go | 19 + internal/steering/types.go | 108 + internal/sysinfo/load_darwin.go | 11 +- internal/sysinfo/memory_darwin.go | 66 +- internal/trigger/discover.go | 148 + internal/trigger/discover_test.go | 235 ++ internal/trigger/doc.go | 13 + internal/trigger/exec.go | 54 + internal/trigger/helpers.go | 18 + internal/trigger/runner.go | 99 + internal/trigger/runner_test.go | 319 ++ internal/trigger/security.go | 73 + internal/trigger/security_test.go | 146 + internal/trigger/testmain_test.go | 19 + internal/trigger/types.go | 86 + internal/write/setup/hook.go | 131 + internal/write/skill/doc.go | 12 + internal/write/skill/skill.go | 71 + internal/write/steering/doc.go | 15 + internal/write/steering/steering.go | 164 + internal/write/trigger/doc.go | 14 + internal/write/trigger/trigger.go | 164 + specs/commit-context-tracing.md | 377 ++ specs/hub_implementation.md | 134 + specs/shared-context-hub.md | 367 ++ specs/task-allocation.md | 202 + 191 files changed, 16778 insertions(+), 360 deletions(-) create mode 100644 .kiro/specs/hooks-and-steering/.config.kiro create mode 100644 .kiro/specs/hooks-and-steering/design.md create mode 100644 .kiro/specs/hooks-and-steering/requirements.md create mode 100644 .kiro/specs/hooks-and-steering/tasks.md create mode 100644 docs/superpowers/plans/2026-03-31-commit-context-tracing.md create mode 100644 internal/cli/agent/core/steering/doc.go create mode 100644 internal/cli/agent/core/steering/steering.go create mode 100644 internal/cli/resolve/doc.go create mode 100644 internal/cli/resolve/tool.go create mode 100644 internal/cli/setup/core/cline/cline.go create mode 100644 internal/cli/setup/core/cline/deploy.go create mode 100644 internal/cli/setup/core/cline/doc.go create mode 100644 internal/cli/setup/core/cline/types.go create mode 100644 internal/cli/setup/core/cursor/cursor.go create mode 100644 
internal/cli/setup/core/cursor/deploy.go create mode 100644 internal/cli/setup/core/cursor/doc.go create mode 100644 internal/cli/setup/core/cursor/types.go create mode 100644 internal/cli/setup/core/kiro/deploy.go create mode 100644 internal/cli/setup/core/kiro/doc.go create mode 100644 internal/cli/setup/core/kiro/kiro.go create mode 100644 internal/cli/setup/core/kiro/types.go create mode 100644 internal/cli/skill/cmd/install/cmd.go create mode 100644 internal/cli/skill/cmd/install/doc.go create mode 100644 internal/cli/skill/cmd/list/cmd.go create mode 100644 internal/cli/skill/cmd/list/doc.go create mode 100644 internal/cli/skill/cmd/remove/cmd.go create mode 100644 internal/cli/skill/cmd/remove/doc.go create mode 100644 internal/cli/skill/doc.go create mode 100644 internal/cli/skill/skill.go create mode 100644 internal/cli/steering/cmd/add/cmd.go create mode 100644 internal/cli/steering/cmd/add/doc.go create mode 100644 internal/cli/steering/cmd/initcmd/cmd.go create mode 100644 internal/cli/steering/cmd/initcmd/doc.go create mode 100644 internal/cli/steering/cmd/list/cmd.go create mode 100644 internal/cli/steering/cmd/list/doc.go create mode 100644 internal/cli/steering/cmd/preview/cmd.go create mode 100644 internal/cli/steering/cmd/preview/doc.go create mode 100644 internal/cli/steering/cmd/synccmd/cmd.go create mode 100644 internal/cli/steering/cmd/synccmd/doc.go create mode 100644 internal/cli/steering/core/sync/doc.go create mode 100644 internal/cli/steering/core/sync/report.go create mode 100644 internal/cli/steering/doc.go create mode 100644 internal/cli/steering/steering.go create mode 100644 internal/cli/trigger/cmd/add/cmd.go create mode 100644 internal/cli/trigger/cmd/add/doc.go create mode 100644 internal/cli/trigger/cmd/disable/cmd.go create mode 100644 internal/cli/trigger/cmd/disable/doc.go create mode 100644 internal/cli/trigger/cmd/enable/cmd.go create mode 100644 internal/cli/trigger/cmd/enable/doc.go create mode 100644 
internal/cli/trigger/cmd/list/cmd.go create mode 100644 internal/cli/trigger/cmd/list/doc.go create mode 100644 internal/cli/trigger/cmd/test/cmd.go create mode 100644 internal/cli/trigger/cmd/test/doc.go create mode 100644 internal/cli/trigger/doc.go create mode 100644 internal/cli/trigger/trigger.go create mode 100644 internal/compat/compat_test.go create mode 100644 internal/compat/doc.go create mode 100644 internal/compat/testmain_test.go create mode 100644 internal/config/embed/cmd/skill.go create mode 100644 internal/config/embed/cmd/steering.go create mode 100644 internal/config/embed/cmd/trigger.go create mode 100644 internal/config/embed/text/err_cli.go create mode 100644 internal/config/embed/text/err_lifecycle_hook.go create mode 100644 internal/config/embed/text/err_setup.go create mode 100644 internal/config/embed/text/err_steering.go create mode 100644 internal/drift/check_ext.go create mode 100644 internal/drift/check_ext_test.go create mode 100644 internal/entity/trigger.go delete mode 100644 internal/err/hook/hook.go rename internal/err/{hook => setup}/doc.go (61%) create mode 100644 internal/err/setup/setup.go create mode 100644 internal/err/steering/doc.go create mode 100644 internal/err/steering/steering.go create mode 100644 internal/err/trigger/doc.go create mode 100644 internal/err/trigger/trigger.go create mode 100644 internal/exec/trigger/doc.go create mode 100644 internal/exec/trigger/trigger.go create mode 100644 internal/mcp/handler/session_hooks.go create mode 100644 internal/mcp/handler/steering.go create mode 100644 internal/mcp/server/route/tool/steering.go create mode 100644 internal/skill/copy.go create mode 100644 internal/skill/doc.go create mode 100644 internal/skill/install.go create mode 100644 internal/skill/install_test.go create mode 100644 internal/skill/load.go create mode 100644 internal/skill/load_test.go create mode 100644 internal/skill/manifest.go create mode 100644 internal/skill/remove.go create mode 100644 
internal/skill/remove_test.go create mode 100644 internal/skill/testmain_test.go create mode 100644 internal/skill/types.go create mode 100644 internal/steering/doc.go create mode 100644 internal/steering/filter.go create mode 100644 internal/steering/filter_test.go create mode 100644 internal/steering/format.go create mode 100644 internal/steering/frontmatter.go create mode 100644 internal/steering/match.go create mode 100644 internal/steering/parse.go create mode 100644 internal/steering/parse_prop_test.go create mode 100644 internal/steering/parse_test.go create mode 100644 internal/steering/sync.go create mode 100644 internal/steering/sync_prop_test.go create mode 100644 internal/steering/sync_test.go create mode 100644 internal/steering/testmain_test.go create mode 100644 internal/steering/types.go create mode 100644 internal/trigger/discover.go create mode 100644 internal/trigger/discover_test.go create mode 100644 internal/trigger/doc.go create mode 100644 internal/trigger/exec.go create mode 100644 internal/trigger/helpers.go create mode 100644 internal/trigger/runner.go create mode 100644 internal/trigger/runner_test.go create mode 100644 internal/trigger/security.go create mode 100644 internal/trigger/security_test.go create mode 100644 internal/trigger/testmain_test.go create mode 100644 internal/trigger/types.go create mode 100644 internal/write/skill/doc.go create mode 100644 internal/write/skill/skill.go create mode 100644 internal/write/steering/doc.go create mode 100644 internal/write/steering/steering.go create mode 100644 internal/write/trigger/doc.go create mode 100644 internal/write/trigger/trigger.go create mode 100644 specs/commit-context-tracing.md create mode 100644 specs/hub_implementation.md create mode 100644 specs/shared-context-hub.md create mode 100644 specs/task-allocation.md diff --git a/.kiro/specs/hooks-and-steering/.config.kiro b/.kiro/specs/hooks-and-steering/.config.kiro new file mode 100644 index 000000000..32572f2c3 --- 
/dev/null +++ b/.kiro/specs/hooks-and-steering/.config.kiro @@ -0,0 +1 @@ +{"specId": "2f530946-c219-4541-8a51-b532a017ae47", "workflowType": "requirements-first", "specType": "feature"} \ No newline at end of file diff --git a/.kiro/specs/hooks-and-steering/design.md b/.kiro/specs/hooks-and-steering/design.md new file mode 100644 index 000000000..0a31dc96b --- /dev/null +++ b/.kiro/specs/hooks-and-steering/design.md @@ -0,0 +1,709 @@ +# Design Document: Hooks & Steering + +## Overview + +This design extends `ctx` from a persistence-only AI-context layer into a behavioral guidance and lifecycle automation platform. The system adds four major subsystems to the existing codebase: + +1. **Steering Layer** — Markdown files with YAML frontmatter in `.context/steering/` that define persistent behavioral rules. These are injected into AI prompts via `ctx agent`, synced to tool-native formats (Cursor `.mdc`, Cline `.md`, Kiro `.md`), and exposed via MCP. + +2. **Hooks System** — Executable scripts in `.context/hooks/<type>/` that fire at lifecycle events (`pre-tool-use`, `post-tool-use`, `session-start`, `session-end`, `file-save`, `context-add`). Scripts receive JSON via stdin and return JSON via stdout, enabling blocking, context injection, and automation. + +3. **MCP Server Extensions** — New MCP tools (`ctx_steering_get`, `ctx_search`, `ctx_session_start`, `ctx_session_end`) that expose steering retrieval, context search, and session lifecycle to any MCP-compatible AI tool. + +4. **Skills System** — Reusable instruction bundles in `.context/skills/<name>/SKILL.md` that can be installed, listed, removed, and activated via `ctx agent --skill <name>`. + +All subsystems are additive — existing workflows (`ctx agent`, `ctx drift`, `CLAUDE.md`/`AGENTS.md` generation) continue unchanged when the new directories don't exist. The active AI tool is set once via the `tool` field in `.ctxrc`; a `--tool` CLI flag overrides it per-command.
Cross-tool configuration switching reuses the existing `ctx config switch` profile mechanism (`.ctxrc.kiro`, `.ctxrc.claude`, etc.). + +### Design Principles + +- **File-based**: All artifacts are plain files (markdown, YAML, shell scripts) — git-versionable, human-readable. +- **Tool-agnostic**: Works with Claude Code, Cursor, Cline, Kiro, Codex via the `tool` field. +- **Additive**: Each subsystem is independently useful; nothing breaks existing usage. +- **ctx is source of truth**: Other tools sync *from* ctx, not the reverse. +- **Zero lock-in**: Standard formats throughout — no proprietary encoding. +- **Consistent patterns**: New packages follow existing `internal/cli//{cmd,core}/` and `internal//` conventions. + +## Architecture + +### High-Level Component Diagram + +```mermaid +graph TB + subgraph CLI["CLI Layer (internal/cli/)"] + SteeringCLI["ctx steering
add | list | preview | init | sync"] + HookCLI["ctx hook
add | list | test | enable | disable"] + SkillCLI["ctx skill
install | list | remove"] + AgentCLI["ctx agent
(extended: --skill flag)"] + InitCLI["ctx init
(extended: new dirs)"] + DriftCLI["ctx drift
(extended: new checks)"] + ConfigCLI["ctx config switch
(existing, unchanged)"] + end + + subgraph Domain["Domain Layer (internal/)"] + SteeringPkg["internal/steering/
parse, filter, sync"] + HookPkg["internal/hook/
discover, run, validate"] + SkillPkg["internal/skill/
install, load, remove"] + RCPkg["internal/rc/
(extended: tool, steering, hooks)"] + DriftPkg["internal/drift/
(extended: new checks)"] + ValidatePkg["internal/validate/
(existing: boundary, symlink)"] + end + + subgraph MCP["MCP Layer (internal/mcp/)"] + MCPHandler["handler/
(extended: SteeringGet, Search,
SessionStart, SessionEnd)"] + MCPCatalog["server/catalog/
(extended: new tool defs)"] + MCPDispatch["server/dispatch/
(extended: new routes)"] + end + + subgraph FS["File System"] + SteeringDir[".context/steering/*.md"] + HooksDir[".context/hooks//*.sh"] + SkillsDir[".context/skills//SKILL.md"] + CtxRC[".ctxrc (tool, steering, hooks)"] + NativeFiles["Tool-native files
.cursor/rules/*.mdc
.clinerules/*.md
.kiro/steering/*.md"] + end + + SteeringCLI --> SteeringPkg + HookCLI --> HookPkg + SkillCLI --> SkillPkg + AgentCLI --> SteeringPkg + AgentCLI --> SkillPkg + DriftCLI --> DriftPkg + InitCLI --> FS + + SteeringPkg --> ValidatePkg + HookPkg --> ValidatePkg + SteeringPkg --> RCPkg + HookPkg --> RCPkg + + MCPHandler --> SteeringPkg + MCPHandler --> HookPkg + + SteeringPkg --> SteeringDir + SteeringPkg --> NativeFiles + HookPkg --> HooksDir + SkillPkg --> SkillsDir + RCPkg --> CtxRC + DriftPkg --> SteeringDir + DriftPkg --> HooksDir +``` + +### Package Layout + +New packages follow the existing convention of `internal/cli//` for CLI wiring and `internal//` for domain logic: + +``` +internal/ +├── steering/ # NEW — Steering domain logic +│ ├── doc.go +│ ├── parse.go # Frontmatter parser (parse/print round-trip) +│ ├── parse_test.go +│ ├── filter.go # Inclusion mode filtering +│ ├── filter_test.go +│ ├── sync.go # Tool-native format sync +│ ├── sync_test.go +│ └── types.go # SteeringFile, InclusionMode types +├── hook/ # NEW — Hook domain logic +│ ├── doc.go +│ ├── discover.go # Hook discovery and validation +│ ├── discover_test.go +│ ├── runner.go # Hook execution (stdin/stdout JSON) +│ ├── runner_test.go +│ ├── security.go # Symlink/boundary checks +│ ├── security_test.go +│ └── types.go # HookInput, HookOutput, HookType types +├── skill/ # NEW — Skill domain logic +│ ├── doc.go +│ ├── install.go +│ ├── load.go +│ ├── remove.go +│ └── types.go +├── cli/ +│ ├── steering/ # NEW — ctx steering CLI +│ │ ├── steering.go +│ │ ├── doc.go +│ │ └── cmd/ +│ │ ├── add.go +│ │ ├── list.go +│ │ ├── preview.go +│ │ ├── init.go +│ │ └── sync.go +│ ├── hook/ # NEW — ctx hook CLI +│ │ ├── hook.go +│ │ ├── doc.go +│ │ └── cmd/ +│ │ ├── add.go +│ │ ├── list.go +│ │ ├── test.go +│ │ ├── enable.go +│ │ └── disable.go +│ └── skill/ # NEW — ctx skill CLI +│ ├── skill.go +│ ├── doc.go +│ └── cmd/ +│ ├── install.go +│ ├── list.go +│ └── remove.go +└── ...existing packages unchanged... 
+``` + + +## Components and Interfaces + +### 1. Steering Package (`internal/steering/`) + +#### Types (`types.go`) + +```go +// InclusionMode determines when a steering file is injected. +type InclusionMode string + +const ( + InclusionAlways InclusionMode = "always" + InclusionAuto InclusionMode = "auto" + InclusionManual InclusionMode = "manual" +) + +// SteeringFile represents a parsed steering file. +type SteeringFile struct { + Name string // from frontmatter + Description string // from frontmatter + Inclusion InclusionMode // default: manual + Tools []string // default: all tools + Priority int // default: 50 + Body string // markdown content after frontmatter + Path string // filesystem path +} +``` + +#### Parser (`parse.go`) + +```go +// Parse reads a steering file from bytes, extracting YAML frontmatter +// and markdown body. Returns an error if frontmatter is invalid YAML. +func Parse(data []byte, filePath string) (*SteeringFile, error) + +// Print serializes a SteeringFile back to frontmatter + markdown bytes. +// Round-trip property: Parse(Print(Parse(data))) == Parse(data). +func Print(sf *SteeringFile) []byte +``` + +The parser uses `gopkg.in/yaml.v3` (already a dependency) for frontmatter. Frontmatter is delimited by `---` lines. Fields not present in frontmatter get defaults: `inclusion` → `"manual"`, `tools` → `nil` (meaning all tools), `priority` → `50`. + +#### Filter (`filter.go`) + +```go +// LoadAll reads all .md files from the steering directory and parses them. +func LoadAll(steeringDir string) ([]*SteeringFile, error) + +// Filter returns steering files applicable for the given context: +// - always: included unconditionally +// - auto: included when prompt matches description (substring match) +// - manual: included only when explicitly named +// Results are sorted by ascending priority, then alphabetically by name. 
+func Filter(files []*SteeringFile, prompt string, manualNames []string, tool string) []*SteeringFile +``` + +The `tool` parameter filters out steering files whose `Tools` list excludes the given tool. When `Tools` is empty/nil, the file applies to all tools. + +#### Sync (`sync.go`) + +```go +// SyncTool writes steering files to the tool-native format directory. +// Skips files whose tools list excludes the target tool. +// Skips files whose content hasn't changed (idempotent). +func SyncTool(steeringDir, projectRoot, tool string) (SyncReport, error) + +// SyncAll syncs to all supported tool formats. +func SyncAll(steeringDir, projectRoot string) (SyncReport, error) + +// SyncReport summarizes what was written, skipped, or errored. +type SyncReport struct { + Written []string + Skipped []string + Errors []error +} +``` + +Tool-native format mapping: +| Tool | Output Directory | Format | +|------|-----------------|--------| +| `cursor` | `.cursor/rules/<name>.mdc` | Cursor MDC frontmatter + markdown | +| `cline` | `.clinerules/<name>.md` | Plain markdown (no frontmatter) | +| `kiro` | `.kiro/steering/<name>.md` | Kiro frontmatter + markdown | +| `claude` | N/A (uses `ctx agent` directly) | — | +| `codex` | N/A (uses `ctx agent` directly) | — | + +### 2. Hook Package (`internal/hook/`) + +#### Types (`types.go`) + +```go +// HookType represents a lifecycle event category. +type HookType string + +const ( + PreToolUse HookType = "pre-tool-use" + PostToolUse HookType = "post-tool-use" + SessionStart HookType = "session-start" + SessionEnd HookType = "session-end" + FileSave HookType = "file-save" + ContextAdd HookType = "context-add" +) + +// ValidHookTypes returns all valid hook type strings. +func ValidHookTypes() []HookType + +// HookInput is the JSON object sent to hook scripts via stdin.
+type HookInput struct { + HookType string `json:"hookType"` + Tool string `json:"tool"` + Parameters map[string]any `json:"parameters"` + Session HookSession `json:"session"` + Timestamp string `json:"timestamp"` // ISO 8601 + CtxVersion string `json:"ctxVersion"` +} + +type HookSession struct { + ID string `json:"id"` + Model string `json:"model"` +} + +// HookOutput is the JSON object returned by hook scripts via stdout. +type HookOutput struct { + Cancel bool `json:"cancel"` + Context string `json:"context,omitempty"` + Message string `json:"message,omitempty"` +} + +// HookInfo describes a discovered hook script. +type HookInfo struct { + Name string + Type HookType + Path string + Enabled bool // true if executable bit is set +} +``` + +#### Discovery (`discover.go`) + +```go +// Discover finds all hook scripts in the hooks directory, grouped by type. +// Skips non-executable scripts (logs warning). Skips symlinks (security). +// Returns empty map if hooks directory doesn't exist. +func Discover(hooksDir string) (map[HookType][]HookInfo, error) + +// FindByName searches all hook type directories for a hook with the given name. +func FindByName(hooksDir, name string) (*HookInfo, error) +``` + +#### Runner (`runner.go`) + +```go +// RunAll executes all enabled hooks for the given type, in alphabetical order. +// Passes input as JSON via stdin, reads output as JSON from stdout. +// If a hook returns cancel:true, stops and returns the cancellation. +// If a hook exits non-zero or returns invalid JSON, logs warning and continues. +// Enforces configurable timeout (default 10s) per hook. +func RunAll(hooksDir string, hookType HookType, input *HookInput, timeout time.Duration) (*AggregatedOutput, error) + +// AggregatedOutput collects results from all hooks in a run. 
+type AggregatedOutput struct { + Cancelled bool + Message string + Context string // concatenated context from all hooks + Errors []string // warnings from failed hooks +} +``` + +#### Security (`security.go`) + +```go +// ValidateHookPath checks that a hook script path: +// 1. Resolves within the hooks directory boundary +// 2. Is not a symlink +// 3. Has the executable permission bit +// Returns a descriptive error if any check fails. +func ValidateHookPath(hooksDir, hookPath string) error +``` + +This reuses the patterns from `internal/validate/path.go` (boundary check, symlink rejection via `os.Lstat`). + +### 3. Skill Package (`internal/skill/`) + +#### Types (`types.go`) + +```go +// Skill represents a parsed skill manifest. +type Skill struct { + Name string // from SKILL.md frontmatter + Description string // from SKILL.md frontmatter + Body string // markdown instruction content + Dir string // directory path +} +``` + +#### Operations + +```go +// Install copies a skill from source into .context/skills/<name>/. +// Validates that source contains a valid SKILL.md with frontmatter. +func Install(source, skillsDir string) (*Skill, error) + +// LoadAll reads all installed skills from the skills directory. +func LoadAll(skillsDir string) ([]*Skill, error) + +// Load reads a single skill by name. +func Load(skillsDir, name string) (*Skill, error) + +// Remove deletes a skill directory. +func Remove(skillsDir, name string) error +``` + +### 4.
RC Package Extensions (`internal/rc/`) + +New fields added to `CtxRC`: + +```go +// Added to existing CtxRC struct: +Tool string `yaml:"tool"` // Tool_Identifier: claude, cursor, cline, kiro, codex +Steering *SteeringRC `yaml:"steering"` +Hooks *HooksRC `yaml:"hooks"` + +type SteeringRC struct { + Dir string `yaml:"dir"` // default: .context/steering + DefaultInclusion string `yaml:"default_inclusion"` // default: manual + DefaultTools []string `yaml:"default_tools"` // default: all +} + +type HooksRC struct { + Dir string `yaml:"dir"` // default: .context/hooks + Timeout int `yaml:"timeout"` // seconds, default: 10 + Enabled *bool `yaml:"enabled"` // default: true +} +``` + +New accessor functions: + +```go +func Tool() string // returns RC().Tool +func SteeringDir() string // returns RC().Steering.Dir or default +func HooksDir() string // returns RC().Hooks.Dir or default +func HookTimeout() int // returns RC().Hooks.Timeout or 10 +func HooksEnabled() bool // returns RC().Hooks.Enabled or true +``` + +The existing priority hierarchy is preserved: CLI flags > environment variables > `.ctxrc` > hardcoded defaults. + +### 5. MCP Handler Extensions (`internal/mcp/handler/`) + +New methods on the existing `Handler` struct: + +```go +// SteeringGet returns applicable steering files for the given prompt. +// If prompt is empty, returns only "always" inclusion files. +func (h *Handler) SteeringGet(prompt string) (string, error) + +// Search searches across all .context/ files for the given query. +// Returns matching excerpts with file paths and line numbers. +func (h *Handler) Search(query string) (string, error) + +// SessionStartHooks executes session-start hooks and returns aggregated context. +func (h *Handler) SessionStartHooks() (string, error) + +// SessionEndHooks executes session-end hooks with the given summary. 
+func (h *Handler) SessionEndHooks(summary string) (string, error) +``` + +New MCP tool definitions registered in `internal/mcp/server/catalog/`: +- `ctx_steering_get` — parameters: `prompt` (optional string) +- `ctx_search` — parameters: `query` (required string) +- `ctx_session_start` — no parameters +- `ctx_session_end` — parameters: `summary` (optional string) + +### 6. Drift Detection Extensions (`internal/drift/`) + +New check types and issue types: + +```go +// New IssueTypes: +IssueInvalidTool IssueType = "invalid_tool" // unsupported tool identifier +IssueHookNoExec IssueType = "hook_no_exec" // hook missing executable bit +IssueStaleSyncFile IssueType = "stale_sync_file" // synced file out of date + +// New CheckNames: +CheckSteeringTools CheckName = "steering_tools" // validate tool identifiers +CheckHookPerms CheckName = "hook_permissions" // check executable bits +CheckSyncStaleness CheckName = "sync_staleness" // compare synced vs source +CheckRCTool CheckName = "rc_tool_field" // validate .ctxrc tool field +``` + +### 7. Init Command Extensions (`internal/cli/initialize/`) + +The existing `ctx init` command is extended to create three additional directories: +- `.context/steering/` +- `.context/hooks/` +- `.context/skills/` + +All directories are created with `0755` permissions. Existing directories are silently skipped. + +### 8. Agent Command Extensions (`internal/cli/agent/`) + +The `AssemblePacket` function is extended with a new tier for steering files, inserted after existing tiers: + +``` +Tier 1: Constitution, read order, instruction (always) +Tier 2: Active tasks (40%) +Tier 3: Conventions (20%) +Tier 4+5: Decisions + Learnings (remaining) +NEW Tier 6: Steering files (from remaining budget after Tier 4+5) +NEW Tier 7: Skill content (--skill flag, from remaining budget) +``` + +Steering files with `inclusion: always` are included unconditionally. Files with `inclusion: auto` are included when the prompt context matches. 
The `--skill <name>` flag adds the named skill's content to the packet.
+
+### 9. Bootstrap Registration (`internal/bootstrap/`)
+
+New command groups and registrations:
+
+```go
+// New commands registered in bootstrap:
+// Group: Integration
+// - ctx steering (subcommands: add, list, preview, init, sync)
+// - ctx hook (subcommands: add, list, test, enable, disable)
+// - ctx skill (subcommands: install, list, remove)
+```
+
+### 10. CLI Flag: `--tool`
+
+A persistent flag `--tool` is added to the root command. When provided, it overrides the `tool` field from `.ctxrc`. Commands that need a tool identifier read it via a helper:
+
+```go
+// ResolveTool returns the active tool identifier from --tool flag or .ctxrc.
+// Returns an error if neither is set and the command requires a tool.
+func ResolveTool(cmd *cobra.Command) (string, error)
+```
+
+
+## Data Models
+
+### Steering File On-Disk Format
+
+```yaml
+---
+name: api-standards
+description: REST API design conventions. Apply when creating or modifying endpoints.
+inclusion: auto
+tools: [claude, cursor, cline, codex]
+priority: 50
+---
+
+# API Standards
+- Use RESTful conventions (nouns, not verbs)
+- Always return JSON with { data, error, meta }
+- Version all endpoints: /api/v1/...
+```
+
+**Frontmatter fields:**
+
+| Field | Type | Default | Description |
+|-------|------|---------|-------------|
+| `name` | string | required | Unique identifier for the steering file |
+| `description` | string | `""` | Used for `auto` inclusion matching |
+| `inclusion` | enum | `"manual"` | One of: `always`, `auto`, `manual` |
+| `tools` | []string | `[]` (all) | Which tools receive this file |
+| `priority` | int | `50` | Lower = injected first |
+
+### Hook Input JSON Schema
+
+```json
+{
+ "hookType": "pre-tool-use",
+ "tool": "write_file",
+ "parameters": { "path": "src/api/users.go", "content": "..."
},
+ "session": { "id": "sess_abc123", "model": "claude-sonnet-4-6" },
+ "timestamp": "2026-03-22T10:30:00Z",
+ "ctxVersion": "0.9.0"
+}
+```
+
+### Hook Output JSON Schema
+
+```json
+{
+ "cancel": false,
+ "context": "Additional text injected into AI conversation",
+ "message": "Optional user-visible message"
+}
+```
+
+### Skill Manifest (`SKILL.md`)
+
+```yaml
+---
+name: react-patterns
+description: React component patterns. Activate when creating or modifying React components.
+---
+
+# React Patterns
+- Use functional components with hooks
+- Co-locate tests with components
+```
+
+### Extended `.ctxrc` Format
+
+```yaml
+profile: kiro
+tool: kiro
+
+steering:
+ dir: .context/steering
+ default_inclusion: manual
+ default_tools: [kiro, claude]
+
+hooks:
+ dir: .context/hooks
+ timeout: 10
+ enabled: true
+
+token_budget: 8000
+```
+
+### Tool-Native Format Mapping
+
+**Cursor (`.cursor/rules/<name>.mdc`):**
+```yaml
+---
+description: <description>
+globs: []
+alwaysApply: <true|false>
+---
+
+<body>
+```
+
+**Cline (`.clinerules/<name>.md`):**
+```markdown
+# <name>
+
+<body>
+```
+
+**Kiro (`.kiro/steering/<name>.md`):**
+```yaml
+---
+name: <name>
+description: <description>
+mode: <mode>
+---
+
+<body>
+```
+
+### Supported Tool Identifiers
+
+| Identifier | Tool | Sync Support |
+|-----------|------|-------------|
+| `claude` | Claude Code | No sync (uses `ctx agent` directly) |
+| `cursor` | Cursor | `.cursor/rules/*.mdc` |
+| `cline` | Cline | `.clinerules/*.md` |
+| `kiro` | Kiro | `.kiro/steering/*.md` |
+| `codex` | OpenAI Codex | No sync (uses `ctx agent` directly) |
+
+### Hook Script Template
+
+Generated by `ctx hook add <name> <type>`:
+
+```bash
+#!/usr/bin/env bash
+# Hook: <name>
+# Type: <type>
+# Created by: ctx hook add
+
+set -euo pipefail
+
+INPUT=$(cat)
+
+# Parse input fields
+HOOK_TYPE=$(echo "$INPUT" | jq -r '.hookType')
+TOOL=$(echo "$INPUT" | jq -r '.tool // empty')
+
+# Your hook logic here
+
+# Return output
+echo '{"cancel": false, "context": "", "message": ""}'
+```
+
+### Directory Structure After Init
+
+```
+.context/
+├── TASKS.md (existing)
+├──
DECISIONS.md (existing) +├── LEARNINGS.md (existing) +├── CONVENTIONS.md (existing) +├── ARCHITECTURE.md (existing) +├── CONSTITUTION.md (existing) +├── GLOSSARY.md (existing) +├── steering/ (NEW) +│ ├── product.md (created by ctx steering init) +│ ├── tech.md (created by ctx steering init) +│ ├── structure.md (created by ctx steering init) +│ └── workflow.md (created by ctx steering init) +├── hooks/ (NEW, empty after ctx init) +│ ├── pre-tool-use/ +│ ├── post-tool-use/ +│ ├── session-start/ +│ ├── session-end/ +│ ├── file-save/ +│ └── context-add/ +└── skills/ (NEW, empty after ctx init) +``` + +### Data Flow: Steering Inclusion + +```mermaid +sequenceDiagram + participant User + participant Agent as ctx agent + participant RC as .ctxrc + participant Steering as internal/steering + participant FS as .context/steering/ + + User->>Agent: ctx agent --budget 4000 + Agent->>RC: Read tool, steering config + Agent->>FS: Read *.md files + FS-->>Steering: Raw file bytes + Steering->>Steering: Parse frontmatter + body + Steering->>Steering: Filter by inclusion mode + tool + Steering->>Steering: Sort by priority, then name + Steering-->>Agent: Filtered SteeringFile list + Agent->>Agent: Assemble packet (existing tiers + steering tier) + Agent-->>User: Context packet with steering +``` + +### Data Flow: Hook Execution + +```mermaid +sequenceDiagram + participant MCP as MCP Client + participant Handler as MCP Handler + participant Runner as Hook Runner + participant Script as Hook Script + + MCP->>Handler: ctx_session_start + Handler->>Runner: RunAll(session-start, input, timeout) + Runner->>Runner: Discover hooks in .context/hooks/session-start/ + Runner->>Runner: Validate: no symlinks, within boundary, executable + loop For each enabled hook (alphabetical) + Runner->>Script: Write HookInput JSON to stdin + Script-->>Runner: Read HookOutput JSON from stdout + alt cancel: true + Runner-->>Handler: AggregatedOutput{Cancelled: true} + else timeout exceeded + Runner->>Script: 
Kill process + Runner->>Runner: Log timeout warning, continue + else non-zero exit or invalid JSON + Runner->>Runner: Log warning, continue + end + end + Runner-->>Handler: AggregatedOutput{Context: "..."} + Handler-->>MCP: Success response with aggregated context +``` diff --git a/.kiro/specs/hooks-and-steering/requirements.md b/.kiro/specs/hooks-and-steering/requirements.md new file mode 100644 index 000000000..c6a9f6fde --- /dev/null +++ b/.kiro/specs/hooks-and-steering/requirements.md @@ -0,0 +1,274 @@ +# Requirements Document + +## Introduction + +This document specifies the Hooks & Steering system for ctx — a universal AI-context layer CLI tool (`github.com/ActiveMemory/ctx`). The system extends ctx from a persistence-only tool into a behavioral guidance and lifecycle automation platform that works across all major AI coding assistants (Claude Code, Cursor, Cline, Kiro, Codex). + +The feature spans four phases: a unified Steering Layer, an event-driven Hooks System, enhanced MCP Server tools, and a Skills System. Each phase is independently useful and additive — nothing breaks existing usage. Cross-tool configuration is handled by extending the existing `.ctxrc` profile system (`.ctxrc.kiro`, `.ctxrc.claude`, etc.) with new `tool`, `steering`, and `hooks` sections — no separate profiles directory is needed. + +All new artifacts are plain files (markdown, YAML, shell scripts) stored under `.context/`, making them git-versionable, human-readable, and tool-agnostic. ctx remains the single source of truth; other tools sync from ctx. ctx does not auto-detect which AI tool the user is using — the user specifies the target tool once via the `tool` field in `.ctxrc`, and all commands read from that. A `--tool` CLI flag is available as a one-off override. + +## Glossary + +- **Steering_File**: A markdown file with YAML frontmatter stored in `.context/steering/` that defines persistent behavioral rules injected into AI tool prompts. 
+- **Inclusion_Mode**: One of three modes (`always`, `auto`, `manual`) that determines when a Steering_File is injected into an AI prompt.
+- **Hook**: An executable script in `.context/hooks/<type>/` that fires at a specific lifecycle event, receives JSON via stdin, and returns JSON via stdout.
+- **Hook_Type**: A lifecycle event category: `pre-tool-use`, `post-tool-use`, `session-start`, `session-end`, `file-save`, or `context-add`.
+- **Hook_Runner**: The internal component that discovers, validates, and executes Hook scripts, passing the Hook_Input and reading the Hook_Output.
+- **Hook_Input**: A JSON object sent to a Hook via stdin containing `hookType`, `tool`, `parameters`, `session`, `timestamp`, and `ctxVersion` fields.
+- **Hook_Output**: A JSON object returned by a Hook via stdout containing `cancel` (boolean), `context` (optional string), and `message` (optional string) fields.
+- **MCP_Server**: The Model Context Protocol server (`ctx mcp serve`) that exposes ctx operations as JSON-RPC 2.0 tools over stdin/stdout.
+- **MCP_Tool**: A callable operation exposed by the MCP_Server (e.g., `ctx_status`, `ctx_steering_get`).
+- **Profile**: A named `.ctxrc.<tool>` configuration file (e.g., `.ctxrc.kiro`, `.ctxrc.claude`) that defines tool-specific settings using the existing ctx config switch mechanism.
+- **Skill**: A reusable instruction bundle stored in `.context/skills/` containing a `SKILL.md` manifest with YAML frontmatter and markdown instructions.
+- **Context_Packet**: The assembled markdown output produced by `ctx agent` containing prioritized context files fitted to a token budget.
+- **Steering_Sync**: The process of converting Steering_Files into tool-native formats (e.g., `.cursor/rules/*.mdc`, `.clinerules/*.md`, `.kiro/steering/*.md`).
+- **Foundation_Files**: Auto-generated Steering_Files (`product.md`, `tech.md`, `structure.md`, `workflow.md`) created by `ctx steering init` from codebase analysis.
+- **Tool_Identifier**: A string identifying a supported AI tool: `claude`, `cursor`, `cline`, `kiro`, or `codex`. +- **Frontmatter_Parser**: The component that parses YAML frontmatter delimited by `---` from the top of markdown files. + +## Requirements + +### Requirement 1: Steering File Storage and Format + +**User Story:** As a developer, I want to store behavioral rules as markdown files with structured frontmatter in `.context/steering/`, so that AI tools receive consistent guidance that is human-readable and version-controlled. + +#### Acceptance Criteria + +1. THE Steering_File SHALL contain YAML frontmatter delimited by `---` lines followed by markdown body content. +2. THE Frontmatter_Parser SHALL extract `name` (string), `description` (string), `inclusion` (one of `always`, `auto`, `manual`), `tools` (list of Tool_Identifiers), and `priority` (integer) fields from Steering_File frontmatter. +3. WHEN a Steering_File omits the `inclusion` field, THE Frontmatter_Parser SHALL default the Inclusion_Mode to `manual`. +4. WHEN a Steering_File omits the `tools` field, THE Frontmatter_Parser SHALL apply the Steering_File to all supported tools. +5. WHEN a Steering_File omits the `priority` field, THE Frontmatter_Parser SHALL default the priority to `50`. +6. WHEN a Steering_File contains invalid YAML frontmatter, THE Frontmatter_Parser SHALL return a descriptive error identifying the file path and the parsing failure. +7. THE Frontmatter_Parser SHALL format Steering_File objects back into valid frontmatter-plus-markdown files. +8. FOR ALL valid Steering_File objects, parsing then printing then parsing SHALL produce an equivalent object (round-trip property). + +### Requirement 2: Steering Inclusion Modes + +**User Story:** As a developer, I want steering files to be injected into AI prompts based on configurable inclusion modes, so that relevant rules appear automatically while irrelevant ones stay out of the way. + +#### Acceptance Criteria + +1. 
WHILE the Inclusion_Mode of a Steering_File is `always`, THE Steering_Layer SHALL include that Steering_File in every Context_Packet regardless of prompt content.
+2. WHILE the Inclusion_Mode of a Steering_File is `auto`, THE Steering_Layer SHALL include that Steering_File in a Context_Packet only when the prompt description matches the Steering_File `description` field.
+3. WHILE the Inclusion_Mode of a Steering_File is `manual`, THE Steering_Layer SHALL include that Steering_File in a Context_Packet only when the user explicitly references the Steering_File by name.
+4. THE Steering_Layer SHALL inject Steering_Files ordered by ascending `priority` value (lower priority number injected first).
+5. WHEN two Steering_Files share the same `priority` value, THE Steering_Layer SHALL order them alphabetically by `name`.
+
+### Requirement 3: Steering CLI Commands
+
+**User Story:** As a developer, I want CLI commands to create, list, preview, and initialize steering files, so that I can manage behavioral rules without manually editing files.
+
+#### Acceptance Criteria
+
+1. WHEN the user runs `ctx steering add <name>`, THE CLI SHALL create a new Steering_File at `.context/steering/<name>.md` with default frontmatter and an empty markdown body.
+2. WHEN the user runs `ctx steering add` with a name that already exists, THE CLI SHALL return an error stating the file already exists.
+3. WHEN the user runs `ctx steering list`, THE CLI SHALL display all Steering_Files with their name, Inclusion_Mode, priority, and target tools.
+4. WHEN the user runs `ctx steering preview <prompt>`, THE CLI SHALL display the list of Steering_Files that would be included for the given prompt text, respecting Inclusion_Mode rules.
+5. WHEN the user runs `ctx steering init`, THE CLI SHALL generate Foundation_Files (`product.md`, `tech.md`, `structure.md`, `workflow.md`) in `.context/steering/` by analyzing the current codebase.
+6.
WHEN Foundation_Files already exist and the user runs `ctx steering init`, THE CLI SHALL skip existing files and report which files were skipped.
+7. WHEN the `.context/` directory does not exist, THE CLI SHALL return an error instructing the user to run `ctx init` first.
+
+### Requirement 4: Steering Integration with Context Packet
+
+**User Story:** As a developer, I want `ctx agent` to include relevant steering files in its output, so that AI tools automatically receive behavioral guidance alongside project context.
+
+#### Acceptance Criteria
+
+1. WHEN `ctx agent` assembles a Context_Packet, THE Agent_Command SHALL include applicable Steering_Files after the existing priority-ordered context files.
+2. THE Agent_Command SHALL respect the token budget when including Steering_Files, truncating or omitting lower-priority Steering_Files when the budget is exceeded.
+3. WHEN no Steering_Files exist in `.context/steering/`, THE Agent_Command SHALL produce the same Context_Packet as the current implementation without errors.
+
+
+### Requirement 5: Steering Sync to Tool-Native Formats
+
+**User Story:** As a developer, I want to sync steering files to tool-native formats for Cursor, Cline, and Kiro, so that ctx remains the single source of truth while each tool receives rules in its expected format.
+
+#### Acceptance Criteria
+
+1. WHEN the user runs `ctx steering sync` without the `--tool` flag and without the `--all` flag, THE CLI SHALL read the `tool` field from the active `.ctxrc` and sync to that tool's native format.
+2. WHEN the user runs `ctx steering sync` without the `--tool` flag and without the `--all` flag and no `tool` field is set in `.ctxrc`, THE CLI SHALL return an error instructing the user to specify a tool with `--tool <tool>`, use `--all`, or set the `tool` field in `.ctxrc`.
+3. WHEN the user runs `ctx steering sync --tool cursor`, THE CLI SHALL write each applicable Steering_File as a `.cursor/rules/<name>.mdc` file with Cursor-compatible frontmatter.
+4. WHEN the user runs `ctx steering sync --tool cline`, THE CLI SHALL write each applicable Steering_File as a `.clinerules/<name>.md` file.
+5. WHEN the user runs `ctx steering sync --tool kiro`, THE CLI SHALL write each applicable Steering_File as a `.kiro/steering/<name>.md` file with Kiro-compatible frontmatter.
+6. WHEN the user runs `ctx steering sync --tool` with an unsupported Tool_Identifier, THE CLI SHALL return an error listing the supported Tool_Identifiers.
+7. WHEN the user runs `ctx steering sync --all`, THE CLI SHALL sync Steering_Files to all supported tool formats.
+8. WHEN a synced tool-native file already exists and the source Steering_File has not changed, THE CLI SHALL skip the file and not overwrite the existing content.
+9. WHEN a Steering_File specifies a `tools` list that excludes a given Tool_Identifier, THE Steering_Sync SHALL skip that Steering_File for the excluded tool.
+
+### Requirement 6: Hook Storage and Discovery
+
+**User Story:** As a developer, I want hooks stored as executable scripts in `.context/hooks/<type>/`, so that lifecycle automation is file-based, git-versionable, and language-agnostic.
+
+#### Acceptance Criteria
+
+1. THE Hook_Runner SHALL discover executable scripts in `.context/hooks/<type>/` directories where `<type>` is one of: `pre-tool-use`, `post-tool-use`, `session-start`, `session-end`, `file-save`, `context-add`.
+2. WHEN a script in a Hook directory lacks the executable permission bit, THE Hook_Runner SHALL skip that script and log a warning identifying the file path.
+3. THE Hook_Runner SHALL execute discovered hooks in alphabetical order by filename within each Hook_Type directory.
+4. WHEN the `.context/hooks/` directory does not exist, THE Hook_Runner SHALL return an empty hook list without error.
+ +### Requirement 7: Hook Input/Output Contract + +**User Story:** As a developer, I want hooks to receive structured JSON input and return structured JSON output, so that hook scripts can make informed decisions and communicate results back to the system. + +#### Acceptance Criteria + +1. THE Hook_Runner SHALL pass a Hook_Input JSON object to each hook script via stdin containing: `hookType` (string), `tool` (string), `parameters` (object), `session` (object with `id` and `model` fields), `timestamp` (ISO 8601 string), and `ctxVersion` (string). +2. THE Hook_Runner SHALL read a Hook_Output JSON object from each hook script via stdout containing: `cancel` (boolean), `context` (optional string), and `message` (optional string). +3. WHEN a hook script returns `cancel: true` in the Hook_Output, THE Hook_Runner SHALL halt execution of subsequent hooks for that event and return the cancellation message. +4. WHEN a hook script returns a non-empty `context` field in the Hook_Output, THE Hook_Runner SHALL append that text to the AI conversation context. +5. IF a hook script exits with a non-zero exit code, THEN THE Hook_Runner SHALL log the error, skip that hook, and continue executing remaining hooks for the event. +6. IF a hook script produces invalid JSON on stdout, THEN THE Hook_Runner SHALL log a warning identifying the hook file and the parse error, and continue executing remaining hooks. +7. THE Hook_Runner SHALL enforce a configurable timeout (default 10 seconds) per hook execution. +8. IF a hook script exceeds the timeout, THEN THE Hook_Runner SHALL terminate the script process and log a timeout warning. + +### Requirement 8: Hook CLI Commands + +**User Story:** As a developer, I want CLI commands to create, list, test, enable, and disable hooks, so that I can manage lifecycle automation without manually editing files. + +#### Acceptance Criteria + +1. 
WHEN the user runs `ctx hook add <name> <type>`, THE CLI SHALL create an executable script template at `.context/hooks/<type>/<name>.sh` with the correct shebang, JSON input reading, and JSON output structure.
+2. WHEN the user runs `ctx hook add` with an invalid Hook_Type, THE CLI SHALL return an error listing the valid Hook_Types.
+3. WHEN the user runs `ctx hook list`, THE CLI SHALL display all hooks grouped by Hook_Type, showing name, enabled/disabled status, and file path.
+4. WHEN the user runs `ctx hook test <type> --tool <tool> --path <path>`, THE CLI SHALL construct a mock Hook_Input, execute all enabled hooks for that Hook_Type, and display the Hook_Output from each.
+5. WHEN the user runs `ctx hook disable <name>`, THE CLI SHALL remove the executable permission bit from the hook script.
+6. WHEN the user runs `ctx hook enable <name>`, THE CLI SHALL add the executable permission bit to the hook script.
+7. WHEN the user runs `ctx hook disable` or `ctx hook enable` with a name that does not match any hook file, THE CLI SHALL return an error stating the hook was not found.
+
+### Requirement 9: MCP Server — New Steering and Search Tools
+
+**User Story:** As a developer, I want the MCP server to expose steering retrieval and context search as callable tools, so that AI tools can dynamically request relevant guidance and search context mid-session.
+
+#### Acceptance Criteria
+
+1. WHEN an MCP client calls the `ctx_steering_get` MCP_Tool with an optional `prompt` parameter, THE MCP_Server SHALL return the list of applicable Steering_Files for that prompt, respecting Inclusion_Mode rules.
+2. WHEN an MCP client calls the `ctx_steering_get` MCP_Tool without a `prompt` parameter, THE MCP_Server SHALL return all Steering_Files with Inclusion_Mode `always`.
+3. WHEN an MCP client calls the `ctx_search` MCP_Tool with a `query` parameter, THE MCP_Server SHALL search across all `.context/` files and return matching excerpts with file paths and line numbers.
+4.
THE MCP_Server SHALL register `ctx_steering_get` and `ctx_search` in the MCP tool catalog with JSON Schema parameter definitions. +5. WHEN the MCP_Server receives a request for an unregistered tool name, THE MCP_Server SHALL return a JSON-RPC error response with error code `-32601` (method not found). + +### Requirement 10: MCP Server — Session Lifecycle Tools + +**User Story:** As a developer, I want the MCP server to expose session start and session end tools, so that AI tools can signal lifecycle events and trigger hooks automatically. + +#### Acceptance Criteria + +1. WHEN an MCP client calls the `ctx_session_start` MCP_Tool, THE MCP_Server SHALL execute all enabled `session-start` hooks and return the aggregated context from hook outputs. +2. WHEN an MCP client calls the `ctx_session_end` MCP_Tool with an optional `summary` parameter, THE MCP_Server SHALL execute all enabled `session-end` hooks, passing the summary in the Hook_Input parameters. +3. WHEN an MCP client calls the `ctx_session_start` MCP_Tool and no `session-start` hooks exist, THE MCP_Server SHALL return a success response with empty context. +4. THE MCP_Server SHALL register `ctx_session_start` and `ctx_session_end` in the MCP tool catalog with JSON Schema parameter definitions. + + +### Requirement 11: Tool Configuration via .ctxrc Profiles + +**User Story:** As a developer, I want to set my active AI tool once in `.ctxrc` and have all ctx commands respect it, so that I don't have to repeat `--tool` on every command. + +#### Acceptance Criteria + +1. THE RC_Package SHALL support a `tool` field in `.ctxrc` containing a single Tool_Identifier (e.g., `kiro`, `claude`, `cursor`, `cline`, `codex`). +2. WHEN the `tool` field is set in `.ctxrc`, ALL commands that accept a `--tool` flag SHALL use the `.ctxrc` value as the default. +3. WHEN the user provides a `--tool` CLI flag, THE CLI SHALL use the flag value and ignore the `.ctxrc` `tool` field. +4. 
WHEN the user runs `ctx config switch kiro` and a `.ctxrc.kiro` file exists, THE CLI SHALL copy `.ctxrc.kiro` to `.ctxrc`, activating the kiro tool configuration including the `tool` field.
+5. THE user SHALL be able to create tool-specific `.ctxrc` profiles (e.g., `.ctxrc.kiro`, `.ctxrc.claude`) with different `tool`, `steering`, `hooks`, and `token_budget` settings.
+6. WHEN the `tool` field is not set in `.ctxrc` and no `--tool` flag is provided, commands that require a Tool_Identifier SHALL return an error instructing the user to set the `tool` field or provide `--tool`.
+
+### Requirement 12: Skills System
+
+**User Story:** As a developer, I want to install, list, and remove reusable instruction bundles, so that I can share and reuse AI guidance across projects.
+
+#### Acceptance Criteria
+
+1. THE Skill SHALL be a directory in `.context/skills/<name>/` containing a `SKILL.md` file with YAML frontmatter (`name`, `description`) and markdown instruction body.
+2. WHEN the user runs `ctx skill install <source>`, THE CLI SHALL download or copy the Skill from the source path into `.context/skills/<name>/`.
+3. WHEN the user runs `ctx skill install` with a source that does not contain a valid `SKILL.md`, THE CLI SHALL return an error stating the source is not a valid skill.
+4. WHEN the user runs `ctx skill list`, THE CLI SHALL display all installed skills with their name and description from the `SKILL.md` frontmatter.
+5. WHEN the user runs `ctx skill remove <name>`, THE CLI SHALL delete the `.context/skills/<name>/` directory.
+6. WHEN the user runs `ctx skill remove` with a name that does not match any installed skill, THE CLI SHALL return an error stating the skill was not found.
+7. WHEN the user runs `ctx agent --skill <name>`, THE Agent_Command SHALL include the named Skill's `SKILL.md` content in the Context_Packet.
+8. WHEN the user runs `ctx agent --skill <name>` with a name that does not match any installed skill, THE Agent_Command SHALL return an error stating the skill was not found.
+ +### Requirement 13: Directory Initialization + +**User Story:** As a developer, I want `ctx init` to create the new directories for steering, hooks, profiles, and skills, so that the project is ready for the full hooks-and-steering system from the start. + +#### Acceptance Criteria + +1. WHEN the user runs `ctx init`, THE Initialize_Command SHALL create `.context/steering/`, `.context/hooks/`, and `.context/skills/` directories alongside existing `.context/` subdirectories. +2. WHEN the directories already exist and the user runs `ctx init`, THE Initialize_Command SHALL skip existing directories without error. +3. THE Initialize_Command SHALL set directory permissions to `0755` for all newly created directories. + +### Requirement 14: Backward Compatibility + +**User Story:** As a developer, I want the hooks-and-steering system to be fully additive, so that existing ctx workflows, CLAUDE.md generation, and AGENTS.md generation continue to work without modification. + +#### Acceptance Criteria + +1. WHEN no `.context/steering/` directory exists, THE Agent_Command SHALL produce the same Context_Packet as the current implementation. +2. WHEN no `.context/hooks/` directory exists, THE Hook_Runner SHALL return empty results without error. +3. WHEN no `tool` field is set in `.ctxrc`, commands that do not require a Tool_Identifier SHALL continue to function with default behavior. +4. WHEN no `.context/skills/` directory exists, THE CLI SHALL report an empty skill list without error. +5. THE existing `CLAUDE.md` and `AGENTS.md` generation commands SHALL continue to function without modification. + +### Requirement 15: Security Constraints + +**User Story:** As a developer, I want the hooks-and-steering system to follow ctx's existing security model, so that no new attack vectors are introduced. + +#### Acceptance Criteria + +1. THE Hook_Runner SHALL reject hook scripts that are symlinks (consistent with ctx's symlink rejection defense layer). +2. 
THE Hook_Runner SHALL validate that all hook script paths resolve within the `.context/hooks/` directory boundary (consistent with ctx's boundary validation). +3. THE Steering_Sync SHALL validate that all output paths resolve within the project root directory boundary. +4. THE Hook_Runner SHALL execute hook scripts with the same user permissions as the ctx process, without privilege escalation. +5. IF a hook script attempts to write outside the project root, THEN THE Hook_Runner SHALL block the write and log a security warning. + +### Requirement 16: Configuration Integration + +**User Story:** As a developer, I want hooks-and-steering settings to integrate with the existing `.ctxrc` configuration system, so that all ctx configuration remains in one place. + +#### Acceptance Criteria + +1. THE RC_Package SHALL support a `steering` section in `.ctxrc` with fields: `dir` (path override, default `.context/steering`), `default_inclusion` (default Inclusion_Mode), and `default_tools` (default Tool_Identifier list). +2. THE RC_Package SHALL support a `hooks` section in `.ctxrc` with fields: `dir` (path override, default `.context/hooks`), `timeout` (integer seconds, default 10), and `enabled` (boolean, default true). +3. WHEN the `hooks.enabled` field in `.ctxrc` is set to `false`, THE Hook_Runner SHALL skip all hook execution. +4. THE RC_Package SHALL resolve hooks-and-steering configuration using the existing priority hierarchy: CLI flags > environment variables > `.ctxrc` > hardcoded defaults. + +### Requirement 17: Drift Detection for Steering and Hooks + +**User Story:** As a developer, I want `ctx drift` to detect issues with steering files and hooks, so that I am warned about stale or misconfigured behavioral guidance. + +#### Acceptance Criteria + +1. WHEN `ctx drift` runs and a Steering_File references a Tool_Identifier not in the supported list, THE Drift_Detector SHALL report a warning for that file. +2. 
WHEN `ctx drift` runs and a hook script in `.context/hooks/` lacks the executable permission bit, THE Drift_Detector SHALL report a warning for that file. +3. WHEN `ctx drift` runs and synced tool-native files are out of date compared to their source Steering_Files, THE Drift_Detector SHALL report a warning listing the stale files. +4. WHEN `ctx drift` runs and the `tool` field in `.ctxrc` contains an unsupported Tool_Identifier, THE Drift_Detector SHALL report a warning. + +### Requirement 18: Use Cases + +**User Story:** As a developer, I want documented use cases that demonstrate how the hooks-and-steering system solves real workflow problems, so that I understand the practical value of each phase. + +#### Acceptance Criteria + +1. THE Documentation SHALL describe a use case where a `session-start` hook automatically injects the full Context_Packet into an AI session, replacing manual CLAUDE.md editing. +2. THE Documentation SHALL describe a use case where a `pre-tool-use` hook blocks AI writes to a frozen legacy directory and returns a decision reference. +3. THE Documentation SHALL describe a use case where a `post-tool-use` hook runs a linter after AI file writes and injects lint results into the conversation. +4. THE Documentation SHALL describe a use case where `ctx steering sync --all` propagates a single set of API design rules to Cursor, Cline, and Kiro simultaneously. +5. THE Documentation SHALL describe a use case where `ctx config switch kiro` activates a `.ctxrc.kiro` profile with kiro-specific tool, budget, and steering settings, and `ctx config switch claude` switches back to Claude Code settings. +6. THE Documentation SHALL describe a use case where a Skill bundle for React patterns is installed from a remote source and activated via `ctx agent --skill react-patterns`. 
+
+### Requirement 19: Test Mechanisms
+
+**User Story:** As a developer, I want each component to have clear test mechanisms, so that correctness can be verified through automated testing.
+
+#### Acceptance Criteria
+
+1. THE Frontmatter_Parser SHALL be testable via round-trip property: for all valid Steering_File inputs, `parse(print(parse(input))) == parse(input)`.
+2. THE Hook_Runner SHALL be testable via the `ctx hook test` command, which constructs mock Hook_Input and verifies Hook_Output structure.
+3. THE Steering_Layer inclusion logic SHALL be testable via the `ctx steering preview` command, which shows which files match a given prompt without side effects.
+4. THE Steering_Sync SHALL be testable via idempotence property: running `ctx steering sync --tool <tool>` twice in succession SHALL produce identical output files.
+5. THE MCP_Server new tools SHALL be testable via JSON-RPC requests sent over stdin, verifying response structure matches the MCP protocol specification.
+6. THE Hook_Runner timeout enforcement SHALL be testable by providing a hook script that sleeps beyond the configured timeout and verifying the script is terminated.
+7. THE Drift_Detector new checks SHALL be testable by constructing `.context/` directories with known issues and verifying the correct warnings are reported.
+8. THE `.ctxrc` tool field SHALL be testable by verifying that commands read the `tool` value and apply it as the default Tool_Identifier.
diff --git a/.kiro/specs/hooks-and-steering/tasks.md b/.kiro/specs/hooks-and-steering/tasks.md
new file mode 100644
index 000000000..a45fc722a
--- /dev/null
+++ b/.kiro/specs/hooks-and-steering/tasks.md
@@ -0,0 +1,328 @@
+# Implementation Plan: Hooks and Steering
+
+## Overview
+
+Incremental implementation of the Hooks & Steering system for ctx. Each task builds on previous work, starting with foundational types and domain logic, then CLI commands, then MCP extensions, and finally integration wiring.
All new packages follow existing `internal/<domain>/` and `internal/cli/<command>/` conventions. Go is the implementation language throughout. + +## Tasks + +- [x] 1. Steering domain package — types and parser + - [x] 1.1 Create `internal/steering/types.go` with `SteeringFile`, `InclusionMode`, and `SyncReport` + - Define `InclusionMode` enum (`always`, `auto`, `manual`) + - Define `SteeringFile` struct with `Name`, `Description`, `Inclusion`, `Tools`, `Priority`, `Body`, `Path` + - Define `SyncReport` struct with `Written`, `Skipped`, `Errors` + - Add `doc.go` for the package + - _Requirements: 1.1, 1.2, 1.5_ + + - [x] 1.2 Implement `internal/steering/parse.go` — frontmatter parser and printer + - `Parse(data []byte, filePath string) (*SteeringFile, error)` — extract YAML frontmatter delimited by `---` and markdown body + - `Print(sf *SteeringFile) []byte` — serialize back to frontmatter + markdown + - Apply defaults: `inclusion` → `manual`, `tools` → nil (all), `priority` → 50 + - Return descriptive error on invalid YAML identifying file path and failure + - Use `gopkg.in/yaml.v3` for YAML parsing + - _Requirements: 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7_ + + - [x] 1.3 Write property test for steering parser round-trip + - **Property 1: Round-trip consistency** — `Parse(Print(Parse(data))) == Parse(data)` for all valid inputs + - **Validates: Requirements 1.8, 19.1** + + - [x] 1.4 Implement `internal/steering/filter.go` — inclusion mode filtering + - `LoadAll(steeringDir string) ([]*SteeringFile, error)` — read all `.md` files and parse + - `Filter(files, prompt, manualNames, tool string) []*SteeringFile` — apply inclusion rules + - `always` files included unconditionally; `auto` files included on description substring match; `manual` only when named + - Sort by ascending priority, then alphabetically by name on tie + - Filter out files whose `Tools` list excludes the given tool + - _Requirements: 2.1, 2.2, 2.3, 2.4, 2.5_ + + - [x] 1.5 Write unit tests for steering filter + - Test 
each inclusion mode independently + - Test priority ordering and alphabetical tie-breaking + - Test tool filtering with explicit tools list and empty (all) tools + - _Requirements: 2.1, 2.2, 2.3, 2.4, 2.5_ + +- [x] 2. Steering sync to tool-native formats + - [x] 2.1 Implement `internal/steering/sync.go` — tool-native format sync + - `SyncTool(steeringDir, projectRoot, tool string) (SyncReport, error)` — write steering files to tool-native directory + - `SyncAll(steeringDir, projectRoot string) (SyncReport, error)` — sync to all supported tools + - Cursor: `.cursor/rules/<name>.mdc` with Cursor-compatible frontmatter + - Cline: `.clinerules/<name>.md` plain markdown + - Kiro: `.kiro/steering/<name>.md` with Kiro frontmatter + - Skip files whose `tools` list excludes the target tool + - Skip files whose content hasn't changed (idempotent) + - Validate output paths resolve within project root boundary + - _Requirements: 5.3, 5.4, 5.5, 5.8, 5.9, 15.3_ + + - [x] 2.2 Write property test for steering sync idempotence + - **Property 2: Sync idempotence** — running `SyncTool` twice produces identical output files + - **Validates: Requirements 19.4** + +- [x] 3. Checkpoint — Ensure all tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 4. 
Hook domain package — types, discovery, runner, and security + - [x] 4.1 Create `internal/hook/types.go` with `HookType`, `HookInput`, `HookOutput`, `HookSession`, `HookInfo`, `AggregatedOutput` + - Define `HookType` constants: `pre-tool-use`, `post-tool-use`, `session-start`, `session-end`, `file-save`, `context-add` + - `ValidHookTypes()` returns all valid hook type strings + - Add `doc.go` for the package + - _Requirements: 6.1, 7.1, 7.2_ + + - [x] 4.2 Implement `internal/hook/security.go` — symlink and boundary validation + - `ValidateHookPath(hooksDir, hookPath string) error` — reject symlinks, validate boundary, check executable bit + - Reuse patterns from `internal/validate/` (boundary check, `os.Lstat` symlink rejection) + - _Requirements: 15.1, 15.2, 15.4_ + + - [x] 4.3 Implement `internal/hook/discover.go` — hook discovery + - `Discover(hooksDir string) (map[HookType][]HookInfo, error)` — find all hook scripts grouped by type + - `FindByName(hooksDir, name string) (*HookInfo, error)` — search all type directories for a hook + - Skip non-executable scripts with logged warning + - Skip symlinks (security) + - Return empty map if hooks directory doesn't exist + - _Requirements: 6.1, 6.2, 6.3, 6.4, 15.1, 15.2_ + + - [x] 4.4 Write unit tests for hook discovery + - Test discovery with mixed executable/non-executable scripts + - Test symlink rejection + - Test missing hooks directory returns empty map + - Test alphabetical ordering within each hook type + - _Requirements: 6.1, 6.2, 6.3, 6.4_ + + - [x] 4.5 Implement `internal/hook/runner.go` — hook execution engine + - `RunAll(hooksDir string, hookType HookType, input *HookInput, timeout time.Duration) (*AggregatedOutput, error)` + - Pass `HookInput` as JSON via stdin, read `HookOutput` as JSON from stdout + - If hook returns `cancel: true`, halt and return cancellation message + - Append non-empty `context` fields to aggregated context + - On non-zero exit: log error, skip hook, continue + - On invalid JSON 
stdout: log warning with hook file and parse error, continue + - Enforce configurable timeout (default 10s); terminate on exceed with logged warning + - _Requirements: 7.1, 7.2, 7.3, 7.4, 7.5, 7.6, 7.7, 7.8_ + + - [x] 4.6 Write unit tests for hook runner + - Test cancel propagation halts subsequent hooks + - Test context aggregation from multiple hooks + - Test non-zero exit code handling (skip and continue) + - Test invalid JSON output handling (skip and continue) + - Test timeout enforcement terminates script + - **Validates: Requirements 7.3, 7.4, 7.5, 7.6, 7.7, 7.8, 19.6** + +- [x] 5. Skill domain package + - [x] 5.1 Create `internal/skill/types.go` with `Skill` struct + - Define `Skill` struct with `Name`, `Description`, `Body`, `Dir` + - Add `doc.go` for the package + - _Requirements: 12.1_ + + - [x] 5.2 Implement `internal/skill/load.go` — skill loading + - `LoadAll(skillsDir string) ([]*Skill, error)` — read all installed skills + - `Load(skillsDir, name string) (*Skill, error)` — read single skill by name + - Parse `SKILL.md` frontmatter (`name`, `description`) and markdown body + - _Requirements: 12.1, 12.4_ + + - [x] 5.3 Implement `internal/skill/install.go` and `internal/skill/remove.go` + - `Install(source, skillsDir string) (*Skill, error)` — copy skill from source, validate `SKILL.md` exists + - `Remove(skillsDir, name string) error` — delete skill directory + - Return error if source has no valid `SKILL.md` + - Return error if skill name not found on remove + - _Requirements: 12.2, 12.3, 12.5, 12.6_ + +- [x] 6. Checkpoint — Ensure all tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 7. 
RC package extensions — tool, steering, hooks config + - [x] 7.1 Add `Tool`, `Steering`, and `Hooks` fields to `CtxRC` in `internal/rc/types.go` + - Add `Tool string` field with `yaml:"tool"` tag + - Add `Steering *SteeringRC` and `Hooks *HooksRC` structs + - `SteeringRC`: `Dir`, `DefaultInclusion`, `DefaultTools` + - `HooksRC`: `Dir`, `Timeout`, `Enabled` + - _Requirements: 11.1, 16.1, 16.2_ + + - [x] 7.2 Add accessor functions for new RC fields + - `Tool() string`, `SteeringDir() string`, `HooksDir() string`, `HookTimeout() int`, `HooksEnabled() bool` + - Apply defaults: steering dir → `.context/steering`, hooks dir → `.context/hooks`, timeout → 10, enabled → true + - Preserve existing priority hierarchy: CLI flags > env vars > `.ctxrc` > defaults + - _Requirements: 11.2, 16.3, 16.4_ + + - [x] 7.3 Write unit tests for RC tool field resolution + - Test that `Tool()` returns the configured value + - Test default values for steering and hooks config + - Test that `HooksEnabled()` returns false when configured + - **Validates: Requirements 19.8** + +- [x] 8. Add `--tool` persistent flag to root command + - [x] 8.1 Add `--tool` persistent flag in `internal/bootstrap/cmd.go` + - Add `ResolveTool(cmd *cobra.Command) (string, error)` helper that reads `--tool` flag, falls back to `rc.Tool()` + - Return error if neither is set and command requires a tool + - _Requirements: 11.2, 11.3, 11.6_ + +- [x] 9. 
Steering CLI commands + - [x] 9.1 Create `internal/cli/steering/` package with parent command and `doc.go` + - `Cmd() *cobra.Command` returning `ctx steering` parent with subcommands + - Follow existing `internal/cli/<command>/` conventions + - _Requirements: 3.1_ + + - [x] 9.2 Implement `ctx steering add <name>` subcommand + - Create `.context/steering/<name>.md` with default frontmatter and empty body + - Error if file already exists + - Error if `.context/` directory does not exist + - _Requirements: 3.1, 3.2, 3.7_ + + - [x] 9.3 Implement `ctx steering list` subcommand + - Display all steering files with name, inclusion mode, priority, and target tools + - _Requirements: 3.3_ + + - [x] 9.4 Implement `ctx steering preview <prompt>` subcommand + - Show which steering files would be included for the given prompt text + - Respect inclusion mode rules using `steering.Filter` + - _Requirements: 3.4, 19.3_ + + - [x] 9.5 Implement `ctx steering init` subcommand + - Generate foundation files (`product.md`, `tech.md`, `structure.md`, `workflow.md`) in `.context/steering/` + - Skip existing files and report which were skipped + - _Requirements: 3.5, 3.6_ + + - [x] 9.6 Implement `ctx steering sync` subcommand + - Without `--tool` or `--all`: read `tool` from `.ctxrc`, sync to that tool's format + - `--tool <tool>`: sync to specified tool format; error on unsupported tool + - `--all`: sync to all supported tool formats + - Error if no tool specified and no `tool` field in `.ctxrc` + - _Requirements: 5.1, 5.2, 5.3, 5.4, 5.5, 5.6, 5.7_ + +- [x] 10. 
Hook CLI commands + - [x] 10.1 Create `internal/cli/hook/` package with parent command and `doc.go` + - `Cmd() *cobra.Command` returning `ctx hook` parent with subcommands + - _Requirements: 8.1_ + + - [x] 10.2 Implement `ctx hook add <type> <name>` subcommand + - Create executable script template at `.context/hooks/<type>/<name>.sh` + - Include shebang, JSON input reading, JSON output structure + - Error on invalid hook type listing valid types + - _Requirements: 8.1, 8.2_ + + - [x] 10.3 Implement `ctx hook list` subcommand + - Display all hooks grouped by hook type with name, enabled/disabled status, file path + - _Requirements: 8.3_ + + - [x] 10.4 Implement `ctx hook test <name>` subcommand + - Accept `--tool` and `--path` flags + - Construct mock `HookInput`, execute enabled hooks, display `HookOutput` from each + - _Requirements: 8.4, 19.2_ + + - [x] 10.5 Implement `ctx hook enable <name>` and `ctx hook disable <name>` subcommands + - `enable`: add executable permission bit + - `disable`: remove executable permission bit + - Error if hook name not found + - _Requirements: 8.5, 8.6, 8.7_ + +- [x] 11. Skill CLI commands + - [x] 11.1 Create `internal/cli/skill/` package with parent command and `doc.go` + - `Cmd() *cobra.Command` returning `ctx skill` parent with subcommands + - _Requirements: 12.1_ + + - [x] 11.2 Implement `ctx skill install <source>` subcommand + - Download or copy skill from source into `.context/skills/<name>/` + - Error if source has no valid `SKILL.md` + - _Requirements: 12.2, 12.3_ + + - [x] 11.3 Implement `ctx skill list` subcommand + - Display all installed skills with name and description + - Return empty list without error if `.context/skills/` doesn't exist + - _Requirements: 12.4, 14.4_ + + - [x] 11.4 Implement `ctx skill remove <name>` subcommand + - Delete `.context/skills/<name>/` directory + - Error if skill name not found + - _Requirements: 12.5, 12.6_ + +- [x] 12. Checkpoint — Ensure all tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 13. 
Bootstrap registration — wire new CLI commands + - [x] 13.1 Register `steering`, `hook`, and `skill` commands in `internal/bootstrap/group.go` + - Add `steering.Cmd` and `hook.Cmd` to `integrations()` group + - Add `skill.Cmd` to `contextCmds()` group + - Import new CLI packages + - _Requirements: 3.1, 8.1, 12.1_ + +- [x] 14. Extend `ctx init` to create new directories + - [x] 14.1 Extend `internal/cli/initialize/` to create `.context/steering/`, `.context/hooks/`, and `.context/skills/` + - Create directories with `0755` permissions + - Skip existing directories without error + - _Requirements: 13.1, 13.2, 13.3_ + +- [x] 15. Extend `ctx agent` for steering and skill integration + - [x] 15.1 Add steering file inclusion to `ctx agent` context packet assembly + - After existing tiers, add Tier 6 for steering files (from remaining budget) + - Include `always` files unconditionally, `auto` files on prompt match + - Respect token budget — truncate/omit lower-priority steering files when exceeded + - Produce same output when no `.context/steering/` exists + - _Requirements: 4.1, 4.2, 4.3, 14.1_ + + - [x] 15.2 Add `--skill <name>` flag to `ctx agent` + - Include named skill's `SKILL.md` content in context packet as Tier 7 + - Error if skill name not found + - _Requirements: 12.7, 12.8_ + +- [x] 16. 
MCP server extensions — new tools + - [x] 16.1 Implement `SteeringGet` and `Search` handler methods in `internal/mcp/handler/` + - `SteeringGet(prompt string) (string, error)` — return applicable steering files; if no prompt, return `always` files only + - `Search(query string) (string, error)` — search across `.context/` files, return excerpts with paths and line numbers + - _Requirements: 9.1, 9.2, 9.3_ + + - [x] 16.2 Implement `SessionStartHooks` and `SessionEndHooks` handler methods + - `SessionStartHooks() (string, error)` — execute `session-start` hooks, return aggregated context + - `SessionEndHooks(summary string) (string, error)` — execute `session-end` hooks with summary in parameters + - Return success with empty context when no hooks exist + - _Requirements: 10.1, 10.2, 10.3_ + + - [x] 16.3 Register new MCP tools in catalog and dispatch + - Add `ctx_steering_get`, `ctx_search`, `ctx_session_start`, `ctx_session_end` to `internal/mcp/server/catalog/` + - Add JSON Schema parameter definitions for each tool + - Add dispatch routes in `internal/mcp/server/dispatch/` + - Unregistered tool names return JSON-RPC error `-32601` + - _Requirements: 9.4, 9.5, 10.4_ + + - [x] 16.4 Write unit tests for MCP steering and session tools + - Test `ctx_steering_get` with and without prompt parameter + - Test `ctx_session_start` with no hooks returns success + - Test `ctx_session_end` passes summary to hook input + - **Validates: Requirements 19.5** + +- [x] 17. Checkpoint — Ensure all tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 18. 
Drift detection extensions + - [x] 18.1 Add new issue types and check names to `internal/drift/types.go` + - `IssueInvalidTool`, `IssueHookNoExec`, `IssueStaleSyncFile` + - `CheckSteeringTools`, `CheckHookPerms`, `CheckSyncStaleness`, `CheckRCTool` + - _Requirements: 17.1, 17.2, 17.3, 17.4_ + + - [x] 18.2 Implement new drift checks in `internal/drift/detector.go` + - Check steering files for unsupported tool identifiers + - Check hook scripts for missing executable permission bit + - Check synced tool-native files are up to date vs source steering files + - Check `.ctxrc` `tool` field for unsupported tool identifier + - _Requirements: 17.1, 17.2, 17.3, 17.4_ + + - [x] 18.3 Write unit tests for new drift checks + - Construct `.context/` directories with known issues, verify correct warnings + - Test each new check type independently + - **Validates: Requirements 19.7** + +- [x] 19. Backward compatibility verification + - [x] 19.1 Verify backward compatibility across all extensions + - Confirm `ctx agent` produces same output when no `.context/steering/` exists + - Confirm hook runner returns empty results when no `.context/hooks/` exists + - Confirm commands work without `tool` field when tool is not required + - Confirm skill list returns empty when no `.context/skills/` exists + - Confirm existing `CLAUDE.md` and `AGENTS.md` generation is unchanged + - _Requirements: 14.1, 14.2, 14.3, 14.4, 14.5_ + +- [x] 20. Final checkpoint — Ensure all tests pass + - Ensure all tests pass, ask the user if questions arise. 
+ +## Notes + +- Tasks marked with `*` are optional and can be skipped for faster MVP +- Each task references specific requirements for traceability +- Checkpoints ensure incremental validation +- Property tests validate universal correctness properties (round-trip, idempotence) +- All new packages include `doc.go` and `testmain_test.go` following existing conventions +- Security validation (symlink rejection, boundary checks) reuses `internal/validate/` patterns diff --git a/docs/cli/index.md b/docs/cli/index.md index 983659d1a..faa8bc3ad 100644 --- a/docs/cli/index.md +++ b/docs/cli/index.md @@ -25,6 +25,7 @@ All commands support these flags: | `--version` | Show version | | `--context-dir ` | Override context directory (default: `.context/`) | | `--allow-outside-cwd` | Allow context directory outside current working directory | +| `--tool ` | Override active AI tool identifier (e.g. `kiro`, `cursor`) | **Initialization required.** Most commands require a `.context/` directory created by `ctx init`. Running a command without one produces: @@ -75,6 +76,9 @@ own guards and no-op gracefully. 
| [`ctx trace`](trace.md#ctx-trace) | Show context behind git commits | | [`ctx doctor`](doctor.md#ctx-doctor) | Structural health check (hooks, drift, config) | | [`ctx mcp`](mcp.md#ctx-mcp) | MCP server for AI tool integration (stdin/stdout) | +| [`ctx steering`](tools.md#ctx-steering) | Manage steering files (behavioral rules for AI tools) | +| [`ctx hook`](tools.md#ctx-hook) | Manage lifecycle hooks (shell scripts for automation) | +| [`ctx skill`](tools.md#ctx-skill) | Manage reusable instruction bundles | | [`ctx config`](config.md#ctx-config) | Manage runtime configuration profiles | | [`ctx system`](system.md#ctx-system) | System diagnostics and hook commands | @@ -140,6 +144,15 @@ notify: # Webhook notification settings - nudge - relay # - heartbeat # Every-prompt session-alive signal +tool: "" # Active AI tool: claude, cursor, cline, kiro, codex +steering: # Steering layer configuration + dir: .context/steering # Steering files directory + default_inclusion: manual # Default inclusion mode (always, auto, manual) + default_tools: [] # Default tool filter for new steering files +hooks: # Hook system configuration + dir: .context/hooks # Hook scripts directory + timeout: 10 # Per-hook execution timeout in seconds + enabled: true # Whether hook execution is enabled ``` | Field | Type | Default | Description | @@ -163,6 +176,13 @@ notify: # Webhook notification settings | `session_prefixes` | `[]string` | `["Session:"]` | Recognized Markdown session header prefixes. Extend to parse sessions written in other languages | | `freshness_files` | `[]object` | *(none)* | Files to track for staleness (path, desc, optional review_url). 
Hook warns after 6 months without modification | | `notify.events` | `[]string` | *(all)* | Event filter for webhook notifications (empty = all) | +| `tool` | `string` | *(empty)* | Active AI tool identifier (`claude`, `cursor`, `cline`, `kiro`, `codex`) | +| `steering.dir` | `string` | `.context/steering` | Steering files directory | +| `steering.default_inclusion` | `string` | `manual` | Default inclusion mode for new steering files (`always`, `auto`, `manual`) | +| `steering.default_tools` | `[]string` | *(all)* | Default tool filter for new steering files (empty = all tools) | +| `hooks.dir` | `string` | `.context/hooks` | Hook scripts directory | +| `hooks.timeout` | `int` | `10` | Per-hook execution timeout in seconds | +| `hooks.enabled` | `bool` | `true` | Whether hook execution is enabled | **Priority order:** CLI flags > Environment variables > `.ctxrc` > Defaults diff --git a/docs/cli/mcp.md b/docs/cli/mcp.md index 14ae36e90..3f856daff 100644 --- a/docs/cli/mcp.md +++ b/docs/cli/mcp.md @@ -221,6 +221,44 @@ persistence ceremony - human confirmation required. | `type` | string | Yes | Event type: start, end | | `caller` | string | No | Caller identifier (cursor, windsurf, vscode, claude-desktop) | +### `ctx_steering_get` + +Retrieve applicable steering files for a prompt. Without a prompt, +returns always-included files only. + +| Argument | Type | Required | Description | +|----------|--------|----------|------------------------------------------------------------| +| `prompt` | string | No | Prompt text to match against steering file descriptions | + +**Read-only.** + +### `ctx_search` + +Search across `.context/` files for a query string. Returns matching +lines with file paths and line numbers. 
+ +| Argument | Type | Required | Description | +|----------|--------|----------|--------------------------------| +| `query` | string | Yes | Search string to match against | + +**Read-only.** + +### `ctx_session_start` + +Execute session-start hooks and return aggregated context from hook +outputs. + +**Arguments:** None. + +### `ctx_session_end` + +Execute session-end hooks with an optional summary. Returns aggregated +context from hook outputs. + +| Argument | Type | Required | Description | +|-----------|--------|----------|--------------------------------------| +| `summary` | string | No | Session summary passed to hook scripts | + ### `ctx_remind` List pending session-scoped reminders. diff --git a/docs/cli/tools.md b/docs/cli/tools.md index 09cdfb3e3..df79bc8a3 100644 --- a/docs/cli/tools.md +++ b/docs/cli/tools.md @@ -62,6 +62,8 @@ ctx setup [flags] |---------------|----------------------------------------------| | `claude-code` | Redirects to plugin install instructions | | `cursor` | Cursor IDE | +| `kiro` | Kiro IDE | +| `cline` | Cline (VS Code extension) | | `aider` | Aider CLI | | `copilot` | GitHub Copilot | | `windsurf` | Windsurf IDE | @@ -79,6 +81,11 @@ ctx setup aider # Generate and write .github/copilot-instructions.md ctx setup copilot --write + +# Generate MCP config and sync steering files +ctx setup kiro --write +ctx setup cursor --write +ctx setup cline --write ``` --- @@ -1051,3 +1058,213 @@ ctx why invariants # Pipe to a pager ctx why manifesto | less ``` + +--- + +### `ctx steering` + +Manage steering files: persistent behavioral rules for AI tools. + +Steering files live in `.context/steering/` as Markdown files with +YAML frontmatter that controls inclusion mode, tool targeting, and +priority. + +```bash +ctx steering +``` + +#### `ctx steering init` + +Create a starter set of steering files in `.context/steering/`. + +```bash +ctx steering init +``` + +#### `ctx steering add` + +Create a new steering file with default frontmatter. 
+ +```bash +ctx steering add <name> +``` + +**Arguments**: + +- `name`: Steering file name (without `.md` extension) + +**Example**: + +```bash +ctx steering add security +# Created .context/steering/security.md +``` + +The generated file uses `inclusion: manual` and `priority: 50` by +default. Edit the frontmatter to change behavior: + +```yaml +--- +name: security +description: Security rules for all code changes +inclusion: always # always | auto | manual +tools: [] # empty = all tools +priority: 10 # lower = injected first +--- +``` + +#### `ctx steering list` + +List all steering files with their inclusion mode and priority. + +```bash +ctx steering list +``` + +#### `ctx steering preview` + +Preview which steering files would be included for a given prompt. + +```bash +ctx steering preview [prompt] +``` + +#### `ctx steering sync` + +Sync steering files to tool-native formats (e.g. `.cursor/rules/`, +`.kiro/steering/`, `.clinerules/`). + +```bash +ctx steering sync +``` + +--- + +### `ctx hook` + +Manage lifecycle hooks: shell scripts that fire at specific events +during AI sessions. + +Hooks live in `.context/hooks/<type>/` directories, organized by +event type. Each hook is an executable script that receives JSON +via stdin and returns JSON via stdout. + +```bash +ctx hook +``` + +**Hook types**: + +| Type | When it fires | +|------------------|----------------------------------| +| `session-start` | AI session begins | +| `session-end` | AI session ends | +| `pre-tool-use` | Before an AI tool invocation | +| `post-tool-use` | After an AI tool invocation | +| `file-save` | When a file is saved | +| `context-add` | When context is added | + +#### `ctx hook add` + +Create a new hook script from a template. + +```bash +ctx hook add <type> <name> +``` + +**Arguments**: + +- `type`: Hook type (e.g. 
`session-start`, `pre-tool-use`) +- `name`: Script name (without `.sh` extension) + +**Example**: + +```bash +ctx hook add session-start greet +# Created .context/hooks/session-start/greet.sh +``` + +#### `ctx hook list` + +List all discovered hooks with their type and enabled status. + +```bash +ctx hook list +``` + +#### `ctx hook enable` + +Enable a hook by setting the executable permission bit. + +```bash +ctx hook enable <name> +``` + +#### `ctx hook disable` + +Disable a hook by removing the executable permission bit. + +```bash +ctx hook disable <name> +``` + +#### `ctx hook test` + +Run a hook with synthetic input and display the output. + +```bash +ctx hook test <name> +``` + +--- + +### `ctx skill` + +Manage reusable instruction bundles that can be installed into +`.context/skills/`. + +A skill is a directory containing a `SKILL.md` file with YAML +frontmatter (`name`, `description`) and a Markdown instruction body. + +```bash +ctx skill +``` + +#### `ctx skill install` + +Install a skill from a source directory. + +```bash +ctx skill install <source> +``` + +**Arguments**: + +- `source`: Path to a directory containing `SKILL.md` + +**Example**: + +```bash +ctx skill install ./my-skills/code-review +# Installed code-review → .context/skills/code-review +``` + +#### `ctx skill list` + +List all installed skills. + +```bash +ctx skill list +``` + +#### `ctx skill remove` + +Remove an installed skill. 
+ +```bash +ctx skill remove +``` + +**Arguments**: + +- `name`: Skill name to remove diff --git a/docs/home/configuration.md b/docs/home/configuration.md index c39b07f2d..d9be7f6ec 100644 --- a/docs/home/configuration.md +++ b/docs/home/configuration.md @@ -97,6 +97,18 @@ A commented `.ctxrc` showing all options and their defaults: # - nudge # - relay # +# tool: "" # Active AI tool: claude, cursor, cline, kiro, codex +# +# steering: # Steering layer configuration +# dir: .context/steering +# default_inclusion: manual +# default_tools: [] +# +# hooks: # Hook system configuration +# dir: .context/hooks +# timeout: 10 +# enabled: true +# # priority_order: # - CONSTITUTION.md # - TASKS.md @@ -131,6 +143,13 @@ A commented `.ctxrc` showing all options and their defaults: | `task_nudge_interval` | `int` | `5` | Edit/Write calls between task completion nudges | | `notify.events` | `[]string` | *(all)* | Event filter for webhook notifications (empty = all) | | `priority_order` | `[]string` | *(see below)* | Custom file loading priority for context assembly | +| `tool` | `string` | *(empty)* | Active AI tool identifier (`claude`, `cursor`, `cline`, `kiro`, `codex`). 
Used by steering sync and hook dispatch | +| `steering.dir` | `string` | `.context/steering` | Steering files directory | +| `steering.default_inclusion` | `string` | `manual` | Default inclusion mode for new steering files (`always`, `auto`, `manual`) | +| `steering.default_tools` | `[]string` | *(all)* | Default tool filter for new steering files (empty = all tools) | +| `hooks.dir` | `string` | `.context/hooks` | Hook scripts directory | +| `hooks.timeout` | `int` | `10` | Per-hook execution timeout in seconds | +| `hooks.enabled` | `bool` | `true` | Whether hook execution is enabled | **Default priority order** (*used when `priority_order` is not set*): @@ -180,6 +199,7 @@ CLI flags have the highest priority and override both environment variables and |------------------------|-----------------------------------------------------------| | `--context-dir ` | Override context directory (default: `.context/`) | | `--allow-outside-cwd` | Allow context directory outside current working directory | +| `--tool ` | Override active AI tool identifier (e.g. `kiro`, `cursor`) | | `--version` | Show version and exit | | `--help` | Show command help and exit | diff --git a/docs/home/context-files.md b/docs/home/context-files.md index 5bb23de08..9c29ce5ba 100644 --- a/docs/home/context-files.md +++ b/docs/home/context-files.md @@ -30,6 +30,9 @@ Files are designed to be human-readable, AI-parseable, and token-efficient. 
| `GLOSSARY.md` | Domain terms and abbreviations | 7 | | `AGENT_PLAYBOOK.md` | Instructions for AI tools | 8 (lowest) | | `templates/` | Entry format templates for `ctx add` | (optional) | +| `steering/` | Behavioral rules with YAML frontmatter | (optional) | +| `hooks/` | Lifecycle hook scripts | (optional) | +| `skills/` | Reusable instruction bundles | (optional) | ## Read Order Rationale diff --git a/docs/home/getting-started.md b/docs/home/getting-started.md index 47908be86..c69dfac34 100644 --- a/docs/home/getting-started.md +++ b/docs/home/getting-started.md @@ -226,6 +226,36 @@ For other tools, paste the output of: ctx agent --budget 8000 ``` +### 3b. Set Up for Your AI Tool + +If you use an MCP-compatible tool, generate the integration config +with `ctx setup`: + +=== "Kiro" + + ```bash + ctx setup kiro --write + # Creates .kiro/settings/mcp.json and syncs steering files + ``` + +=== "Cursor" + + ```bash + ctx setup cursor --write + # Creates .cursor/mcp.json and syncs steering files + ``` + +=== "Cline" + + ```bash + ctx setup cline --write + # Creates .vscode/mcp.json and syncs steering files + ``` + +This registers the ctx MCP server and syncs any +[steering files](../cli/tools.md#ctx-steering) into the tool's +native format. Re-run after adding or changing steering files. + ### 4. Verify It Works Ask your AI: **"Do you remember?"** diff --git a/docs/superpowers/plans/2026-03-31-commit-context-tracing.md b/docs/superpowers/plans/2026-03-31-commit-context-tracing.md new file mode 100644 index 000000000..597d1ff77 --- /dev/null +++ b/docs/superpowers/plans/2026-03-31-commit-context-tracing.md @@ -0,0 +1,3508 @@ +# Commit Context Tracing Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. 
+ +**Goal:** Link every git commit back to the decisions, tasks, learnings, and sessions that motivated it via `ctx trace`. + +**Architecture:** New `internal/trace` package provides the core logic (pending context recording, three-source detection, history/override storage, reference resolution). A new `internal/cli/trace` package wires it into the Cobra CLI as `ctx trace`. Existing commands (`ctx add`, `ctx complete`) gain a one-line `trace.Record()` side-effect. A `ctx trace hook` subcommand generates a prepare-commit-msg shell script that delegates to `ctx trace collect`. + +**Tech Stack:** Go, Cobra CLI, JSONL storage, git trailers, prepare-commit-msg hook + +--- + +## File Structure + +### New files + +| File | Responsibility | +|------|---------------| +| `internal/trace/pending.go` | Record/read/truncate pending context refs in `state/pending-context.jsonl` | +| `internal/trace/pending_test.go` | Tests for pending context operations | +| `internal/trace/staged.go` | Detect context refs from staged `.context/` file diffs | +| `internal/trace/staged_test.go` | Tests for staged file analysis | +| `internal/trace/working.go` | Detect context refs from current working state (in-progress tasks, session env) | +| `internal/trace/working_test.go` | Tests for working state detection | +| `internal/trace/collect.go` | Merge + deduplicate refs from all three sources | +| `internal/trace/collect_test.go` | Tests for collection/merge | +| `internal/trace/history.go` | Read/write `trace/history.jsonl` and `trace/overrides.jsonl` | +| `internal/trace/history_test.go` | Tests for history/override storage | +| `internal/trace/resolve.go` | Resolve ref strings to human-readable context (read DECISIONS.md, etc.) 
|
+| `internal/trace/resolve_test.go` | Tests for reference resolution |
+| `internal/trace/types.go` | Shared types: `PendingEntry`, `HistoryEntry`, `OverrideEntry`, `Ref`, `ResolvedRef` |
+| `internal/trace/doc.go` | Package documentation |
+| `internal/cli/trace/trace.go` | Top-level `Cmd()` that returns the `trace` cobra.Command |
+| `internal/cli/trace/cmd/show/cmd.go` | `ctx trace <commit>` and `ctx trace --last N` command definition |
+| `internal/cli/trace/cmd/show/run.go` | Execution logic for showing commit context |
+| `internal/cli/trace/cmd/file/cmd.go` | `ctx trace file <path>` command definition |
+| `internal/cli/trace/cmd/file/run.go` | Execution logic for file tracing |
+| `internal/cli/trace/cmd/tag/cmd.go` | `ctx trace tag <commit> <ref>` command definition |
+| `internal/cli/trace/cmd/tag/run.go` | Execution logic for manual tagging |
+| `internal/cli/trace/cmd/collect/cmd.go` | `ctx trace collect` — called by the hook to collect and output trailer |
+| `internal/cli/trace/cmd/collect/run.go` | Execution logic for collect |
+| `internal/cli/trace/cmd/hook/cmd.go` | `ctx trace hook enable/disable` — manages prepare-commit-msg hook |
+| `internal/cli/trace/cmd/hook/run.go` | Hook management logic |
+| `internal/config/embed/cmd/trace.go` | Use strings and DescKey constants for trace commands |
+| `internal/err/trace/trace.go` | Error constructors for trace operations |
+| `internal/err/trace/doc.go` | Package documentation |
+| `internal/write/trace/trace.go` | Output formatters for trace results |
+
+### Modified files
+
+| File | Change |
+|------|--------|
+| `internal/cli/add/cmd/root/run.go` | Add `trace.Record()` call after successful write |
+| `internal/cli/task/cmd/complete/run.go` | Add `trace.Record()` call after marking complete |
+| `internal/bootstrap/group.go` | Register `trace.Cmd` in the diagnostics group |
+| `internal/config/embed/cmd/base.go` | Add `UseTrace` and `DescKeyTrace` constants |
+| `internal/config/dir/dir.go` | Add `Trace = "trace"` constant
| +| `internal/assets/commands/commands.yaml` | Add trace command descriptions | +| `internal/assets/commands/text/write.yaml` | Add trace output format strings | + +--- + +## Task 1: Core Types and Pending Context Recording + +**Files:** +- Create: `internal/trace/doc.go` +- Create: `internal/trace/types.go` +- Create: `internal/trace/pending.go` +- Create: `internal/trace/pending_test.go` +- Modify: `internal/config/dir/dir.go` + +### Steps + +- [ ] **Step 1: Add Trace directory constant** + +In `internal/config/dir/dir.go`, add the `Trace` constant: + +```go +// Trace is the subdirectory for commit context tracing within .context/. +Trace = "trace" +``` + +Add it after the `State` constant in the same `const` block. + +- [ ] **Step 2: Create trace package doc** + +Create `internal/trace/doc.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package trace provides commit context tracing — linking git commits +// back to the decisions, tasks, learnings, and sessions that motivated them. +package trace +``` + +- [ ] **Step 3: Create shared types** + +Create `internal/trace/types.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trace + +import "time" + +// PendingEntry is a single pending context reference accumulated +// between commits. +type PendingEntry struct { + Ref string `json:"ref"` + Timestamp time.Time `json:"timestamp"` +} + +// HistoryEntry is a permanent record of a commit's context references. +type HistoryEntry struct { + Commit string `json:"commit"` + Refs []string `json:"refs"` + Message string `json:"message"` + Timestamp time.Time `json:"timestamp"` +} + +// OverrideEntry is a manual context tag added to an existing commit. 
+type OverrideEntry struct { + Commit string `json:"commit"` + Refs []string `json:"refs"` + Timestamp time.Time `json:"timestamp"` +} + +// ResolvedRef holds a resolved context reference with its display text. +type ResolvedRef struct { + Raw string // Original ref string (e.g., "decision:12") + Type string // "decision", "learning", "task", "convention", "session", "note" + Number int // Entry number (0 for session/note types) + Title string // Resolved title or content + Detail string // Additional detail (rationale, status, etc.) + Found bool // Whether the reference was resolved +} +``` + +- [ ] **Step 4: Write failing test for Record** + +Create `internal/trace/pending_test.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trace + +import ( + "encoding/json" + "os" + "path/filepath" + "strings" + "testing" +) + +func TestRecord(t *testing.T) { + tmpDir := t.TempDir() + stateDir := filepath.Join(tmpDir, "state") + if err := os.MkdirAll(stateDir, 0750); err != nil { + t.Fatal(err) + } + + if err := Record("decision:1", stateDir); err != nil { + t.Fatalf("Record failed: %v", err) + } + + data, err := os.ReadFile(filepath.Join(stateDir, pendingFile)) + if err != nil { + t.Fatalf("read pending file: %v", err) + } + + var entry PendingEntry + if err := json.Unmarshal(data, &entry); err != nil { + t.Fatalf("unmarshal: %v", err) + } + + if entry.Ref != "decision:1" { + t.Errorf("got ref %q, want %q", entry.Ref, "decision:1") + } + if entry.Timestamp.IsZero() { + t.Error("timestamp should not be zero") + } +} + +func TestRecordMultiple(t *testing.T) { + tmpDir := t.TempDir() + stateDir := filepath.Join(tmpDir, "state") + if err := os.MkdirAll(stateDir, 0750); err != nil { + t.Fatal(err) + } + + _ = Record("decision:1", stateDir) + _ = Record("task:3", stateDir) + _ = Record("session:abc123", stateDir) + + data, err := 
os.ReadFile(filepath.Join(stateDir, pendingFile)) + if err != nil { + t.Fatal(err) + } + + lines := strings.Split(strings.TrimSpace(string(data)), "\n") + if len(lines) != 3 { + t.Fatalf("expected 3 lines, got %d", len(lines)) + } +} + +func TestReadPending(t *testing.T) { + tmpDir := t.TempDir() + stateDir := filepath.Join(tmpDir, "state") + if err := os.MkdirAll(stateDir, 0750); err != nil { + t.Fatal(err) + } + + _ = Record("decision:1", stateDir) + _ = Record("task:3", stateDir) + + entries, err := ReadPending(stateDir) + if err != nil { + t.Fatalf("ReadPending: %v", err) + } + + if len(entries) != 2 { + t.Fatalf("expected 2 entries, got %d", len(entries)) + } + if entries[0].Ref != "decision:1" { + t.Errorf("first ref: got %q, want %q", entries[0].Ref, "decision:1") + } + if entries[1].Ref != "task:3" { + t.Errorf("second ref: got %q, want %q", entries[1].Ref, "task:3") + } +} + +func TestReadPendingEmpty(t *testing.T) { + tmpDir := t.TempDir() + stateDir := filepath.Join(tmpDir, "state") + if err := os.MkdirAll(stateDir, 0750); err != nil { + t.Fatal(err) + } + + entries, err := ReadPending(stateDir) + if err != nil { + t.Fatalf("ReadPending on missing file: %v", err) + } + if len(entries) != 0 { + t.Errorf("expected 0 entries, got %d", len(entries)) + } +} + +func TestTruncatePending(t *testing.T) { + tmpDir := t.TempDir() + stateDir := filepath.Join(tmpDir, "state") + if err := os.MkdirAll(stateDir, 0750); err != nil { + t.Fatal(err) + } + + _ = Record("decision:1", stateDir) + _ = Record("task:3", stateDir) + + if err := TruncatePending(stateDir); err != nil { + t.Fatalf("TruncatePending: %v", err) + } + + entries, err := ReadPending(stateDir) + if err != nil { + t.Fatal(err) + } + if len(entries) != 0 { + t.Errorf("expected 0 entries after truncate, got %d", len(entries)) + } +} +``` + +- [ ] **Step 5: Run test to verify it fails** + +Run: `cd /Users/parlakisik/projects/github/ctx && go test ./internal/trace/ -run TestRecord -v` +Expected: FAIL — 
functions not defined + +- [ ] **Step 6: Implement pending.go** + +Create `internal/trace/pending.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trace + +import ( + "bufio" + "encoding/json" + "os" + "path/filepath" + "time" + + "github.com/ActiveMemory/ctx/internal/config/fs" +) + +const pendingFile = "pending-context.jsonl" + +// Record appends a context reference to the pending context file. +// This is best-effort: errors are returned but callers should treat +// them as non-fatal. +// +// Parameters: +// - ref: Context reference string (e.g., "decision:12", "task:3") +// - stateDir: Path to the state directory (.context/state/) +// +// Returns: +// - error: Non-nil if the file cannot be opened or written +func Record(ref, stateDir string) error { + if err := os.MkdirAll(stateDir, fs.PermRestrictedDir); err != nil { + return err + } + + p := filepath.Join(stateDir, pendingFile) + + f, err := os.OpenFile(p, os.O_APPEND|os.O_CREATE|os.O_WRONLY, fs.PermFile) + if err != nil { + return err + } + defer f.Close() + + entry := PendingEntry{Ref: ref, Timestamp: time.Now().UTC()} + return json.NewEncoder(f).Encode(entry) +} + +// ReadPending reads all pending context entries from the state directory. +// Returns an empty slice if the file does not exist. 
+//
+// Parameters:
+//   - stateDir: Path to the state directory (.context/state/)
+//
+// Returns:
+//   - []PendingEntry: Parsed entries
+//   - error: Non-nil on read or parse failure
+func ReadPending(stateDir string) ([]PendingEntry, error) {
+	p := filepath.Join(stateDir, pendingFile)
+
+	f, err := os.Open(filepath.Clean(p))
+	if err != nil {
+		if os.IsNotExist(err) {
+			return nil, nil
+		}
+		return nil, err
+	}
+	defer f.Close()
+
+	var entries []PendingEntry
+	scanner := bufio.NewScanner(f)
+	for scanner.Scan() {
+		line := scanner.Text()
+		if line == "" {
+			continue
+		}
+		var entry PendingEntry
+		if jsonErr := json.Unmarshal([]byte(line), &entry); jsonErr != nil {
+			continue // skip malformed lines
+		}
+		entries = append(entries, entry)
+	}
+
+	return entries, scanner.Err()
+}
+
+// TruncatePending clears the pending context file after a commit.
+// A missing file is treated as already empty rather than an error,
+// since the hook fires after every commit, including ones with no
+// recorded context.
+//
+// Parameters:
+//   - stateDir: Path to the state directory (.context/state/)
+//
+// Returns:
+//   - error: Non-nil if truncation fails
+func TruncatePending(stateDir string) error {
+	p := filepath.Join(stateDir, pendingFile)
+	if err := os.Truncate(p, 0); err != nil && !os.IsNotExist(err) {
+		return err
+	}
+	return nil
+}
+```
+
+- [ ] **Step 7: Run tests to verify they pass**
+
+Run: `cd /Users/parlakisik/projects/github/ctx && go test ./internal/trace/ -v`
+Expected: All PASS
+
+- [ ] **Step 8: Commit**
+
+```bash
+git add internal/trace/doc.go internal/trace/types.go internal/trace/pending.go internal/trace/pending_test.go internal/config/dir/dir.go
+git commit -m "feat(trace): add pending context recording"
+```
+
+---
+
+## Task 2: History and Override Storage
+
+**Files:**
+- Create: `internal/trace/history.go`
+- Create: `internal/trace/history_test.go`
+
+### Steps
+
+- [ ] **Step 1: Write failing test for history operations**
+
+Create `internal/trace/history_test.go`:
+
+```go
+// / ctx: https://ctx.ist
+// ,'`./ do you remember?
+// `.,'\
+// \ Copyright 2026-present Context contributors.
+// SPDX-License-Identifier: Apache-2.0 + +package trace + +import ( + "os" + "path/filepath" + "testing" +) + +func TestWriteHistory(t *testing.T) { + tmpDir := t.TempDir() + traceDir := filepath.Join(tmpDir, "trace") + if err := os.MkdirAll(traceDir, 0750); err != nil { + t.Fatal(err) + } + + entry := HistoryEntry{ + Commit: "abc123", + Refs: []string{"decision:12", "task:8"}, + Message: "Fix auth token expiry", + } + + if err := WriteHistory(entry, traceDir); err != nil { + t.Fatalf("WriteHistory: %v", err) + } + + entries, err := ReadHistory(traceDir) + if err != nil { + t.Fatalf("ReadHistory: %v", err) + } + + if len(entries) != 1 { + t.Fatalf("expected 1 entry, got %d", len(entries)) + } + if entries[0].Commit != "abc123" { + t.Errorf("commit: got %q, want %q", entries[0].Commit, "abc123") + } + if len(entries[0].Refs) != 2 { + t.Errorf("refs count: got %d, want 2", len(entries[0].Refs)) + } +} + +func TestReadHistoryForCommit(t *testing.T) { + tmpDir := t.TempDir() + traceDir := filepath.Join(tmpDir, "trace") + if err := os.MkdirAll(traceDir, 0750); err != nil { + t.Fatal(err) + } + + _ = WriteHistory(HistoryEntry{ + Commit: "abc123", Refs: []string{"decision:12"}, Message: "First", + }, traceDir) + _ = WriteHistory(HistoryEntry{ + Commit: "def456", Refs: []string{"task:3"}, Message: "Second", + }, traceDir) + + entry, found := ReadHistoryForCommit("abc123", traceDir) + if !found { + t.Fatal("expected to find commit abc123") + } + if entry.Commit != "abc123" { + t.Errorf("got commit %q", entry.Commit) + } + + _, found = ReadHistoryForCommit("missing", traceDir) + if found { + t.Error("should not find missing commit") + } +} + +func TestWriteOverride(t *testing.T) { + tmpDir := t.TempDir() + traceDir := filepath.Join(tmpDir, "trace") + if err := os.MkdirAll(traceDir, 0750); err != nil { + t.Fatal(err) + } + + entry := OverrideEntry{ + Commit: "abc123", + Refs: []string{`"Hotfix for production outage"`}, + } + + if err := WriteOverride(entry, traceDir); err != 
nil { + t.Fatalf("WriteOverride: %v", err) + } + + entries, err := ReadOverrides(traceDir) + if err != nil { + t.Fatalf("ReadOverrides: %v", err) + } + + if len(entries) != 1 { + t.Fatalf("expected 1 entry, got %d", len(entries)) + } + if entries[0].Commit != "abc123" { + t.Errorf("commit: got %q", entries[0].Commit) + } +} + +func TestReadOverridesForCommit(t *testing.T) { + tmpDir := t.TempDir() + traceDir := filepath.Join(tmpDir, "trace") + if err := os.MkdirAll(traceDir, 0750); err != nil { + t.Fatal(err) + } + + _ = WriteOverride(OverrideEntry{ + Commit: "abc123", Refs: []string{`"Note one"`}, + }, traceDir) + _ = WriteOverride(OverrideEntry{ + Commit: "abc123", Refs: []string{`"Note two"`}, + }, traceDir) + _ = WriteOverride(OverrideEntry{ + Commit: "def456", Refs: []string{"decision:5"}, + }, traceDir) + + refs := ReadOverridesForCommit("abc123", traceDir) + if len(refs) != 2 { + t.Fatalf("expected 2 override refs for abc123, got %d", len(refs)) + } +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `cd /Users/parlakisik/projects/github/ctx && go test ./internal/trace/ -run TestWriteHistory -v` +Expected: FAIL — functions not defined + +- [ ] **Step 3: Implement history.go** + +Create `internal/trace/history.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trace + +import ( + "bufio" + "encoding/json" + "os" + "path/filepath" + "strings" + "time" + + "github.com/ActiveMemory/ctx/internal/config/fs" +) + +const ( + historyFile = "history.jsonl" + overrideFile = "overrides.jsonl" +) + +// WriteHistory appends a commit context record to history.jsonl. 
+// +// Parameters: +// - entry: The history entry to write +// - traceDir: Path to the trace directory (.context/trace/) +// +// Returns: +// - error: Non-nil if the file cannot be opened or written +func WriteHistory(entry HistoryEntry, traceDir string) error { + if err := os.MkdirAll(traceDir, fs.PermRestrictedDir); err != nil { + return err + } + + if entry.Timestamp.IsZero() { + entry.Timestamp = time.Now().UTC() + } + + p := filepath.Join(traceDir, historyFile) + f, err := os.OpenFile(p, os.O_APPEND|os.O_CREATE|os.O_WRONLY, fs.PermFile) + if err != nil { + return err + } + defer f.Close() + + return json.NewEncoder(f).Encode(entry) +} + +// ReadHistory reads all history entries from the trace directory. +// +// Parameters: +// - traceDir: Path to the trace directory (.context/trace/) +// +// Returns: +// - []HistoryEntry: Parsed entries (may be empty) +// - error: Non-nil on read or parse failure +func ReadHistory(traceDir string) ([]HistoryEntry, error) { + return readJSONL[HistoryEntry](filepath.Join(traceDir, historyFile)) +} + +// ReadHistoryForCommit finds the history entry for a specific commit. +// Matches by prefix to support short commit hashes. +// +// Parameters: +// - commitHash: Full or abbreviated commit hash +// - traceDir: Path to the trace directory +// +// Returns: +// - HistoryEntry: The matching entry +// - bool: True if found +func ReadHistoryForCommit(commitHash, traceDir string) (HistoryEntry, bool) { + entries, err := ReadHistory(traceDir) + if err != nil { + return HistoryEntry{}, false + } + + for _, e := range entries { + if strings.HasPrefix(e.Commit, commitHash) || strings.HasPrefix(commitHash, e.Commit) { + return e, true + } + } + return HistoryEntry{}, false +} + +// WriteOverride appends a manual tag entry to overrides.jsonl. 
+// +// Parameters: +// - entry: The override entry to write +// - traceDir: Path to the trace directory (.context/trace/) +// +// Returns: +// - error: Non-nil if the file cannot be opened or written +func WriteOverride(entry OverrideEntry, traceDir string) error { + if err := os.MkdirAll(traceDir, fs.PermRestrictedDir); err != nil { + return err + } + + if entry.Timestamp.IsZero() { + entry.Timestamp = time.Now().UTC() + } + + p := filepath.Join(traceDir, overrideFile) + f, err := os.OpenFile(p, os.O_APPEND|os.O_CREATE|os.O_WRONLY, fs.PermFile) + if err != nil { + return err + } + defer f.Close() + + return json.NewEncoder(f).Encode(entry) +} + +// ReadOverrides reads all override entries from the trace directory. +// +// Parameters: +// - traceDir: Path to the trace directory (.context/trace/) +// +// Returns: +// - []OverrideEntry: Parsed entries (may be empty) +// - error: Non-nil on read or parse failure +func ReadOverrides(traceDir string) ([]OverrideEntry, error) { + return readJSONL[OverrideEntry](filepath.Join(traceDir, overrideFile)) +} + +// ReadOverridesForCommit collects all override refs for a specific commit. +// +// Parameters: +// - commitHash: Full or abbreviated commit hash +// - traceDir: Path to the trace directory +// +// Returns: +// - []string: All override refs for this commit +func ReadOverridesForCommit(commitHash, traceDir string) []string { + entries, err := ReadOverrides(traceDir) + if err != nil { + return nil + } + + var refs []string + for _, e := range entries { + if strings.HasPrefix(e.Commit, commitHash) || strings.HasPrefix(commitHash, e.Commit) { + refs = append(refs, e.Refs...) + } + } + return refs +} + +// readJSONL is a generic helper for reading JSONL files. 
+func readJSONL[T any](path string) ([]T, error) { + f, err := os.Open(filepath.Clean(path)) + if err != nil { + if os.IsNotExist(err) { + return nil, nil + } + return nil, err + } + defer f.Close() + + var entries []T + scanner := bufio.NewScanner(f) + for scanner.Scan() { + line := scanner.Text() + if line == "" { + continue + } + var entry T + if jsonErr := json.Unmarshal([]byte(line), &entry); jsonErr != nil { + continue + } + entries = append(entries, entry) + } + + return entries, scanner.Err() +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `cd /Users/parlakisik/projects/github/ctx && go test ./internal/trace/ -v` +Expected: All PASS + +- [ ] **Step 5: Commit** + +```bash +git add internal/trace/history.go internal/trace/history_test.go +git commit -m "feat(trace): add history and override storage" +``` + +--- + +## Task 3: Staged File Analysis (Source 2) + +**Files:** +- Create: `internal/trace/staged.go` +- Create: `internal/trace/staged_test.go` + +### Steps + +- [ ] **Step 1: Write failing test for staged detection** + +Create `internal/trace/staged_test.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package trace + +import "testing" + +func TestParseAddedDecisions(t *testing.T) { + diff := `+## [2026-03-14-100000] Use short-lived tokens ++ ++**Context:** Security review ++ ++## [2026-03-14-110000] Rate limiting strategy` + + refs := ParseAddedEntries(diff, "decision") + if len(refs) != 2 { + t.Fatalf("expected 2 refs, got %d", len(refs)) + } + if refs[0] != "decision:1" || refs[1] != "decision:2" { + t.Errorf("got refs %v", refs) + } +} + +func TestParseAddedLearnings(t *testing.T) { + diff := `+## [2026-03-14-100000] Always check for nil + ## [2026-03-01-090000] Existing learning` + + refs := ParseAddedEntries(diff, "learning") + if len(refs) != 1 { + t.Fatalf("expected 1 ref, got %d", len(refs)) + } + if refs[0] != "learning:1" { + t.Errorf("got ref %q, want %q", refs[0], "learning:1") + } +} + +func TestParseAddedTasks(t *testing.T) { + diff := `+- [x] Implement auth handler #done:2026-03-14-100000 + - [ ] Write tests ++- [x] Add rate limiting #done:2026-03-14-110000` + + refs := ParseCompletedTasks(diff) + if len(refs) != 2 { + t.Fatalf("expected 2 refs, got %d", len(refs)) + } +} + +func TestParseNoAdditions(t *testing.T) { + diff := ` ## [2026-03-01-090000] Existing entry + - [ ] Existing task` + + refs := ParseAddedEntries(diff, "decision") + if len(refs) != 0 { + t.Errorf("expected 0 refs, got %d", len(refs)) + } +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `cd /Users/parlakisik/projects/github/ctx && go test ./internal/trace/ -run TestParseAdded -v` +Expected: FAIL — functions not defined + +- [ ] **Step 3: Implement staged.go** + +Create `internal/trace/staged.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package trace + +import ( + "fmt" + "os/exec" + "path/filepath" + "strings" + + "github.com/ActiveMemory/ctx/internal/config/ctx" + "github.com/ActiveMemory/ctx/internal/config/regex" +) + +// StagedRefs detects context references from staged .context/ files +// by examining git diff output. +// +// Parameters: +// - contextDir: Path to the .context/ directory +// +// Returns: +// - []string: Detected references (e.g., "decision:1", "task:3") +func StagedRefs(contextDir string) []string { + var refs []string + + files := []struct { + name string + entryType string + }{ + {ctx.Decision, "decision"}, + {ctx.Learning, "learning"}, + {ctx.Convention, "convention"}, + } + + for _, f := range files { + diff := stagedDiff(filepath.Join(contextDir, f.name)) + if diff == "" { + continue + } + refs = append(refs, ParseAddedEntries(diff, f.entryType)...) + } + + // Check TASKS.md for newly completed tasks + taskDiff := stagedDiff(filepath.Join(contextDir, ctx.Task)) + if taskDiff != "" { + refs = append(refs, ParseCompletedTasks(taskDiff)...) + } + + return refs +} + +// ParseAddedEntries extracts entry numbers from added lines in a diff. +// Only lines prefixed with "+" that match the entry header pattern are counted. +// +// Parameters: +// - diff: Git diff output +// - entryType: The reference type prefix ("decision", "learning", "convention") +// +// Returns: +// - []string: Refs like "decision:1", "decision:2" +func ParseAddedEntries(diff, entryType string) []string { + var refs []string + count := 0 + + for _, line := range strings.Split(diff, "\n") { + if !strings.HasPrefix(line, "+") { + continue + } + // Remove the leading "+" to match the regex + content := line[1:] + if regex.EntryHeader.MatchString(content) { + count++ + refs = append(refs, fmt.Sprintf("%s:%d", entryType, count)) + } + } + + return refs +} + +// ParseCompletedTasks extracts task refs from newly completed tasks +// in a diff. 
Lines that are added ("+") and contain "[x]" are counted. +// +// Parameters: +// - diff: Git diff output for TASKS.md +// +// Returns: +// - []string: Refs like "task:1", "task:2" +func ParseCompletedTasks(diff string) []string { + var refs []string + count := 0 + + for _, line := range strings.Split(diff, "\n") { + if !strings.HasPrefix(line, "+") { + continue + } + content := line[1:] + match := regex.Task.FindStringSubmatch(content) + if match != nil && (len(match) > 2 && match[2] == "x") { + count++ + refs = append(refs, fmt.Sprintf("task:%d", count)) + } + } + + return refs +} + +// stagedDiff returns the staged diff for a specific file. +// Returns empty string if the file is not staged or git is not available. +func stagedDiff(filePath string) string { + cmd := exec.Command("git", "diff", "--cached", "--", filePath) + out, err := cmd.Output() + if err != nil { + return "" + } + return string(out) +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `cd /Users/parlakisik/projects/github/ctx && go test ./internal/trace/ -run "TestParseAdded|TestParseCompleted|TestParseNo" -v` +Expected: All PASS + +- [ ] **Step 5: Commit** + +```bash +git add internal/trace/staged.go internal/trace/staged_test.go +git commit -m "feat(trace): add staged file analysis for context detection" +``` + +--- + +## Task 4: Working State Detection (Source 3) + +**Files:** +- Create: `internal/trace/working.go` +- Create: `internal/trace/working_test.go` + +### Steps + +- [ ] **Step 1: Write failing test for working state** + +Create `internal/trace/working_test.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package trace + +import ( + "os" + "path/filepath" + "testing" +) + +func TestWorkingRefsInProgressTasks(t *testing.T) { + tmpDir := t.TempDir() + contextDir := tmpDir + + tasksContent := `# Tasks + +- [ ] Implement auth handler +- [x] Write unit tests +- [ ] Add rate limiting +` + if err := os.WriteFile( + filepath.Join(contextDir, "TASKS.md"), + []byte(tasksContent), 0644, + ); err != nil { + t.Fatal(err) + } + + refs := WorkingRefs(contextDir) + + // Should find 2 in-progress tasks: task:1 and task:2 + found := map[string]bool{} + for _, r := range refs { + found[r] = true + } + + if !found["task:1"] { + t.Error("expected task:1 for 'Implement auth handler'") + } + if !found["task:2"] { + t.Error("expected task:2 for 'Add rate limiting'") + } + if found["task:3"] { + t.Error("should not find task:3 — completed tasks are excluded") + } +} + +func TestWorkingRefsSessionEnv(t *testing.T) { + tmpDir := t.TempDir() + contextDir := tmpDir + + // Write empty TASKS.md + if err := os.WriteFile( + filepath.Join(contextDir, "TASKS.md"), + []byte("# Tasks\n"), 0644, + ); err != nil { + t.Fatal(err) + } + + t.Setenv("CTX_SESSION_ID", "test-session-42") + + refs := WorkingRefs(contextDir) + + found := false + for _, r := range refs { + if r == "session:test-session-42" { + found = true + } + } + if !found { + t.Error("expected session:test-session-42 from env") + } +} + +func TestWorkingRefsNoTasksFile(t *testing.T) { + tmpDir := t.TempDir() + refs := WorkingRefs(tmpDir) + + // No TASKS.md should not panic, just return empty or session-only + _ = refs +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `cd /Users/parlakisik/projects/github/ctx && go test ./internal/trace/ -run TestWorkingRefs -v` +Expected: FAIL — function not defined + +- [ ] **Step 3: Implement working.go** + +Create `internal/trace/working.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? 
+// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trace + +import ( + "fmt" + "os" + "path/filepath" + "strings" + + ctxCfg "github.com/ActiveMemory/ctx/internal/config/ctx" + "github.com/ActiveMemory/ctx/internal/config/regex" + "github.com/ActiveMemory/ctx/internal/task" +) + +const envSessionID = "CTX_SESSION_ID" + +// WorkingRefs detects context references from the current working state. +// This includes in-progress tasks and the active AI session. +// +// Parameters: +// - contextDir: Path to the .context/ directory +// +// Returns: +// - []string: Detected references +func WorkingRefs(contextDir string) []string { + var refs []string + + refs = append(refs, inProgressTaskRefs(contextDir)...) + + if sessionID := os.Getenv(envSessionID); sessionID != "" { + refs = append(refs, "session:"+sessionID) + } + + return refs +} + +// inProgressTaskRefs reads TASKS.md and returns refs for in-progress +// (pending, non-subtask) tasks. 
+func inProgressTaskRefs(contextDir string) []string { + tasksPath := filepath.Join(contextDir, ctxCfg.Task) + content, err := os.ReadFile(filepath.Clean(tasksPath)) + if err != nil { + return nil + } + + var refs []string + pendingCount := 0 + lines := strings.Split(string(content), "\n") + + for _, line := range lines { + match := regex.Task.FindStringSubmatch(line) + if match == nil { + continue + } + if task.Sub(match) { + continue // skip subtasks + } + if task.Pending(match) { + pendingCount++ + refs = append(refs, fmt.Sprintf("task:%d", pendingCount)) + } + } + + return refs +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `cd /Users/parlakisik/projects/github/ctx && go test ./internal/trace/ -run TestWorkingRefs -v` +Expected: All PASS + +- [ ] **Step 5: Commit** + +```bash +git add internal/trace/working.go internal/trace/working_test.go +git commit -m "feat(trace): add working state detection for in-progress tasks and sessions" +``` + +--- + +## Task 5: Collect — Merge and Deduplicate from All Sources + +**Files:** +- Create: `internal/trace/collect.go` +- Create: `internal/trace/collect_test.go` + +### Steps + +- [ ] **Step 1: Write failing test for Collect** + +Create `internal/trace/collect_test.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package trace + +import ( + "os" + "path/filepath" + "testing" +) + +func TestCollectDeduplicates(t *testing.T) { + tmpDir := t.TempDir() + contextDir := tmpDir + stateDir := filepath.Join(contextDir, "state") + if err := os.MkdirAll(stateDir, 0750); err != nil { + t.Fatal(err) + } + + // Write TASKS.md with one in-progress task + if err := os.WriteFile( + filepath.Join(contextDir, "TASKS.md"), + []byte("# Tasks\n\n- [ ] Implement auth handler\n"), 0644, + ); err != nil { + t.Fatal(err) + } + + // Record the same task in pending context + _ = Record("task:1", stateDir) + // And a decision + _ = Record("decision:5", stateDir) + + refs := Collect(contextDir) + + // task:1 appears in both pending and working state — should be deduplicated + taskCount := 0 + decisionCount := 0 + for _, r := range refs { + if r == "task:1" { + taskCount++ + } + if r == "decision:5" { + decisionCount++ + } + } + + if taskCount != 1 { + t.Errorf("task:1 should appear exactly once, got %d", taskCount) + } + if decisionCount != 1 { + t.Errorf("decision:5 should appear exactly once, got %d", decisionCount) + } +} + +func TestCollectEmptyReturnsNil(t *testing.T) { + tmpDir := t.TempDir() + stateDir := filepath.Join(tmpDir, "state") + if err := os.MkdirAll(stateDir, 0750); err != nil { + t.Fatal(err) + } + + // Empty TASKS.md, no pending context + if err := os.WriteFile( + filepath.Join(tmpDir, "TASKS.md"), + []byte("# Tasks\n"), 0644, + ); err != nil { + t.Fatal(err) + } + + refs := Collect(tmpDir) + if len(refs) != 0 { + t.Errorf("expected empty refs, got %v", refs) + } +} + +func TestFormatTrailer(t *testing.T) { + refs := []string{"decision:12", "task:8", "session:abc123"} + trailer := FormatTrailer(refs) + want := "ctx-context: decision:12, task:8, session:abc123" + if trailer != want { + t.Errorf("got %q, want %q", trailer, want) + } +} + +func TestFormatTrailerEmpty(t *testing.T) { + trailer := FormatTrailer(nil) + if trailer != "" { + 
t.Errorf("expected empty trailer, got %q", trailer) + } +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `cd /Users/parlakisik/projects/github/ctx && go test ./internal/trace/ -run TestCollect -v` +Expected: FAIL — functions not defined + +- [ ] **Step 3: Implement collect.go** + +Create `internal/trace/collect.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trace + +import ( + "path/filepath" + "strings" + + "github.com/ActiveMemory/ctx/internal/config/dir" +) + +const trailerKey = "ctx-context" + +// Collect gathers context references from all three sources +// (pending, staged, working state), merges, and deduplicates them. +// +// Parameters: +// - contextDir: Path to the .context/ directory +// +// Returns: +// - []string: Deduplicated context references +func Collect(contextDir string) []string { + stateDir := filepath.Join(contextDir, dir.State) + + var all []string + + // Source 1: Pending context + pending, _ := ReadPending(stateDir) + for _, p := range pending { + all = append(all, p.Ref) + } + + // Source 2: Staged file analysis + all = append(all, StagedRefs(contextDir)...) + + // Source 3: Current working state + all = append(all, WorkingRefs(contextDir)...) + + return deduplicate(all) +} + +// FormatTrailer formats refs as a git commit trailer string. +// Returns empty string if refs is empty. +// +// Parameters: +// - refs: Context references to include +// +// Returns: +// - string: Formatted trailer line (e.g., "ctx-context: decision:12, task:8") +func FormatTrailer(refs []string) string { + if len(refs) == 0 { + return "" + } + return trailerKey + ": " + strings.Join(refs, ", ") +} + +// deduplicate removes duplicate refs while preserving order. 
+func deduplicate(refs []string) []string { + seen := make(map[string]bool, len(refs)) + var result []string + for _, r := range refs { + if !seen[r] { + seen[r] = true + result = append(result, r) + } + } + return result +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `cd /Users/parlakisik/projects/github/ctx && go test ./internal/trace/ -v` +Expected: All PASS + +- [ ] **Step 5: Commit** + +```bash +git add internal/trace/collect.go internal/trace/collect_test.go +git commit -m "feat(trace): add three-source collection with deduplication" +``` + +--- + +## Task 6: Reference Resolution + +**Files:** +- Create: `internal/trace/resolve.go` +- Create: `internal/trace/resolve_test.go` + +### Steps + +- [ ] **Step 1: Write failing test for resolution** + +Create `internal/trace/resolve_test.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trace + +import ( + "os" + "path/filepath" + "testing" +) + +func TestParseRef(t *testing.T) { + tests := []struct { + input string + wantType string + wantNum int + wantText string + }{ + {"decision:12", "decision", 12, ""}, + {"learning:5", "learning", 5, ""}, + {"task:8", "task", 8, ""}, + {"convention:3", "convention", 3, ""}, + {"session:abc123", "session", 0, "abc123"}, + {`"Hotfix for prod outage"`, "note", 0, "Hotfix for prod outage"}, + } + + for _, tt := range tests { + t.Run(tt.input, func(t *testing.T) { + typ, num, text := ParseRef(tt.input) + if typ != tt.wantType { + t.Errorf("type: got %q, want %q", typ, tt.wantType) + } + if num != tt.wantNum { + t.Errorf("number: got %d, want %d", num, tt.wantNum) + } + if text != tt.wantText { + t.Errorf("text: got %q, want %q", text, tt.wantText) + } + }) + } +} + +func TestResolveDecision(t *testing.T) { + tmpDir := t.TempDir() + contextDir := tmpDir + + decisionsContent := `# Decisions + +## [2026-03-10-100000] Use short-lived 
tokens + +**Context:** Security review needed a token strategy. + +**Rationale:** Short-lived tokens reduce blast radius of token theft. + +**Consequences:** Need refresh token handling. + +## [2026-03-01-090000] Use PostgreSQL + +**Context:** Database selection. + +**Rationale:** Well-supported. + +**Consequences:** Team needs training. +` + if err := os.WriteFile( + filepath.Join(contextDir, "DECISIONS.md"), + []byte(decisionsContent), 0644, + ); err != nil { + t.Fatal(err) + } + + resolved := Resolve("decision:1", contextDir) + if !resolved.Found { + t.Fatal("expected to resolve decision:1") + } + if resolved.Title != "Use short-lived tokens" { + t.Errorf("title: got %q", resolved.Title) + } + if resolved.Type != "decision" { + t.Errorf("type: got %q", resolved.Type) + } +} + +func TestResolveTask(t *testing.T) { + tmpDir := t.TempDir() + contextDir := tmpDir + + tasksContent := `# Tasks + +- [ ] Implement auth handler +- [x] Write unit tests +- [ ] Add rate limiting +` + if err := os.WriteFile( + filepath.Join(contextDir, "TASKS.md"), + []byte(tasksContent), 0644, + ); err != nil { + t.Fatal(err) + } + + resolved := Resolve("task:1", contextDir) + if !resolved.Found { + t.Fatal("expected to resolve task:1") + } + if resolved.Title != "Implement auth handler" { + t.Errorf("title: got %q", resolved.Title) + } +} + +func TestResolveNotFound(t *testing.T) { + tmpDir := t.TempDir() + resolved := Resolve("decision:999", tmpDir) + if resolved.Found { + t.Error("should not resolve decision:999") + } +} + +func TestResolveNote(t *testing.T) { + tmpDir := t.TempDir() + resolved := Resolve(`"Hotfix for production outage"`, tmpDir) + if !resolved.Found { + t.Fatal("notes should always resolve") + } + if resolved.Title != "Hotfix for production outage" { + t.Errorf("title: got %q", resolved.Title) + } +} +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `cd /Users/parlakisik/projects/github/ctx && go test ./internal/trace/ -run TestParseRef -v` +Expected: FAIL — 
functions not defined + +- [ ] **Step 3: Implement resolve.go** + +Create `internal/trace/resolve.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trace + +import ( + "fmt" + "os" + "path/filepath" + "strconv" + "strings" + + ctxCfg "github.com/ActiveMemory/ctx/internal/config/ctx" + "github.com/ActiveMemory/ctx/internal/config/regex" + "github.com/ActiveMemory/ctx/internal/index" + "github.com/ActiveMemory/ctx/internal/task" +) + +// ParseRef breaks a reference string into its type, number, and text. +// +// Examples: +// - "decision:12" → ("decision", 12, "") +// - "session:abc" → ("session", 0, "abc") +// - "\"Some note\"" → ("note", 0, "Some note") +// +// Parameters: +// - ref: Raw reference string +// +// Returns: +// - refType: "decision", "learning", "task", "convention", "session", or "note" +// - number: Entry number (0 for session/note) +// - text: Session ID or note text (empty for numbered entries) +func ParseRef(ref string) (refType string, number int, text string) { + // Check for quoted free-form note + if strings.HasPrefix(ref, `"`) && strings.HasSuffix(ref, `"`) { + return "note", 0, strings.Trim(ref, `"`) + } + + parts := strings.SplitN(ref, ":", 2) + if len(parts) != 2 { + return "note", 0, ref + } + + refType = parts[0] + value := parts[1] + + if num, err := strconv.Atoi(value); err == nil { + return refType, num, "" + } + + return refType, 0, value +} + +// Resolve looks up a reference and returns its resolved form. 
+// +// Parameters: +// - ref: Raw reference string +// - contextDir: Path to the .context/ directory +// +// Returns: +// - ResolvedRef: Resolved reference with title and detail +func Resolve(ref, contextDir string) ResolvedRef { + refType, number, text := ParseRef(ref) + + resolved := ResolvedRef{ + Raw: ref, + Type: refType, + Number: number, + } + + switch refType { + case "decision": + return resolveEntry(resolved, contextDir, ctxCfg.Decision, number) + case "learning": + return resolveEntry(resolved, contextDir, ctxCfg.Learning, number) + case "convention": + return resolveEntry(resolved, contextDir, ctxCfg.Convention, number) + case "task": + return resolveTask(resolved, contextDir, number) + case "session": + resolved.Title = text + resolved.Found = true + return resolved + case "note": + resolved.Title = text + resolved.Found = true + return resolved + default: + resolved.Title = ref + return resolved + } +} + +// resolveEntry resolves a numbered entry from a context file +// (DECISIONS.md, LEARNINGS.md, CONVENTIONS.md). +func resolveEntry(resolved ResolvedRef, contextDir, fileName string, number int) ResolvedRef { + filePath := filepath.Join(contextDir, fileName) + content, err := os.ReadFile(filepath.Clean(filePath)) + if err != nil { + return resolved + } + + entries := index.ParseHeaders(string(content)) + if number < 1 || number > len(entries) { + return resolved + } + + entry := entries[number-1] + resolved.Title = entry.Title + resolved.Detail = fmt.Sprintf("Date: %s", entry.Date) + resolved.Found = true + + return resolved +} + +// resolveTask resolves a task number from TASKS.md. +// Task numbers count only top-level pending tasks in file order. 
+func resolveTask(resolved ResolvedRef, contextDir string, number int) ResolvedRef { + filePath := filepath.Join(contextDir, ctxCfg.Task) + content, err := os.ReadFile(filepath.Clean(filePath)) + if err != nil { + return resolved + } + + lines := strings.Split(string(content), "\n") + count := 0 + + for _, line := range lines { + match := regex.Task.FindStringSubmatch(line) + if match == nil { + continue + } + + // Count all top-level tasks (both pending and completed) + if !task.Sub(match) { + count++ + if count == number { + resolved.Title = task.Content(match) + if task.Completed(match) { + resolved.Detail = "Status: completed" + } else { + resolved.Detail = "Status: pending" + } + resolved.Found = true + return resolved + } + } + } + + return resolved +} +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `cd /Users/parlakisik/projects/github/ctx && go test ./internal/trace/ -run "TestParseRef|TestResolve" -v` +Expected: All PASS + +- [ ] **Step 5: Commit** + +```bash +git add internal/trace/resolve.go internal/trace/resolve_test.go +git commit -m "feat(trace): add reference resolution from context files" +``` + +--- + +## Task 7: Wire Recording into Existing Commands + +**Files:** +- Modify: `internal/cli/add/cmd/root/run.go` +- Modify: `internal/cli/task/cmd/complete/run.go` + +### Steps + +- [ ] **Step 1: Understand entry numbering for add command** + +The `ctx add` command does not return an entry number. Since entries are prepended (newest first), a newly added decision is always entry #1 in the file. We need to count entries after write to determine the new entry's number. + +- [ ] **Step 2: Modify add command to record pending context** + +In `internal/cli/add/cmd/root/run.go`, add the trace recording after the successful write. The entry number is determined by counting entries in the file after write, and the new entry is always #1 (prepended for decisions/learnings) or the last entry (appended for tasks/conventions). 
+ +Add import: + +```go +"github.com/ActiveMemory/ctx/internal/trace" +"github.com/ActiveMemory/ctx/internal/cli/system/core/state" +``` + +After `writeAdd.Added(cmd, fName)` and before `return nil`, add: + +```go + // Best-effort: record pending context for commit tracing. + // Decisions and learnings are prepended (newest = #1). + // Tasks and conventions are appended (newest = last). + if fType == cfgEntry.Decision || fType == cfgEntry.Learning || + fType == cfgEntry.Convention { + _ = trace.Record(fType+":1", state.Dir()) + } +``` + +Note: We record as entry #1 for prepended types because new entries are always inserted at the top. For tasks, recording happens in the `complete` command instead, since tasks are tracked by completion, not creation. + +- [ ] **Step 3: Modify complete command to record pending context** + +In `internal/cli/task/cmd/complete/run.go`, add trace recording after a successful completion. + +Add import: + +```go +"github.com/ActiveMemory/ctx/internal/trace" +"github.com/ActiveMemory/ctx/internal/cli/system/core/state" +``` + +In the `Run` function, after `complete.Completed(cmd, matchedTask)` and before `return nil`, add: + +```go + // Best-effort: record pending context for commit tracing. + _ = trace.Record("task:"+args[0], state.Dir()) +``` + +- [ ] **Step 4: Run existing tests to verify no regressions** + +Run: `cd /Users/parlakisik/projects/github/ctx && CTX_SKIP_PATH_CHECK=1 go test ./internal/cli/add/ ./internal/cli/task/... 
-v` +Expected: All PASS + +- [ ] **Step 5: Commit** + +```bash +git add internal/cli/add/cmd/root/run.go internal/cli/task/cmd/complete/run.go +git commit -m "feat(trace): wire pending context recording into add and complete commands" +``` + +--- + +## Task 8: CLI — `ctx trace` Command Structure + +**Files:** +- Create: `internal/cli/trace/trace.go` +- Create: `internal/cli/trace/cmd/show/cmd.go` +- Create: `internal/cli/trace/cmd/show/run.go` +- Create: `internal/config/embed/cmd/trace.go` +- Modify: `internal/config/embed/cmd/base.go` +- Modify: `internal/bootstrap/group.go` +- Modify: `internal/assets/commands/commands.yaml` + +### Steps + +- [ ] **Step 1: Add trace Use and DescKey constants** + +Create `internal/config/embed/cmd/trace.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package cmd + +const UseTrace = "trace [commit]" + +const ( + DescKeyTrace = "trace" + DescKeyTraceFile = "trace.file" + DescKeyTraceTag = "trace.tag" + DescKeyTraceCollect = "trace.collect" + DescKeyTraceHook = "trace.hook" +) +``` + +- [ ] **Step 2: Add trace command descriptions to commands.yaml** + +In `internal/assets/commands/commands.yaml`, add the trace command descriptions: + +```yaml +trace: + long: |- + Show the context behind git commits. + + ctx trace links commits back to the decisions, tasks, learnings, + and sessions that motivated them. 
+ + Usage: + ctx trace Show context for a specific commit + ctx trace --last 5 Show context for last N commits + ctx trace file Show context trail for a file + ctx trace tag Manually tag a commit with context + ctx trace collect Collect context refs (used by hook) + ctx trace hook enable Install prepare-commit-msg hook + + Examples: + ctx trace abc123 + ctx trace --last 10 + ctx trace file src/auth.go + ctx trace tag HEAD --note "Hotfix for production outage" + short: Show context behind git commits +trace.file: + long: |- + Show the context trail for a file. + + Combines git log with trailer resolution to show what decisions, + tasks, and learnings motivated changes to a specific file. + + Supports optional line range with colon syntax: + ctx trace file src/auth.go:42-60 + + Examples: + ctx trace file src/auth.go + ctx trace file src/auth.go:42-60 + short: Show context trail for a file +trace.tag: + long: |- + Manually tag a commit with context. + + For commits made without the hook, or to add extra context + after the fact. Tags are stored in .context/trace/overrides.jsonl + since git trailers cannot be modified without rewriting history. + + Examples: + ctx trace tag HEAD --note "Hotfix for production outage" + ctx trace tag abc123 --note "Part of Q1 compliance initiative" + short: Manually tag a commit with context +trace.collect: + long: |- + Collect context references from all sources. + + Gathers pending context, staged file analysis, and working state, + then outputs a ctx-context trailer line. Used by the + prepare-commit-msg hook. + + This command is not typically called directly. + short: Collect context refs for hook +trace.hook: + long: |- + Enable or disable the prepare-commit-msg hook for automatic + context tracing. The hook injects ctx-context trailers into + commit messages. 
+ + Usage: + ctx trace hook enable Install the hook + ctx trace hook disable Remove the hook + + Examples: + ctx trace hook enable + ctx trace hook disable + short: Manage prepare-commit-msg hook +``` + +- [ ] **Step 3: Create show subcommand (ctx trace [commit] / ctx trace --last N)** + +Create `internal/cli/trace/cmd/show/cmd.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package show + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" + cFlag "github.com/ActiveMemory/ctx/internal/config/flag" +) + +// Cmd returns the trace show command (the default action for ctx trace). +// +// Returns: +// - *cobra.Command: Configured trace command +func Cmd() *cobra.Command { + var ( + last int + jsonOutput bool + ) + + short, long := desc.Command(cmd.DescKeyTrace) + + c := &cobra.Command{ + Use: cmd.UseTrace, + Short: short, + Long: long, + Args: cobra.MaximumNArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + return Run(cmd, args, last, jsonOutput) + }, + } + + c.Flags().IntVar(&last, cFlag.Last, 0, "Show context for last N commits") + c.Flags().BoolVar(&jsonOutput, cFlag.JSON, false, "Output as JSON") + + return c +} +``` + +- [ ] **Step 4: Create show run logic** + +Create `internal/cli/trace/cmd/show/run.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package show + +import ( + "encoding/json" + "fmt" + "os/exec" + "path/filepath" + "strings" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/config/dir" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/trace" +) + +// Run executes the trace show command. 
+// +// Parameters: +// - cmd: Cobra command for output +// - args: Optional commit hash as first argument +// - last: Number of recent commits to show (0 = disabled) +// - jsonOutput: Whether to output as JSON +// +// Returns: +// - error: Non-nil on failure +func Run(cmd *cobra.Command, args []string, last int, jsonOutput bool) error { + contextDir := rc.ContextDir() + traceDir := filepath.Join(contextDir, dir.Trace) + + if last > 0 { + return showLast(cmd, last, contextDir, traceDir, jsonOutput) + } + + if len(args) == 0 { + return showLast(cmd, 10, contextDir, traceDir, jsonOutput) + } + + return showCommit(cmd, args[0], contextDir, traceDir, jsonOutput) +} + +func showCommit(cmd *cobra.Command, commitHash, contextDir, traceDir string, jsonOutput bool) error { + // Resolve full hash + fullHash := resolveCommitHash(commitHash) + if fullHash == "" { + fullHash = commitHash + } + + // Collect refs from all sources + refs := collectRefsForCommit(fullHash, traceDir) + + if len(refs) == 0 { + cmd.Printf("Commit: %s\n\nContext: (none)\n", shortHash(fullHash)) + return nil + } + + if jsonOutput { + return outputJSON(cmd, fullHash, refs, contextDir) + } + + // Get commit message + message := commitMessage(fullHash) + date := commitDate(fullHash) + + cmd.Printf("Commit: %s %q\n", shortHash(fullHash), message) + if date != "" { + cmd.Printf("Date: %s\n", date) + } + cmd.Println() + cmd.Println("Context:") + + for _, ref := range refs { + resolved := trace.Resolve(ref, contextDir) + printResolved(cmd, resolved) + } + + return nil +} + +func showLast(cmd *cobra.Command, n int, contextDir, traceDir string, jsonOutput bool) error { + // Get last N commit hashes + out, err := exec.Command("git", "log", fmt.Sprintf("-%d", n), "--format=%H %s").Output() + if err != nil { + return fmt.Errorf("git log: %w", err) + } + + lines := strings.Split(strings.TrimSpace(string(out)), "\n") + if len(lines) == 0 || (len(lines) == 1 && lines[0] == "") { + cmd.Println("No commits found.") + 
return nil + } + + for _, line := range lines { + parts := strings.SplitN(line, " ", 2) + if len(parts) < 2 { + continue + } + hash := parts[0] + message := parts[1] + + refs := collectRefsForCommit(hash, traceDir) + + if len(refs) > 0 { + cmd.Printf("%s %-40s \u2192 %s\n", shortHash(hash), message, strings.Join(refs, ", ")) + } else { + cmd.Printf("%s %-40s (no context)\n", shortHash(hash), message) + } + } + + return nil +} + +func collectRefsForCommit(commitHash, traceDir string) []string { + var allRefs []string + + // Source 1: history.jsonl (primary) + entry, found := trace.ReadHistoryForCommit(commitHash, traceDir) + if found { + allRefs = append(allRefs, entry.Refs...) + } + + // Source 2: git trailer + allRefs = append(allRefs, readTrailerRefs(commitHash)...) + + // Source 3: overrides.jsonl + allRefs = append(allRefs, trace.ReadOverridesForCommit(commitHash, traceDir)...) + + // Deduplicate + seen := make(map[string]bool, len(allRefs)) + var result []string + for _, r := range allRefs { + if !seen[r] { + seen[r] = true + result = append(result, r) + } + } + return result +} + +func readTrailerRefs(commitHash string) []string { + out, err := exec.Command("git", "log", "-1", "--format=%(trailers:key=ctx-context,valueonly)", commitHash).Output() + if err != nil { + return nil + } + + raw := strings.TrimSpace(string(out)) + if raw == "" { + return nil + } + + var refs []string + for _, part := range strings.Split(raw, ",") { + trimmed := strings.TrimSpace(part) + if trimmed != "" { + refs = append(refs, trimmed) + } + } + return refs +} + +func printResolved(cmd *cobra.Command, r trace.ResolvedRef) { + prefix := strings.Title(r.Type) + if r.Number > 0 { + prefix = fmt.Sprintf("%s #%d", prefix, r.Number) + } + + if r.Found { + cmd.Printf(" %s: %s\n", prefix, r.Title) + if r.Detail != "" { + cmd.Printf(" %s\n", r.Detail) + } + } else { + cmd.Printf(" %s: [not found \u2014 may have been archived]\n", prefix) + } + cmd.Println() +} + +func outputJSON(cmd 
*cobra.Command, hash string, refs []string, contextDir string) error { + type jsonRef struct { + Raw string `json:"raw"` + Type string `json:"type"` + Number int `json:"number,omitempty"` + Title string `json:"title,omitempty"` + Detail string `json:"detail,omitempty"` + Found bool `json:"found"` + } + + type jsonOutput struct { + Commit string `json:"commit"` + Message string `json:"message"` + Refs []jsonRef `json:"refs"` + } + + var jRefs []jsonRef + for _, ref := range refs { + resolved := trace.Resolve(ref, contextDir) + jRefs = append(jRefs, jsonRef{ + Raw: resolved.Raw, + Type: resolved.Type, + Number: resolved.Number, + Title: resolved.Title, + Detail: resolved.Detail, + Found: resolved.Found, + }) + } + + out := jsonOutput{ + Commit: hash, + Message: commitMessage(hash), + Refs: jRefs, + } + + data, err := json.MarshalIndent(out, "", " ") + if err != nil { + return err + } + + cmd.Println(string(data)) + return nil +} + +func resolveCommitHash(short string) string { + out, err := exec.Command("git", "rev-parse", short).Output() + if err != nil { + return "" + } + return strings.TrimSpace(string(out)) +} + +func commitMessage(hash string) string { + out, err := exec.Command("git", "log", "-1", "--format=%s", hash).Output() + if err != nil { + return "" + } + return strings.TrimSpace(string(out)) +} + +func commitDate(hash string) string { + out, err := exec.Command("git", "log", "-1", "--format=%ci", hash).Output() + if err != nil { + return "" + } + return strings.TrimSpace(string(out)) +} + +func shortHash(hash string) string { + if len(hash) > 7 { + return hash[:7] + } + return hash +} +``` + +- [ ] **Step 5: Create trace.go top-level command** + +Create `internal/cli/trace/trace.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package trace provides the ctx trace CLI command for commit context tracing. 
+package trace + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/cli/trace/cmd/show" +) + +// Cmd returns the trace command with all subcommands. +// +// Returns: +// - *cobra.Command: The trace command +func Cmd() *cobra.Command { + return show.Cmd() +} +``` + +- [ ] **Step 6: Register trace command in bootstrap** + +In `internal/bootstrap/group.go`, add the import: + +```go +"github.com/ActiveMemory/ctx/internal/cli/trace" +``` + +Add `{trace.Cmd, embedCmd.GroupDiagnostics}` to the `diagnostics()` function return slice. + +- [ ] **Step 7: Run build to verify compilation** + +Run: `cd /Users/parlakisik/projects/github/ctx && go build ./cmd/ctx/` +Expected: BUILD SUCCESS + +- [ ] **Step 8: Commit** + +```bash +git add internal/cli/trace/ internal/config/embed/cmd/trace.go internal/bootstrap/group.go internal/assets/commands/commands.yaml +git commit -m "feat(trace): add ctx trace command for querying commit context" +``` + +--- + +## Task 9: CLI — `ctx trace file` Subcommand + +**Files:** +- Create: `internal/cli/trace/cmd/file/cmd.go` +- Create: `internal/cli/trace/cmd/file/run.go` +- Modify: `internal/cli/trace/trace.go` + +### Steps + +- [ ] **Step 1: Create file subcommand** + +Create `internal/cli/trace/cmd/file/cmd.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package file + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" + cFlag "github.com/ActiveMemory/ctx/internal/config/flag" +) + +// Cmd returns the trace file subcommand. 
+// +// Returns: +// - *cobra.Command: Configured trace file command +func Cmd() *cobra.Command { + var last int + + short, long := desc.Command(cmd.DescKeyTraceFile) + + c := &cobra.Command{ + Use: "file ", + Short: short, + Long: long, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + return Run(cmd, args[0], last) + }, + } + + c.Flags().IntVarP(&last, cFlag.Last, cFlag.ShortLast, 20, "Max commits to show") + + return c +} +``` + +- [ ] **Step 2: Create file run logic** + +Create `internal/cli/trace/cmd/file/run.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package file + +import ( + "fmt" + "os/exec" + "path/filepath" + "strings" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/config/dir" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/trace" +) + +// Run executes the trace file command. 
+// +// Parameters: +// - cmd: Cobra command for output +// - pathArg: File path, optionally with line range (e.g., "src/auth.go:42-60") +// - last: Maximum number of commits to show +// +// Returns: +// - error: Non-nil on failure +func Run(cmd *cobra.Command, pathArg string, last int) error { + filePath, lineRange := parsePathArg(pathArg) + contextDir := rc.ContextDir() + traceDir := filepath.Join(contextDir, dir.Trace) + + // Get commit hashes for this file + gitArgs := []string{"log", fmt.Sprintf("-%d", last), "--format=%H %ci %s"} + if lineRange != "" { + // Use -L for line ranges + gitArgs = []string{"log", fmt.Sprintf("-%d", last), "--format=%H %ci %s", "-L", lineRange + ":" + filePath} + } else { + gitArgs = append(gitArgs, "--", filePath) + } + + out, err := exec.Command("git", gitArgs...).Output() + if err != nil { + return fmt.Errorf("git log for %s: %w", filePath, err) + } + + lines := strings.Split(strings.TrimSpace(string(out)), "\n") + if len(lines) == 0 || (len(lines) == 1 && lines[0] == "") { + cmd.Printf("No commits found for %s\n", pathArg) + return nil + } + + for _, line := range lines { + parts := strings.SplitN(line, " ", 4) + if len(parts) < 4 { + continue + } + hash := parts[0] + date := parts[1] + message := parts[3] + + refs := collectRefsForCommit(hash, traceDir) + if len(refs) > 0 { + cmd.Printf("%s %s %-35s \u2192 %s\n", shortHash(hash), date, message, strings.Join(refs, ", ")) + } else { + cmd.Printf("%s %s %-35s (no context)\n", shortHash(hash), date, message) + } + } + + return nil +} + +func parsePathArg(arg string) (path, lineRange string) { + // Check for path:line-range format (e.g., "src/auth.go:42-60") + lastColon := strings.LastIndex(arg, ":") + if lastColon == -1 { + return arg, "" + } + + potential := arg[lastColon+1:] + if strings.Contains(potential, "-") || isNumeric(potential) { + return arg[:lastColon], potential + } + + return arg, "" +} + +func isNumeric(s string) bool { + for _, c := range s { + if c < '0' || c > '9' 
{ + return false + } + } + return len(s) > 0 +} + +func collectRefsForCommit(commitHash, traceDir string) []string { + var allRefs []string + + entry, found := trace.ReadHistoryForCommit(commitHash, traceDir) + if found { + allRefs = append(allRefs, entry.Refs...) + } + + allRefs = append(allRefs, trace.ReadOverridesForCommit(commitHash, traceDir)...) + + seen := make(map[string]bool, len(allRefs)) + var result []string + for _, r := range allRefs { + if !seen[r] { + seen[r] = true + result = append(result, r) + } + } + return result +} + +func shortHash(hash string) string { + if len(hash) > 7 { + return hash[:7] + } + return hash +} +``` + +- [ ] **Step 3: Register file subcommand** + +Update `internal/cli/trace/trace.go` to add the file subcommand: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package trace provides the ctx trace CLI command for commit context tracing. +package trace + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/cli/trace/cmd/file" + "github.com/ActiveMemory/ctx/internal/cli/trace/cmd/show" +) + +// Cmd returns the trace command with all subcommands. 
+// +// Returns: +// - *cobra.Command: The trace command +func Cmd() *cobra.Command { + c := show.Cmd() + c.AddCommand(file.Cmd()) + return c +} +``` + +- [ ] **Step 4: Run build to verify compilation** + +Run: `cd /Users/parlakisik/projects/github/ctx && go build ./cmd/ctx/` +Expected: BUILD SUCCESS + +- [ ] **Step 5: Commit** + +```bash +git add internal/cli/trace/cmd/file/ internal/cli/trace/trace.go +git commit -m "feat(trace): add ctx trace file subcommand for file history" +``` + +--- + +## Task 10: CLI — `ctx trace tag` Subcommand + +**Files:** +- Create: `internal/cli/trace/cmd/tag/cmd.go` +- Create: `internal/cli/trace/cmd/tag/run.go` +- Modify: `internal/cli/trace/trace.go` + +### Steps + +- [ ] **Step 1: Create tag subcommand** + +Create `internal/cli/trace/cmd/tag/cmd.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package tag + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" +) + +// Cmd returns the trace tag subcommand. +// +// Returns: +// - *cobra.Command: Configured trace tag command +func Cmd() *cobra.Command { + var note string + + short, long := desc.Command(cmd.DescKeyTraceTag) + + c := &cobra.Command{ + Use: "tag ", + Short: short, + Long: long, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + return Run(cmd, args[0], note) + }, + } + + c.Flags().StringVar(¬e, "note", "", "Free-form context note") + + return c +} +``` + +- [ ] **Step 2: Create tag run logic** + +Create `internal/cli/trace/cmd/tag/run.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package tag + +import ( + "fmt" + "os/exec" + "path/filepath" + "strings" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/config/dir" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/trace" +) + +// Run executes the trace tag command. +// +// Parameters: +// - cmd: Cobra command for output +// - commitRef: Git commit reference (hash or "HEAD") +// - note: Free-form context note +// +// Returns: +// - error: Non-nil on failure +func Run(cmd *cobra.Command, commitRef, note string) error { + if note == "" { + return fmt.Errorf("--note is required") + } + + // Resolve commit hash + hash, err := resolveHash(commitRef) + if err != nil { + return fmt.Errorf("cannot resolve %q: %w", commitRef, err) + } + + contextDir := rc.ContextDir() + traceDir := filepath.Join(contextDir, dir.Trace) + + entry := trace.OverrideEntry{ + Commit: hash, + Refs: []string{fmt.Sprintf("%q", note)}, + } + + if writeErr := trace.WriteOverride(entry, traceDir); writeErr != nil { + return fmt.Errorf("write override: %w", writeErr) + } + + cmd.Printf("Tagged %s with: %s\n", shortHash(hash), note) + return nil +} + +func resolveHash(ref string) (string, error) { + out, err := exec.Command("git", "rev-parse", ref).Output() + if err != nil { + return "", err + } + return strings.TrimSpace(string(out)), nil +} + +func shortHash(hash string) string { + if len(hash) > 7 { + return hash[:7] + } + return hash +} +``` + +- [ ] **Step 3: Register tag subcommand in trace.go** + +Update `internal/cli/trace/trace.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package trace provides the ctx trace CLI command for commit context tracing. 
+package trace + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/cli/trace/cmd/file" + "github.com/ActiveMemory/ctx/internal/cli/trace/cmd/show" + "github.com/ActiveMemory/ctx/internal/cli/trace/cmd/tag" +) + +// Cmd returns the trace command with all subcommands. +// +// Returns: +// - *cobra.Command: The trace command +func Cmd() *cobra.Command { + c := show.Cmd() + c.AddCommand(file.Cmd()) + c.AddCommand(tag.Cmd()) + return c +} +``` + +- [ ] **Step 4: Run build to verify compilation** + +Run: `cd /Users/parlakisik/projects/github/ctx && go build ./cmd/ctx/` +Expected: BUILD SUCCESS + +- [ ] **Step 5: Commit** + +```bash +git add internal/cli/trace/cmd/tag/ internal/cli/trace/trace.go +git commit -m "feat(trace): add ctx trace tag subcommand for manual commit tagging" +``` + +--- + +## Task 11: CLI — `ctx trace collect` and `ctx trace hook` + +**Files:** +- Create: `internal/cli/trace/cmd/collect/cmd.go` +- Create: `internal/cli/trace/cmd/collect/run.go` +- Create: `internal/cli/trace/cmd/hook/cmd.go` +- Create: `internal/cli/trace/cmd/hook/run.go` +- Modify: `internal/cli/trace/trace.go` + +### Steps + +- [ ] **Step 1: Create collect subcommand** + +Create `internal/cli/trace/cmd/collect/cmd.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package collect + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" +) + +// Cmd returns the trace collect subcommand. 
+// +// Returns: +// - *cobra.Command: Configured trace collect command +func Cmd() *cobra.Command { + short, long := desc.Command(cmd.DescKeyTraceCollect) + + c := &cobra.Command{ + Use: "collect", + Short: short, + Long: long, + Hidden: true, + RunE: func(cmd *cobra.Command, args []string) error { + return Run(cmd) + }, + } + + return c +} +``` + +- [ ] **Step 2: Create collect run logic** + +Create `internal/cli/trace/cmd/collect/run.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package collect + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/trace" +) + +// Run executes the trace collect command. +// Outputs the ctx-context trailer line to stdout for the hook to consume. +// +// Parameters: +// - cmd: Cobra command for output +// +// Returns: +// - error: Non-nil on failure +func Run(cmd *cobra.Command) error { + contextDir := rc.ContextDir() + refs := trace.Collect(contextDir) + + trailer := trace.FormatTrailer(refs) + if trailer != "" { + cmd.Println(trailer) + } + + return nil +} +``` + +- [ ] **Step 3: Create hook subcommand** + +Create `internal/cli/trace/cmd/hook/cmd.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package hook + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" +) + +// Cmd returns the trace hook subcommand. 
+// +// Returns: +// - *cobra.Command: Configured trace hook command +func Cmd() *cobra.Command { + short, long := desc.Command(cmd.DescKeyTraceHook) + + c := &cobra.Command{ + Use: "hook <action>", + Short: short, + Long: long, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + return Run(cmd, args[0]) + }, + } + + return c +} +``` + +- [ ] **Step 4: Create hook run logic** + +Create `internal/cli/trace/cmd/hook/run.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package hook + +import ( + "fmt" + "os" + "os/exec" + "path/filepath" + "strings" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/config/fs" +) + +const hookScript = `#!/bin/sh +# ctx: prepare-commit-msg hook for commit context tracing. +# Installed by: ctx trace hook enable +# Remove with: ctx trace hook disable + +COMMIT_MSG_FILE="$1" +COMMIT_SOURCE="$2" + +# Only inject on normal commits (skip merges and squashes) +case "$COMMIT_SOURCE" in + merge|squash) exit 0 ;; +esac + +# Collect context refs +TRAILER=$(ctx trace collect 2>/dev/null) + +if [ -n "$TRAILER" ]; then + # Append trailer with a blank line separator + echo "" >> "$COMMIT_MSG_FILE" + echo "$TRAILER" >> "$COMMIT_MSG_FILE" +fi +` + +// Run executes the trace hook command. 
+// +// Parameters: +// - cmd: Cobra command for output +// - action: "enable" or "disable" +// +// Returns: +// - error: Non-nil on failure +func Run(cmd *cobra.Command, action string) error { + switch strings.ToLower(action) { + case "enable": + return enable(cmd) + case "disable": + return disable(cmd) + default: + return fmt.Errorf("unknown action %q: use 'enable' or 'disable'", action) + } +} + +func enable(cmd *cobra.Command) error { + hookPath, err := hookFilePath() + if err != nil { + return err + } + + // Check if hook already exists + if _, statErr := os.Stat(hookPath); statErr == nil { + content, readErr := os.ReadFile(filepath.Clean(hookPath)) + if readErr == nil && strings.Contains(string(content), "ctx trace collect") { + cmd.Println("Hook already installed.") + return nil + } + return fmt.Errorf("a prepare-commit-msg hook already exists at %s; remove it first or add ctx integration manually", hookPath) + } + + if writeErr := os.WriteFile(hookPath, []byte(hookScript), fs.PermExec); writeErr != nil { + return fmt.Errorf("write hook: %w", writeErr) + } + + cmd.Printf("Installed prepare-commit-msg hook at %s\n", hookPath) + return nil +} + +func disable(cmd *cobra.Command) error { + hookPath, err := hookFilePath() + if err != nil { + return err + } + + if _, statErr := os.Stat(hookPath); os.IsNotExist(statErr) { + cmd.Println("No hook installed.") + return nil + } + + // Verify it's our hook before removing + content, readErr := os.ReadFile(filepath.Clean(hookPath)) + if readErr != nil { + return fmt.Errorf("read hook: %w", readErr) + } + + if !strings.Contains(string(content), "ctx trace collect") { + return fmt.Errorf("hook at %s is not a ctx trace hook; not removing", hookPath) + } + + if removeErr := os.Remove(hookPath); removeErr != nil { + return fmt.Errorf("remove hook: %w", removeErr) + } + + cmd.Printf("Removed prepare-commit-msg hook from %s\n", hookPath) + return nil +} + +func hookFilePath() (string, error) { + // Get git hooks directory + 
out, err := exec.Command("git", "rev-parse", "--git-dir").Output() + if err != nil { + return "", fmt.Errorf("not in a git repository: %w", err) + } + + gitDir := strings.TrimSpace(string(out)) + hooksDir := filepath.Join(gitDir, "hooks") + + if mkdirErr := os.MkdirAll(hooksDir, fs.PermExec); mkdirErr != nil { + return "", fmt.Errorf("create hooks dir: %w", mkdirErr) + } + + return filepath.Join(hooksDir, "prepare-commit-msg"), nil +} +``` + +- [ ] **Step 5: Register collect and hook subcommands in trace.go** + +Update `internal/cli/trace/trace.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package trace provides the ctx trace CLI command for commit context tracing. +package trace + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/cli/trace/cmd/collect" + "github.com/ActiveMemory/ctx/internal/cli/trace/cmd/file" + "github.com/ActiveMemory/ctx/internal/cli/trace/cmd/hook" + "github.com/ActiveMemory/ctx/internal/cli/trace/cmd/show" + "github.com/ActiveMemory/ctx/internal/cli/trace/cmd/tag" +) + +// Cmd returns the trace command with all subcommands. 
+// +// Returns: +// - *cobra.Command: The trace command +func Cmd() *cobra.Command { + c := show.Cmd() + c.AddCommand(collect.Cmd()) + c.AddCommand(file.Cmd()) + c.AddCommand(hook.Cmd()) + c.AddCommand(tag.Cmd()) + return c +} +``` + +- [ ] **Step 6: Run build to verify compilation** + +Run: `cd /Users/parlakisik/projects/github/ctx && go build ./cmd/ctx/` +Expected: BUILD SUCCESS + +- [ ] **Step 7: Commit** + +```bash +git add internal/cli/trace/cmd/collect/ internal/cli/trace/cmd/hook/ internal/cli/trace/trace.go +git commit -m "feat(trace): add collect and hook subcommands for prepare-commit-msg integration" +``` + +--- + +## Task 12: Error Package and Output Formatting + +**Files:** +- Create: `internal/err/trace/doc.go` +- Create: `internal/err/trace/trace.go` + +### Steps + +- [ ] **Step 1: Create trace error package** + +Create `internal/err/trace/doc.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package trace provides error constructors for trace operations. +package trace +``` + +Create `internal/err/trace/trace.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trace + +import ( + "errors" + "fmt" +) + +// CommitNotFound returns an error when a commit hash cannot be found. +// +// Parameters: +// - hash: The commit hash that was not found +// +// Returns: +// - error: Descriptive error +func CommitNotFound(hash string) error { + return fmt.Errorf("commit not found: %s", hash) +} + +// NotInGitRepo returns an error when the command is run outside a git repo. +// +// Returns: +// - error: Descriptive error +func NotInGitRepo() error { + return errors.New("not in a git repository") +} + +// NoteRequired returns an error when --note flag is missing. 
+// +// Returns: +// - error: Descriptive error +func NoteRequired() error { + return errors.New("--note is required") +} +``` + +- [ ] **Step 2: Commit** + +```bash +git add internal/err/trace/ +git commit -m "feat(trace): add error package for trace operations" +``` + +--- + +## Task 13: Integration Tests + +**Files:** +- Create: `internal/cli/trace/trace_test.go` + +### Steps + +- [ ] **Step 1: Write integration test** + +Create `internal/cli/trace/trace_test.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trace + +import ( + "os" + "os/exec" + "path/filepath" + "strings" + "testing" + + "github.com/ActiveMemory/ctx/internal/cli/initialize" + "github.com/ActiveMemory/ctx/internal/trace" +) + +func TestTraceTagAndShow(t *testing.T) { + tmpDir := t.TempDir() + + origDir, _ := os.Getwd() + if err := os.Chdir(tmpDir); err != nil { + t.Fatalf("chdir: %v", err) + } + defer func() { _ = os.Chdir(origDir) }() + + // Init git repo + run(t, "git", "init") + run(t, "git", "config", "user.email", "test@test.com") + run(t, "git", "config", "user.name", "Test") + + // Init ctx + initCmd := initialize.Cmd() + initCmd.SetArgs([]string{}) + if err := initCmd.Execute(); err != nil { + t.Fatalf("init: %v", err) + } + + // Create a file and commit + if err := os.WriteFile("test.go", []byte("package main\n"), 0644); err != nil { + t.Fatal(err) + } + run(t, "git", "add", ".") + run(t, "git", "commit", "-m", "Initial commit") + + // Record some pending context + stateDir := filepath.Join(".context", "state") + if err := os.MkdirAll(stateDir, 0750); err != nil { + t.Fatal(err) + } + _ = trace.Record("decision:1", stateDir) + + // Write history for the commit + traceDir := filepath.Join(".context", "trace") + hash := strings.TrimSpace(runOutput(t, "git", "rev-parse", "HEAD")) + + err := trace.WriteHistory(trace.HistoryEntry{ + Commit: hash, + Refs: 
[]string{"decision:1"}, + Message: "Initial commit", + }, traceDir) + if err != nil { + t.Fatalf("WriteHistory: %v", err) + } + + // Test ctx trace + traceCmd := Cmd() + traceCmd.SetArgs([]string{hash[:7]}) + if err := traceCmd.Execute(); err != nil { + t.Errorf("trace show failed: %v", err) + } + + // Test ctx trace tag + traceCmd = Cmd() + traceCmd.SetArgs([]string{"tag", "HEAD", "--note", "Test tag"}) + if err := traceCmd.Execute(); err != nil { + t.Errorf("trace tag failed: %v", err) + } + + // Verify override was written + overrides, err := trace.ReadOverrides(traceDir) + if err != nil { + t.Fatalf("ReadOverrides: %v", err) + } + if len(overrides) != 1 { + t.Errorf("expected 1 override, got %d", len(overrides)) + } +} + +func run(t *testing.T, name string, args ...string) { + t.Helper() + cmd := exec.Command(name, args...) + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + if err := cmd.Run(); err != nil { + t.Fatalf("%s %v failed: %v", name, args, err) + } +} + +func runOutput(t *testing.T, name string, args ...string) string { + t.Helper() + out, err := exec.Command(name, args...).Output() + if err != nil { + t.Fatalf("%s %v failed: %v", name, args, err) + } + return string(out) +} +``` + +- [ ] **Step 2: Run integration test** + +Run: `cd /Users/parlakisik/projects/github/ctx && CTX_SKIP_PATH_CHECK=1 go test ./internal/cli/trace/ -v -run TestTraceTagAndShow` +Expected: PASS + +- [ ] **Step 3: Run all tests** + +Run: `cd /Users/parlakisik/projects/github/ctx && make test` +Expected: All PASS + +- [ ] **Step 4: Run lint** + +Run: `cd /Users/parlakisik/projects/github/ctx && make lint` +Expected: No errors + +- [ ] **Step 5: Commit** + +```bash +git add internal/cli/trace/trace_test.go +git commit -m "test(trace): add integration tests for trace command" +``` + +--- + +## Task 14: Hook Post-Commit — Write History Entry + +The prepare-commit-msg hook injects the trailer before the commit is finalized. 
But we also need to record the commit in `history.jsonl` after the commit succeeds. This is done by adding a post-commit behavior to the collect flow. + +**Files:** +- Modify: `internal/cli/trace/cmd/collect/run.go` + +### Steps + +- [ ] **Step 1: Add commit-msg-file argument to collect** + +The prepare-commit-msg hook passes the commit message file path. After outputting the trailer, we need to also record it. However, since the commit hasn't happened yet at prepare-commit-msg time, we need a separate mechanism. + +Update the collect command to also accept a `--record` flag that writes to history after the fact. Or, more pragmatically, enhance the hook script to also call `ctx trace collect --record` in a post-commit hook. + +Actually, the simpler approach: modify the hook script to write the history entry at prepare-commit-msg time (before commit), using a temporary marker. Then the trailer is the canonical source for the data. The `ctx trace` command already reads from trailers as a fallback. + +Let's keep it simple: the hook injects the trailer, and `ctx trace` reads from the trailer at query time. The `history.jsonl` is a performance optimization we can add in a follow-up. For now, we'll only write history from the `ctx trace collect --record <hash>` subcommand, which can be called from a post-commit hook. + +Update `internal/cli/trace/cmd/collect/cmd.go`: + +```go +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package collect + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" +) + +// Cmd returns the trace collect subcommand. 
+// +// Returns: +// - *cobra.Command: Configured trace collect command +func Cmd() *cobra.Command { + var record string + + short, long := desc.Command(cmd.DescKeyTraceCollect) + + c := &cobra.Command{ + Use: "collect", + Short: short, + Long: long, + Hidden: true, + RunE: func(cmd *cobra.Command, args []string) error { + if record != "" { + return RecordCommit(cmd, record) + } + return Run(cmd) + }, + } + + c.Flags().StringVar(&record, "record", "", "Record history entry for commit hash (called from post-commit)") + + return c +} +``` + +Update `internal/cli/trace/cmd/collect/run.go` — add `RecordCommit`: + +```go +// RecordCommit writes a history entry for a completed commit. +// Called from the post-commit hook with the commit hash. +// +// Parameters: +// - cmd: Cobra command for output +// - commitHash: The commit hash to record +// +// Returns: +// - error: Non-nil on failure +func RecordCommit(cmd *cobra.Command, commitHash string) error { + contextDir := rc.ContextDir() + stateDir := filepath.Join(contextDir, dir.State) + traceDir := filepath.Join(contextDir, dir.Trace) + + // Read pending context before truncating + refs := trace.Collect(contextDir) + if len(refs) == 0 { + return nil + } + + // Get commit message + message := commitMessage(commitHash) + + entry := trace.HistoryEntry{ + Commit: commitHash, + Refs: refs, + Message: message, + } + + if err := trace.WriteHistory(entry, traceDir); err != nil { + return err + } + + // Truncate pending context + _ = trace.TruncatePending(stateDir) + + return nil +} + +func commitMessage(hash string) string { + out, err := exec.Command("git", "log", "-1", "--format=%s", hash).Output() + if err != nil { + return "" + } + return strings.TrimSpace(string(out)) +} +``` + +Add necessary imports to collect/run.go: + +```go +import ( + "os/exec" + "path/filepath" + "strings" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/config/dir" + "github.com/ActiveMemory/ctx/internal/rc" + 
"github.com/ActiveMemory/ctx/internal/trace" +) +``` + +- [ ] **Step 2: Update hook script to include post-commit** + +Update the hook script in `internal/cli/trace/cmd/hook/run.go` to also install a post-commit hook: + +Add a `postCommitScript` constant: + +```go +const postCommitScript = `#!/bin/sh +# ctx: post-commit hook for recording commit context history. +# Installed by: ctx trace hook enable +# Remove with: ctx trace hook disable + +COMMIT_HASH=$(git rev-parse HEAD) +ctx trace collect --record "$COMMIT_HASH" 2>/dev/null || true +` +``` + +Update `enable` to install both hooks: + +```go +func enable(cmd *cobra.Command) error { + prepareHook, err := hookFilePath("prepare-commit-msg") + if err != nil { + return err + } + postHook, err := hookFilePath("post-commit") + if err != nil { + return err + } + + if err := installHook(prepareHook, hookScript, "prepare-commit-msg"); err != nil { + return err + } + if err := installHook(postHook, postCommitScript, "post-commit"); err != nil { + return err + } + + cmd.Printf("Installed prepare-commit-msg and post-commit hooks\n") + return nil +} + +func installHook(path, script, name string) error { + if _, statErr := os.Stat(path); statErr == nil { + content, readErr := os.ReadFile(filepath.Clean(path)) + if readErr == nil && strings.Contains(string(content), "ctx trace") { + return nil // already installed + } + return fmt.Errorf("a %s hook already exists at %s; remove it first or add ctx integration manually", name, path) + } + return os.WriteFile(path, []byte(script), fs.PermExec) +} +``` + +Update `disable` to remove both hooks: + +```go +func disable(cmd *cobra.Command) error { + prepareHook, err := hookFilePath("prepare-commit-msg") + if err != nil { + return err + } + postHook, err := hookFilePath("post-commit") + if err != nil { + return err + } + + removeHook(prepareHook) + removeHook(postHook) + + cmd.Println("Removed ctx trace hooks") + return nil +} + +func removeHook(path string) { + content, err := 
os.ReadFile(filepath.Clean(path)) + if err != nil { + return + } + if strings.Contains(string(content), "ctx trace") { + _ = os.Remove(path) + } +} +``` + +Update `hookFilePath` to accept the hook name: + +```go +func hookFilePath(hookName string) (string, error) { + out, err := exec.Command("git", "rev-parse", "--git-dir").Output() + if err != nil { + return "", fmt.Errorf("not in a git repository: %w", err) + } + + gitDir := strings.TrimSpace(string(out)) + hooksDir := filepath.Join(gitDir, "hooks") + + if mkdirErr := os.MkdirAll(hooksDir, fs.PermExec); mkdirErr != nil { + return "", fmt.Errorf("create hooks dir: %w", mkdirErr) + } + + return filepath.Join(hooksDir, hookName), nil +} +``` + +- [ ] **Step 3: Run build to verify** + +Run: `cd /Users/parlakisik/projects/github/ctx && go build ./cmd/ctx/` +Expected: BUILD SUCCESS + +- [ ] **Step 4: Commit** + +```bash +git add internal/cli/trace/cmd/collect/ internal/cli/trace/cmd/hook/ +git commit -m "feat(trace): add post-commit history recording and dual hook management" +``` + +--- + +## Task 15: Final Verification + +### Steps + +- [ ] **Step 1: Run full test suite** + +Run: `cd /Users/parlakisik/projects/github/ctx && make test` +Expected: All PASS + +- [ ] **Step 2: Run linter** + +Run: `cd /Users/parlakisik/projects/github/ctx && make lint` +Expected: No errors + +- [ ] **Step 3: Run build** + +Run: `cd /Users/parlakisik/projects/github/ctx && make build` +Expected: BUILD SUCCESS + +- [ ] **Step 4: Manual smoke test** + +Run these commands to verify the feature works end-to-end: + +```bash +# Build and install +cd /Users/parlakisik/projects/github/ctx && go build -o /tmp/ctx ./cmd/ctx/ + +# Test trace --last (should show existing commits with no context) +/tmp/ctx trace --last 5 + +# Test trace tag +/tmp/ctx trace tag HEAD --note "Test: commit context tracing feature" + +# Verify tag was written +cat .context/trace/overrides.jsonl + +# Test trace on HEAD (should show the manual tag) +/tmp/ctx trace $(git 
 rev-parse --short HEAD) + +# Test hook enable (don't actually enable in this repo) +# /tmp/ctx trace hook enable +``` + +- [ ] **Step 5: Final commit (if any fixes needed)** + +```bash +git add -A +git commit -m "fix(trace): final adjustments from smoke testing" +``` diff --git a/internal/assets/commands/commands.yaml b/internal/assets/commands/commands.yaml index 287231ce8..8b17d70ec 100644 --- a/internal/assets/commands/commands.yaml +++ b/internal/assets/commands/commands.yaml @@ -1374,3 +1374,173 @@ why: ctx why about Show the about page ctx why invariants Show the design invariants short: Read the philosophy behind ctx + +trigger: + long: |- + Manage lifecycle triggers that fire at specific events during AI sessions. + + Triggers are executable scripts in .context/hooks/<type>/ that + receive JSON via stdin and return JSON via stdout. They enable blocking, + context injection, and automation at lifecycle events. + + Valid trigger types: pre-tool-use, post-tool-use, session-start, + session-end, file-save, context-add. + + Subcommands: + add Create a new trigger script + list List all triggers by type + test Test triggers for a given type + enable Enable a trigger (add executable bit) + disable Disable a trigger (remove executable bit) + short: Manage lifecycle triggers for AI sessions +trigger.add: + long: |- + Create a new trigger script with a template. + + The script is created at .context/hooks/<type>/<name>.sh with + a bash shebang, JSON input reading via jq, and JSON output structure. + + Valid trigger types: pre-tool-use, post-tool-use, session-start, + session-end, file-save, context-add. + + Examples: + ctx trigger add session-start inject-context + ctx trigger add pre-tool-use block-legacy + short: Create a new trigger script +trigger.list: + short: List all triggers grouped by type +trigger.test: + long: |- + Test triggers for a given trigger type by constructing a mock input + and executing all enabled triggers. + + Use --tool and --path to customize the mock input. 
+ + Examples: + ctx trigger test session-start + ctx trigger test pre-tool-use --tool write_file --path src/main.go + short: Test triggers for a given trigger type +trigger.enable: + long: |- + Enable a trigger by adding the executable permission bit. + + Searches all trigger type directories for a trigger matching the given + name. + + Examples: + ctx trigger enable inject-context + short: Enable a trigger (add executable bit) +trigger.disable: + long: |- + Disable a trigger by removing the executable permission bit. + + Searches all trigger type directories for a trigger matching the given + name. + + Examples: + ctx trigger disable inject-context + short: Disable a trigger (remove executable bit) +steering: + long: |- + Manage steering files that define persistent behavioral rules + for AI coding assistants. + + Steering files are markdown files with YAML frontmatter stored in + .context/steering/. They control how AI tools behave by injecting + rules into prompts based on inclusion modes (always, auto, manual). + + Subcommands: + add Create a new steering file + list List all steering files + preview Preview which files match a prompt + init Generate foundation steering files + sync Sync to tool-native formats + short: Manage steering files for AI behavioral rules +steering.add: + long: |- + Create a new steering file with default frontmatter. + + The file is created at .context/steering/<name>.md with default + frontmatter (inclusion: manual, priority: 50) and an empty body. + + Examples: + ctx steering add api-standards + ctx steering add code-review + short: Create a new steering file +steering.list: + short: List all steering files with inclusion mode and priority +steering.preview: + long: |- + Show which steering files would be included for a given prompt. + + Respects inclusion mode rules: always files are always shown, + auto files match on description substring, manual files are excluded. 
+ + Examples: + ctx steering preview "create a REST API endpoint" + ctx steering preview "refactor the database layer" + short: Preview which steering files match a prompt +steering.init: + long: |- + Generate foundation steering files in .context/steering/. + + Creates the following files if they don't already exist: + product.md Product context and goals + tech.md Technology stack and constraints + structure.md Project structure conventions + workflow.md Development workflow rules + + Existing files are skipped and reported. + + Examples: + ctx steering init + short: Generate foundation steering files +steering.sync: + long: |- + Sync steering files to tool-native formats. + + Without flags, reads the tool from .ctxrc and syncs to that format. + Use --tool to specify a target tool, or --all for all supported tools. + + Supported tools: cursor, cline, kiro. + + Examples: + ctx steering sync # Sync to .ctxrc tool + ctx steering sync --tool cursor # Sync to Cursor format + ctx steering sync --all # Sync to all tools + short: Sync steering files to tool-native formats +skill: + long: |- + Manage reusable instruction bundles (skills) for AI coding assistants. + + Skills are directories in .context/skills/<name>/ containing a SKILL.md + file with YAML frontmatter and markdown instructions. They can be + installed from local paths, listed, and removed. + + Subcommands: + install Install a skill from a source directory + list List all installed skills + remove Remove an installed skill + short: Manage reusable instruction bundles +skill.install: + long: |- + Install a skill from a source directory. + + The source must contain a valid SKILL.md file with YAML frontmatter + including a name field. The skill is copied into .context/skills/<name>/. 
+ + Examples: + ctx skill install ./my-skill + ctx skill install /path/to/react-patterns + short: Install a skill from a source directory +skill.list: + short: List all installed skills +skill.remove: + long: |- + Remove an installed skill by name. + + Deletes the .context/skills// directory and all its contents. + + Examples: + ctx skill remove react-patterns + short: Remove an installed skill diff --git a/internal/assets/commands/flags.yaml b/internal/assets/commands/flags.yaml index 38d41fb71..42a660101 100644 --- a/internal/assets/commands/flags.yaml +++ b/internal/assets/commands/flags.yaml @@ -26,6 +26,8 @@ agent.format: short: 'Output format: md or json' agent.session: short: Session identifier for cooldown isolation (e.g., $PPID) +agent.skill: + short: Include named skill content in context packet allow-outside-cwd: short: Allow context directory outside current working directory changes.since: @@ -52,6 +54,10 @@ guide.commands: short: List all CLI commands guide.skills: short: List all available skills +trigger.test.path: + short: File path for mock input +trigger.test.tool: + short: Tool name for mock input setup.write: short: Write the configuration file instead of printing initialize.force: @@ -174,6 +180,8 @@ status.json: short: Output as JSON status.verbose: short: Include file content previews +steering.sync.all: + short: Sync to all supported tool formats sync.dry-run: short: Show what would change without modifying system.backup.json: @@ -234,6 +242,8 @@ trace.tag.note: short: Context note to attach to the commit task.archive.dry-run: short: Preview changes without modifying files +tool: + short: 'Override active AI tool (e.g., claude, cursor, cline, kiro, codex)' watch.dry-run: short: Show updates without applying watch.log: diff --git a/internal/assets/commands/text/errors.yaml b/internal/assets/commands/text/errors.yaml index 4d8ad4edc..9d19b79ab 100644 --- a/internal/assets/commands/text/errors.yaml +++ 
b/internal/assets/commands/text/errors.yaml @@ -52,6 +52,8 @@ err.backup.write-smb: short: 'write to SMB: %w' err.context.dir-not-found: short: 'context directory not found: ' +err.cli.no-tool-specified: + short: 'no tool specified: use --tool or set the tool field in .ctxrc' err.config.golden-not-found: short: "no .claude/settings.golden.json found - run 'ctx permission snapshot' first" err.config.invalid-tool: @@ -168,18 +170,54 @@ err.fs.write-merged: short: 'failed to write merged %s: %w' err.git.not-in-git-repo: short: 'not in a git repository: %w' +err.lifecycle-hook.boundary: + short: 'hook path %q escapes hooks directory %q' +err.lifecycle-hook.not-executable: + short: 'hook script %q is not executable' +err.lifecycle-hook.symlink: + short: 'hook script %q is a symlink' +err.hook.chmod: + short: 'chmod hook: %w' +err.hook.create-dir: + short: 'create hook directory: %w' +err.hook.discover: + short: 'discover hooks: %w' err.hook.embedded-template-not-found: short: embedded template not found for %s/%s +err.hook.exit: + short: 'exit: %w' +err.hook.invalid-json-output: + short: 'invalid JSON output: %w' +err.hook.invalid-type: + short: 'invalid hook type %q; valid types: %s' +err.hook.marshal-input: + short: 'marshal hook input: %w' +err.hook.not-found: + short: 'hook not found: %s' err.hook.override-exists: short: "override already exists at %s\nEdit it directly or use `ctx system message reset %s %s` first" err.hook.remove-override: short: 'failed to remove override %s: %w' +err.hook.resolve-hooks-dir: + short: 'resolve hooks directory %q: %w' +err.hook.resolve-path: + short: 'resolve hook path %q: %w' +err.hook.script-exists: + short: 'hook script already exists: %s' +err.hook.stat: + short: 'stat hook: %w' +err.hook.stat-path: + short: 'stat hook path %q: %w' +err.hook.timeout: + short: 'timeout after %s' err.hook.unknown-hook: short: "unknown hook: %s\nRun `ctx system message list` to see available hooks" err.hook.unknown-variant: short: "unknown variant 
%q for hook %q\nRun `ctx system message list` to see available variants" err.hook.write-override: short: 'failed to write override %s: %w' +err.hook.write-script: + short: 'write hook script: %w' err.http.parse-url: short: 'parse URL: %w' err.http.too-many-redirects: @@ -390,10 +428,86 @@ err.site.no-site-config: short: no zensical.toml found in %s err.site.zensical-not-found: short: 'zensical not found. Install with: pipx install zensical (requires Python >= 3.10)' +err.setup.create-dir: + short: 'create %s: %w' +err.setup.marshal-config: + short: 'marshal mcp config: %w' +err.setup.write-file: + short: 'write %s: %w' +err.setup.sync-steering: + short: 'sync steering: %w' +err.skill.create-dest: + short: 'skill: create destination: %w' +err.skill.install: + short: 'skill: install %s: %w' +err.skill.invalid-manifest: + short: 'skill: source has invalid %s: %w' +err.skill.invalid-yaml: + short: 'skill: %s: invalid YAML frontmatter: %w' +err.skill.load: + short: 'skill: %s: %w' +err.skill.missing-closing-delimiter: + short: missing closing frontmatter delimiter (---) +err.skill.missing-name: + short: 'skill: %s is missing required ''name'' field' +err.skill.missing-opening-delimiter: + short: missing opening frontmatter delimiter (---) +err.skill.not-found: + short: 'skill %q not found' +err.skill.not-valid-dir: + short: 'skill: %q is not a valid skill directory' +err.skill.not-valid-source: + short: 'skill: source is not a valid skill: %w' +err.skill.read-dir: + short: 'skill: read directory %s: %w' +err.skill.remove: + short: 'skill: remove %s: %w' err.skill.skill-list: short: 'failed to list skills: %w' +err.skill.skill-load: + short: 'skill %q: %w' err.skill.skill-read: short: 'failed to read skill %s: %w' +err.steering.compute-rel-path: + short: 'compute relative path: %w' +err.steering.context-dir-missing: + short: .context/ directory does not exist; run ctx init first +err.steering.create-dir: + short: 'create steering directory: %w' +err.steering.file-exists: 
+ short: 'steering file already exists: %s' +err.steering.invalid-yaml: + short: 'steering: %s: invalid YAML frontmatter: %w' +err.steering.missing-closing-delimiter: + short: missing closing frontmatter delimiter (---) +err.steering.missing-opening-delimiter: + short: missing opening frontmatter delimiter (---) +err.steering.no-tool: + short: 'no tool specified: use --tool <name>, --all, or set the tool field in .ctxrc' +err.steering.output-escapes-root: + short: 'output path %s escapes project root %s' +err.steering.parse: + short: 'steering: %s: %w' +err.steering.read-dir: + short: 'steering: read directory %s: %w' +err.steering.read-file: + short: 'steering: read file %s: %w' +err.steering.resolve-output: + short: 'resolve output path: %w' +err.steering.resolve-root: + short: 'resolve project root: %w' +err.steering.sync-all: + short: 'steering: sync %s: %w' +err.steering.sync-name: + short: 'steering: %s: %w' +err.steering.unsupported-tool: + short: 'steering: unsupported sync tool %q; supported: %s' +err.steering.write-file: + short: 'steering: write %s: %w' +err.steering.write-steering-file: + short: 'write steering file: %w' +err.steering.write-init-file: + short: 'write %s: %w' err.state.load-state: short: 'loading state: %w' err.state.reading-state-dir: diff --git a/internal/assets/commands/text/hooks.yaml b/internal/assets/commands/text/hooks.yaml index 5ca487622..9cb9cd6f8 100644 --- a/internal/assets/commands/text/hooks.yaml +++ b/internal/assets/commands/text/hooks.yaml @@ -442,25 +442,6 @@ hook.copilot-cli: Run with --write to generate all files: ctx setup copilot-cli --write -hook.cursor: - short: | - Cursor IDE Integration - ====================== - - Add to your .cursorrules file: - - ```markdown - # Project Context - - Always read these files before making changes: - - .context/CONSTITUTION.md (NEVER violate these rules) - - .context/TASKS.md (current work) - - .context/CONVENTIONS.md (how we write code) - - .context/ARCHITECTURE.md (system structure) 
- - Run 'ctx agent' for a context summary. - Run 'ctx drift' to check for stale context. - ``` hook.supported-tools: short: | Supported tools: diff --git a/internal/assets/commands/text/mcp.yaml b/internal/assets/commands/text/mcp.yaml index fcb649a3f..3ec1241e4 100644 --- a/internal/assets/commands/text/mcp.yaml +++ b/internal/assets/commands/text/mcp.yaml @@ -50,6 +50,8 @@ mcp.err-parse: short: parse error mcp.err-query-required: short: query is required +mcp.err-search-read: + short: 'search: read %s: %w' mcp.res-agent: short: All context files assembled in priority read order mcp.res-architecture: @@ -363,3 +365,18 @@ mcp.gov-persist-nudge: short: '⚠ %d tool calls since last context write. Persist decisions, learnings, or completed tasks with ctx_add() or ctx_complete().' mcp.gov-violation-critical: short: '🚨 CRITICAL: %s — %s (at %s). Review this action immediately. If unintended, revert it.' + +mcp.tool-steering-get-desc: + short: Retrieve applicable steering files for a prompt. Without a prompt, returns always-included files only. +mcp.tool-search-desc: + short: Search across .context/ files for a query string. Returns matching lines with file paths and line numbers. +mcp.tool-session-start-desc: + short: Execute session-start hooks and return aggregated context from hook outputs. +mcp.tool-session-end-desc: + short: Execute session-end hooks with an optional summary. Returns aggregated context from hook outputs. 
+mcp.tool-prop-prompt: + short: Optional prompt text for steering file inclusion matching +mcp.tool-prop-search-query: + short: Text to search for across context files +mcp.tool-prop-summary: + short: Optional session summary passed to session-end hooks diff --git a/internal/assets/commands/text/ui.yaml b/internal/assets/commands/text/ui.yaml index 2ec1ca9fa..8d411ffde 100644 --- a/internal/assets/commands/text/ui.yaml +++ b/internal/assets/commands/text/ui.yaml @@ -38,6 +38,10 @@ agent.section-summaries: short: '## Also Noted' agent.section-tasks: short: '## Current Tasks' +agent.section-steering: + short: '## Steering' +agent.section-skill: + short: '## Skill' changes.fallback-label: short: 24 hour(s) ago (default) changes.code-authors: @@ -184,6 +188,12 @@ drift.stale-header: short: 'comment header in %s does not match template: run ctx init --force to sync' drift.check-template-header: short: All context file headers match templates +drift.invalid-tool: + short: 'unsupported tool identifier %q (supported: claude, cursor, cline, kiro, codex)' +drift.hook-no-exec: + short: 'hook script missing executable permission bit' +drift.stale-sync-file: + short: 'synced file is out of date vs source steering file' guide.default: short: | ctx - persistent AI context diff --git a/internal/audit/cross_package_types_test.go b/internal/audit/cross_package_types_test.go index e567d9034..8dc426eb0 100644 --- a/internal/audit/cross_package_types_test.go +++ b/internal/audit/cross_package_types_test.go @@ -201,12 +201,10 @@ func sameModule(a, b string) bool { } // cli/* consuming any domain module is the // standard consumer layer pattern. - if strings.HasPrefix(ma, "cli/") && - !strings.HasPrefix(mb, "cli/") { + if isConsumerLayer(ma) && !isConsumerLayer(mb) { return true } - if strings.HasPrefix(mb, "cli/") && - !strings.HasPrefix(ma, "cli/") { + if isConsumerLayer(mb) && !isConsumerLayer(ma) { return true } // err/ consumed from cli/ or . 
@@ -275,3 +273,9 @@ func moduleRoot(pkgPath string) string { parts := strings.SplitN(rest, "/", 2) return parts[0] } + +// isConsumerLayer returns true if the module root is a +// consumer layer that naturally imports domain types. +func isConsumerLayer(mod string) bool { + return strings.HasPrefix(mod, "cli/") +} diff --git a/internal/audit/dead_exports_test.go b/internal/audit/dead_exports_test.go index 346b07cd6..ec6878dd7 100644 --- a/internal/audit/dead_exports_test.go +++ b/internal/audit/dead_exports_test.go @@ -51,6 +51,25 @@ var testOnlyExports = map[string]bool{ "github.com/ActiveMemory/ctx/internal/task.MatchFull": true, } +// linuxOnlyExports lists exported symbols used only from +// _linux.go source files. These appear dead on non-Linux +// builds because go/packages loads only the current +// platform's file set. +var linuxOnlyExports = map[string]bool{ + "github.com/ActiveMemory/ctx/internal/config/sysinfo.ProcLoadavg": true, + "github.com/ActiveMemory/ctx/internal/config/sysinfo.ProcMeminfo": true, + "github.com/ActiveMemory/ctx/internal/config/sysinfo.LoadavgFmt": true, + "github.com/ActiveMemory/ctx/internal/config/sysinfo.MemInfoSuffix": true, + "github.com/ActiveMemory/ctx/internal/config/sysinfo.BytesPerKB": true, + "github.com/ActiveMemory/ctx/internal/config/sysinfo.FieldMemTotal": true, + "github.com/ActiveMemory/ctx/internal/config/sysinfo.FieldMemAvailable": true, + "github.com/ActiveMemory/ctx/internal/config/sysinfo.FieldMemFree": true, + "github.com/ActiveMemory/ctx/internal/config/sysinfo.FieldBuffers": true, + "github.com/ActiveMemory/ctx/internal/config/sysinfo.FieldCached": true, + "github.com/ActiveMemory/ctx/internal/config/sysinfo.FieldSwapTotal": true, + "github.com/ActiveMemory/ctx/internal/config/sysinfo.FieldSwapFree": true, +} + func TestNoDeadExports(t *testing.T) { pkgs := loadPackages(t) @@ -132,6 +151,12 @@ func TestNoDeadExports(t *testing.T) { delete(defs, key) } + // Phase 3b: remove Linux-only exports (used from + // 
_linux.go files not loaded on this platform). + for key := range linuxOnlyExports { + delete(defs, key) + } + // Phase 4: report survivors as dead exports. var violations []string for _, info := range defs { diff --git a/internal/bootstrap/bootstrap_test.go b/internal/bootstrap/bootstrap_test.go index deba99b33..6f6280229 100644 --- a/internal/bootstrap/bootstrap_test.go +++ b/internal/bootstrap/bootstrap_test.go @@ -11,6 +11,7 @@ import ( "path/filepath" "testing" + "github.com/ActiveMemory/ctx/internal/cli/resolve" "github.com/ActiveMemory/ctx/internal/config/cli" "github.com/ActiveMemory/ctx/internal/config/ctx" "github.com/ActiveMemory/ctx/internal/config/flag" @@ -322,3 +323,97 @@ func TestInitGuard_AllowsInitializedCommand(t *testing.T) { t.Fatalf("initialized command should succeed: %v", execErr) } } + +func TestRootCmdToolFlag(t *testing.T) { + cmd := RootCmd() + + f := cmd.PersistentFlags().Lookup(flag.Tool) + if f == nil { + t.Fatal("--tool flag not found") + } + if f.DefValue != "" { + t.Errorf("--tool default = %q, want empty", f.DefValue) + } +} + +func TestResolveTool_FlagOverridesRC(t *testing.T) { + rc.Reset() + t.Cleanup(func() { rc.Reset() }) + + cmd := RootCmd() + dummy := &cobra.Command{ + Use: "dummy", + Annotations: map[string]string{cli.AnnotationSkipInit: "true"}, + RunE: func(cmd *cobra.Command, args []string) error { + tool, err := resolve.Tool(cmd) + if err != nil { + return err + } + if tool != "cursor" { + t.Errorf("resolve.Tool() = %q, want %q", tool, "cursor") + } + return nil + }, + } + cmd.AddCommand(dummy) + cmd.SetArgs([]string{"--allow-outside-cwd", "--tool", "cursor", "dummy"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("Execute() error: %v", err) + } +} + +func TestResolveTool_FallsBackToRC(t *testing.T) { + // When --tool is not set, ResolveTool falls back to rc.Tool(). + // With a fresh rc (no .ctxrc), rc.Tool() returns "" so this + // should return an error. We test the fallback path indirectly. 
+ rc.Reset() + t.Cleanup(func() { rc.Reset() }) + + cmd := RootCmd() + dummy := &cobra.Command{ + Use: "dummy", + Annotations: map[string]string{cli.AnnotationSkipInit: "true"}, + RunE: func(cmd *cobra.Command, args []string) error { + _, err := resolve.Tool(cmd) + if err == nil { + t.Error("resolve.Tool() should return error when no tool is set") + } + return nil // swallow error so Execute succeeds + }, + } + cmd.AddCommand(dummy) + cmd.SetArgs([]string{"--allow-outside-cwd", "dummy"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("Execute() error: %v", err) + } +} + +func TestResolveTool_ErrorMessage(t *testing.T) { + rc.Reset() + t.Cleanup(func() { rc.Reset() }) + + cmd := RootCmd() + dummy := &cobra.Command{ + Use: "dummy", + Annotations: map[string]string{cli.AnnotationSkipInit: "true"}, + RunE: func(cmd *cobra.Command, args []string) error { + _, err := resolve.Tool(cmd) + if err == nil { + t.Fatal("expected error") + } + want := "no tool specified: use --tool or set the tool field in .ctxrc" + if err.Error() != want { + t.Errorf("error = %q, want %q", err.Error(), want) + } + return nil + }, + } + cmd.AddCommand(dummy) + cmd.SetArgs([]string{"--allow-outside-cwd", "dummy"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("Execute() error: %v", err) + } +} diff --git a/internal/bootstrap/cmd.go b/internal/bootstrap/cmd.go index 5295b91b7..9eaf19f5d 100644 --- a/internal/bootstrap/cmd.go +++ b/internal/bootstrap/cmd.go @@ -122,6 +122,11 @@ func RootCmd() *cobra.Command { flag.AllowOutsideCwd, embedFlag.DescKeyAllowOutsideCwd, ) + c.PersistentFlags().String( + flag.Tool, + "", + desc.Flag(embedFlag.DescKeyTool), + ) return c } diff --git a/internal/bootstrap/group.go b/internal/bootstrap/group.go index 88a91d6a6..08845d3b6 100644 --- a/internal/bootstrap/group.go +++ b/internal/bootstrap/group.go @@ -34,11 +34,14 @@ import ( "github.com/ActiveMemory/ctx/internal/cli/serve" "github.com/ActiveMemory/ctx/internal/cli/setup" 
"github.com/ActiveMemory/ctx/internal/cli/site" + "github.com/ActiveMemory/ctx/internal/cli/skill" "github.com/ActiveMemory/ctx/internal/cli/status" + "github.com/ActiveMemory/ctx/internal/cli/steering" "github.com/ActiveMemory/ctx/internal/cli/sync" "github.com/ActiveMemory/ctx/internal/cli/system" "github.com/ActiveMemory/ctx/internal/cli/task" "github.com/ActiveMemory/ctx/internal/cli/trace" + "github.com/ActiveMemory/ctx/internal/cli/trigger" "github.com/ActiveMemory/ctx/internal/cli/watch" "github.com/ActiveMemory/ctx/internal/cli/why" embedCmd "github.com/ActiveMemory/ctx/internal/config/embed/cmd" @@ -65,6 +68,7 @@ func contextCmds() []registration { {add.Cmd, embedCmd.GroupContext}, {load.Cmd, embedCmd.GroupContext}, {agent.Cmd, embedCmd.GroupContext}, + {skill.Cmd, embedCmd.GroupContext}, {sync.Cmd, embedCmd.GroupContext}, {drift.Cmd, embedCmd.GroupContext}, {compact.Cmd, embedCmd.GroupContext}, @@ -117,6 +121,8 @@ func runtimeCmds() []registration { func integrations() []registration { return []registration{ {setup.Cmd, embedCmd.GroupIntegration}, + {steering.Cmd, embedCmd.GroupIntegration}, + {trigger.Cmd, embedCmd.GroupIntegration}, {mcp.Cmd, embedCmd.GroupIntegration}, {watch.Cmd, embedCmd.GroupIntegration}, {notify.Cmd, embedCmd.GroupIntegration}, diff --git a/internal/cli/agent/cmd/root/cmd.go b/internal/cli/agent/cmd/root/cmd.go index fb4d2b750..47cbe6c60 100644 --- a/internal/cli/agent/cmd/root/cmd.go +++ b/internal/cli/agent/cmd/root/cmd.go @@ -12,6 +12,7 @@ import ( "github.com/spf13/cobra" "github.com/ActiveMemory/ctx/internal/assets/read/desc" + coreSteering "github.com/ActiveMemory/ctx/internal/cli/agent/core/steering" "github.com/ActiveMemory/ctx/internal/config/agent" "github.com/ActiveMemory/ctx/internal/config/embed/cmd" "github.com/ActiveMemory/ctx/internal/config/embed/flag" @@ -25,22 +26,24 @@ import ( // // The command reads context files from .context/ and outputs a concise packet // optimized for AI consumption, including 
constitution rules, active tasks, -// conventions, and recent decisions. +// conventions, recent decisions, steering files, and optional skill content. // // Flags: // - --budget: Token budget for the context packet (default 8000) // - --format: Output format, "md" for Markdown or "json" (default "md") // - --cooldown: Suppress repeated output within this duration (default 10m) // - --session: Session identifier for cooldown tombstone isolation +// - --skill: Include named skill content in context packet // // Returns: // - *cobra.Command: Configured agent command with flags registered func Cmd() *cobra.Command { var ( - budget int - format string - cooldown time.Duration - session string + budget int + format string + cooldown time.Duration + session string + skillName string ) short, long := desc.Command(cmd.DescKeyAgent) @@ -53,7 +56,24 @@ func Cmd() *cobra.Command { if !cmd.Flags().Changed(cFlag.Budget) { budget = rc.TokenBudget() } - return Run(cmd, budget, format, cooldown, session) + + // Tier 6: Load applicable steering files. + steeringBodies := coreSteering.LoadBodies() + + // Tier 7: Load skill content if --skill is provided. 
+ var skillBody string + if skillName != "" { + sk, loadErr := coreSteering.LoadSkill(skillName) + if loadErr != nil { + return loadErr + } + skillBody = sk + } + + return Run( + cmd, budget, format, cooldown, session, + steeringBodies, skillBody, + ) }, } @@ -76,6 +96,10 @@ func Cmd() *cobra.Command { c, &session, cFlag.Session, flag.DescKeyAgentSession, ) + flagbind.StringFlag( + c, &skillName, + cFlag.Skill, flag.DescKeyAgentSkill, + ) return c } diff --git a/internal/cli/agent/cmd/root/run.go b/internal/cli/agent/cmd/root/run.go index eb24f3b80..5ab25c52a 100644 --- a/internal/cli/agent/cmd/root/run.go +++ b/internal/cli/agent/cmd/root/run.go @@ -35,6 +35,8 @@ import ( // - cooldown: duration to suppress repeated output (0 to disable) // - session: session identifier for tombstone isolation (empty to // disable cooldown) +// - steeringBodies: pre-loaded steering file bodies (may be nil) +// - skillBody: pre-loaded skill content (empty to omit) // // Returns: // - error: Non-nil if context loading fails or .context/ is not found @@ -44,6 +46,8 @@ func Run( format string, cooldown time.Duration, session string, + steeringBodies []string, + skillBody string, ) error { if coreCooldown.Active(session, cooldown) { return nil @@ -59,9 +63,15 @@ func Run( var outputErr error if format == fmt.FormatJSON { - outputErr = coreBudget.OutputAgentJSON(cmd, ctx, budget) + outputErr = coreBudget.OutputAgentJSON( + cmd, ctx, budget, + steeringBodies, skillBody, + ) } else { - outputErr = coreBudget.OutputAgentMarkdown(cmd, ctx, budget) + outputErr = coreBudget.OutputAgentMarkdown( + cmd, ctx, budget, + steeringBodies, skillBody, + ) } if outputErr == nil { diff --git a/internal/cli/agent/core/budget/assemble.go b/internal/cli/agent/core/budget/assemble.go index bad3a0cca..8eead0cc7 100644 --- a/internal/cli/agent/core/budget/assemble.go +++ b/internal/cli/agent/core/budget/assemble.go @@ -27,14 +27,23 @@ import ( // - Tier 2 (40%): active tasks // - Tier 3 (20%): conventions // 
- Tier 4+5 (remaining): decisions and learnings, scored by relevance +// - Tier 6 (remaining after 4+5): steering files +// - Tier 7 (remaining after 6): skill content (--skill flag) // // Parameters: // - ctx: Loaded context containing the files // - budget: Token budget to respect +// - steeringBodies: Pre-filtered steering file bodies to include +// - skillBody: Skill content to include (empty string if none) // // Returns: // - *AssembledPacket: Assembled packet within budget -func AssemblePacket(ctx *entity.Context, budget int) *AssembledPacket { +func AssemblePacket( + ctx *entity.Context, + budget int, + steeringBodies []string, + skillBody string, +) *AssembledPacket { now := time.Now() pkt := &AssembledPacket{ Budget: budget, @@ -102,10 +111,30 @@ func AssemblePacket(ctx *entity.Context, budget int) *AssembledPacket { pkt.Learnings, learnSummaries = FillSection(scoredLearnings, learnTokens) pkt.Summaries = append(pkt.Summaries, learnSummaries...) - pkt.TokensUsed = tier1Tokens + taskTokens + convTokens + + usedSoFar := tier1Tokens + taskTokens + convTokens + EstimateSliceTokens(pkt.Decisions) + EstimateSliceTokens(pkt.Learnings) + EstimateSliceTokens(pkt.Summaries) + remaining = budget - usedSoFar + + // Tier 6: Steering files (from remaining budget) + if remaining > 0 && len(steeringBodies) > 0 { + pkt.Steering = FitItems(steeringBodies, remaining) + steeringTokens := EstimateSliceTokens(pkt.Steering) + remaining -= steeringTokens + usedSoFar += steeringTokens + } + + // Tier 7: Skill content (from remaining budget) + if remaining > 0 && skillBody != "" { + skillTokens := ctxToken.EstimateString(skillBody) + if skillTokens <= remaining { + pkt.Skill = skillBody + usedSoFar += skillTokens + } + } + + pkt.TokensUsed = usedSoFar return pkt } diff --git a/internal/cli/agent/core/budget/budget_test.go b/internal/cli/agent/core/budget/budget_test.go index 69992c7df..c79f8e694 100644 --- a/internal/cli/agent/core/budget/budget_test.go +++ 
b/internal/cli/agent/core/budget/budget_test.go @@ -305,3 +305,132 @@ func TestRenderMarkdownPacket_Empty(t *testing.T) { t.Error("should not render empty tasks section") } } + +func TestRenderMarkdownPacket_WithSteering(t *testing.T) { + pkt := &AssembledPacket{ + ReadOrder: []string{".context/CONSTITUTION.md"}, + Constitution: []string{"Never violate"}, + Steering: []string{"Use RESTful conventions", "Always return JSON"}, + Instruction: "Confirm context reading.", + Budget: 8000, + TokensUsed: 500, + } + + output := RenderMarkdownPacket(pkt) + + checks := []string{ + "## Steering", + "Use RESTful conventions", + "Always return JSON", + } + for _, check := range checks { + if !strings.Contains(output, check) { + t.Errorf("output missing %q", check) + } + } +} + +func TestRenderMarkdownPacket_WithSkill(t *testing.T) { + pkt := &AssembledPacket{ + ReadOrder: []string{".context/CONSTITUTION.md"}, + Skill: "# React Patterns\n\nUse functional components.", + Instruction: "Confirm context reading.", + Budget: 8000, + TokensUsed: 500, + } + + output := RenderMarkdownPacket(pkt) + + checks := []string{ + "## Skill", + "React Patterns", + "Use functional components.", + } + for _, check := range checks { + if !strings.Contains(output, check) { + t.Errorf("output missing %q", check) + } + } +} + +func TestRenderMarkdownPacket_NoSteeringOrSkill(t *testing.T) { + pkt := &AssembledPacket{ + ReadOrder: []string{".context/CONSTITUTION.md"}, + Instruction: "Confirm context reading.", + Budget: 8000, + TokensUsed: 500, + } + + output := RenderMarkdownPacket(pkt) + + if strings.Contains(output, "## Steering") { + t.Error("should not render empty steering section") + } + if strings.Contains(output, "## Skill") { + t.Error("should not render empty skill section") + } +} + +func TestAssemblePacket_WithSteering(t *testing.T) { + ctx := &entity.Context{} + bodies := []string{"Rule one", "Rule two"} + + pkt := AssemblePacket(ctx, 8000, bodies, "") + + if len(pkt.Steering) == 0 { + 
t.Error("expected steering files in packet") + } + if pkt.Steering[0] != "Rule one" { + t.Errorf("expected first steering body %q, got %q", "Rule one", pkt.Steering[0]) + } +} + +func TestAssemblePacket_WithSkill(t *testing.T) { + ctx := &entity.Context{} + skillBody := "# My Skill\n\nDo things." + + pkt := AssemblePacket(ctx, 8000, nil, skillBody) + + if pkt.Skill != skillBody { + t.Errorf("expected skill body %q, got %q", skillBody, pkt.Skill) + } +} + +func TestAssemblePacket_NoSteeringNoSkill(t *testing.T) { + ctx := &entity.Context{} + + pkt := AssemblePacket(ctx, 8000, nil, "") + + if len(pkt.Steering) != 0 { + t.Errorf("expected no steering, got %d", len(pkt.Steering)) + } + if pkt.Skill != "" { + t.Errorf("expected empty skill, got %q", pkt.Skill) + } +} + +func TestAssemblePacket_SteeringRespectsBudget(t *testing.T) { + ctx := &entity.Context{} + // Use a very small budget so steering gets truncated + bigBody := strings.Repeat("x", 5000) + bodies := []string{bigBody, bigBody} + + pkt := AssemblePacket(ctx, 100, bodies, "") + + // With a tiny budget, at most one steering body should fit + // (FitItems always includes at least one) + if len(pkt.Steering) > 1 { + t.Errorf("expected at most 1 steering body with tiny budget, got %d", len(pkt.Steering)) + } +} + +func TestAssemblePacket_SkillOmittedWhenBudgetExhausted(t *testing.T) { + ctx := &entity.Context{} + // Use a very small budget + pkt := AssemblePacket(ctx, 1, nil, strings.Repeat("x", 5000)) + + // Skill should be omitted when budget is exhausted + if pkt.Skill != "" { + t.Error("expected skill to be omitted when budget exhausted") + } +} diff --git a/internal/cli/agent/core/budget/doc.go b/internal/cli/agent/core/budget/doc.go index 60a5ea8c9..1b5cfc462 100644 --- a/internal/cli/agent/core/budget/doc.go +++ b/internal/cli/agent/core/budget/doc.go @@ -7,8 +7,8 @@ // Package budget implements the token-budgeted context assembly // algorithm for the agent command. 
// -// [AssemblePacket] allocates tokens across five tiers (constitution, -// tasks, conventions, decisions, learnings). [Split] divides +// [AssemblePacket] allocates tokens across seven tiers (constitution, +// tasks, conventions, decisions, learnings, steering, skill). [Split] divides // remaining budget between two scored sections. [FillSection] // applies two-tier degradation: full entries then title-only // summaries. [FitItems] and [EstimateSliceTokens] handle diff --git a/internal/cli/agent/core/budget/out.go b/internal/cli/agent/core/budget/out.go index 703793e4d..a29473faa 100644 --- a/internal/cli/agent/core/budget/out.go +++ b/internal/cli/agent/core/budget/out.go @@ -24,13 +24,19 @@ import ( // - cmd: Cobra command for output stream // - ctx: Loaded context containing the files // - budget: Token budget for content selection +// - steeringBodies: Pre-filtered steering file bodies +// - skillBody: Skill content (empty if none) // // Returns: // - error: Non-nil if JSON encoding fails func OutputAgentJSON( - cmd *cobra.Command, ctx *entity.Context, budget int, + cmd *cobra.Command, + ctx *entity.Context, + budget int, + steeringBodies []string, + skillBody string, ) error { - pkt := AssemblePacket(ctx, budget) + pkt := AssemblePacket(ctx, budget, steeringBodies, skillBody) packet := packet{ Generated: time.Now().UTC().Format(time.RFC3339), @@ -43,6 +49,8 @@ func OutputAgentJSON( Decisions: pkt.Decisions, Learnings: pkt.Learnings, Summaries: pkt.Summaries, + Steering: pkt.Steering, + Skill: pkt.Skill, Instruction: pkt.Instruction, } @@ -55,19 +63,26 @@ func OutputAgentJSON( // // Uses budget-aware assembly to score entries and respect the token budget. // Output includes sections for constitution, tasks, conventions, -// decisions (full body), learnings (full body), and title-only summaries. +// decisions (full body), learnings (full body), title-only summaries, +// steering files, and skill content. 
// // Parameters: // - cmd: Cobra command for output stream // - ctx: Loaded context containing the files // - budget: Token budget for content selection +// - steeringBodies: Pre-filtered steering file bodies +// - skillBody: Skill content (empty if none) // // Returns: // - error: Always nil (included for interface consistency) func OutputAgentMarkdown( - cmd *cobra.Command, ctx *entity.Context, budget int, + cmd *cobra.Command, + ctx *entity.Context, + budget int, + steeringBodies []string, + skillBody string, ) error { - pkt := AssemblePacket(ctx, budget) + pkt := AssemblePacket(ctx, budget, steeringBodies, skillBody) writeAgent.Packet(cmd, RenderMarkdownPacket(pkt)) return nil } diff --git a/internal/cli/agent/core/budget/render.go b/internal/cli/agent/core/budget/render.go index e19bff034..1677a8c7a 100644 --- a/internal/cli/agent/core/budget/render.go +++ b/internal/cli/agent/core/budget/render.go @@ -100,6 +100,20 @@ func RenderMarkdownPacket(pkt *AssembledPacket) string { sb.WriteString(nl) } + // Steering + if len(pkt.Steering) > 0 { + sb.WriteString(desc.Text(text.DescKeyAgentSectionSteering) + nl) + for _, s := range pkt.Steering { + sb.WriteString(s + nl + nl) + } + } + + // Skill + if pkt.Skill != "" { + sb.WriteString(desc.Text(text.DescKeyAgentSectionSkill) + nl) + sb.WriteString(pkt.Skill + nl + nl) + } + sb.WriteString(pkt.Instruction + nl) return sb.String() diff --git a/internal/cli/agent/core/budget/types.go b/internal/cli/agent/core/budget/types.go index 052a8f25b..f7feccbd7 100644 --- a/internal/cli/agent/core/budget/types.go +++ b/internal/cli/agent/core/budget/types.go @@ -18,6 +18,8 @@ type packet struct { Decisions []string `json:"decisions"` Learnings []string `json:"learnings,omitempty"` Summaries []string `json:"summaries,omitempty"` + Steering []string `json:"steering,omitempty"` + Skill string `json:"skill,omitempty"` Instruction string `json:"instruction"` } @@ -31,6 +33,8 @@ type packet struct { // - Decisions: Architectural 
decisions (scored) // - Learnings: Gotchas and tips (scored) // - Summaries: Title-only overflow entries +// - Steering: Applicable steering file bodies +// - Skill: Named skill content (from --skill flag) // - Instruction: Behavioral instruction text // - Budget: Token budget limit // - TokensUsed: Estimated tokens consumed @@ -42,6 +46,8 @@ type AssembledPacket struct { Decisions []string Learnings []string Summaries []string + Steering []string + Skill string Instruction string Budget int TokensUsed int diff --git a/internal/cli/agent/core/steering/doc.go b/internal/cli/agent/core/steering/doc.go new file mode 100644 index 000000000..313ef5fca --- /dev/null +++ b/internal/cli/agent/core/steering/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package steering loads steering and skill content +// for inclusion in the agent context packet. +// +// It filters steering files by tool and inclusion mode, +// and resolves named skills from the skills directory. +package steering diff --git a/internal/cli/agent/core/steering/steering.go b/internal/cli/agent/core/steering/steering.go new file mode 100644 index 000000000..b857a0cea --- /dev/null +++ b/internal/cli/agent/core/steering/steering.go @@ -0,0 +1,61 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package steering + +import ( + "errors" + "os" + "path/filepath" + + "github.com/ActiveMemory/ctx/internal/config/dir" + errSkill "github.com/ActiveMemory/ctx/internal/err/skill" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/skill" + "github.com/ActiveMemory/ctx/internal/steering" +) + +// LoadBodies loads and filters steering files, +// returning their bodies as strings. 
Returns nil +// when the steering directory does not exist or +// contains no applicable files. +func LoadBodies() []string { + steeringDir := rc.SteeringDir() + + files, loadErr := steering.LoadAll(steeringDir) + if loadErr != nil { + return nil + } + + filtered := steering.Filter( + files, "", nil, rc.Tool(), + ) + + var bodies []string + for _, sf := range filtered { + if sf.Body != "" { + bodies = append(bodies, sf.Body) + } + } + return bodies +} + +// LoadSkill loads a named skill and returns its body +// content. Returns an error if the skill is not found. +func LoadSkill(name string) (string, error) { + skillsDir := filepath.Join( + rc.ContextDir(), dir.Skills, + ) + + sk, loadErr := skill.Load(skillsDir, name) + if loadErr != nil { + if errors.Is(loadErr, os.ErrNotExist) { + return "", errSkill.NotFound(name) + } + return "", errSkill.LoadQuoted(name, loadErr) + } + return sk.Body, nil +} diff --git a/internal/cli/initialize/cmd/root/run.go b/internal/cli/initialize/cmd/root/run.go index 0bc14c9f3..99a8d9737 100644 --- a/internal/cli/initialize/cmd/root/run.go +++ b/internal/cli/initialize/cmd/root/run.go @@ -27,6 +27,7 @@ import ( "github.com/ActiveMemory/ctx/internal/config/claude" "github.com/ActiveMemory/ctx/internal/config/cli" "github.com/ActiveMemory/ctx/internal/config/ctx" + "github.com/ActiveMemory/ctx/internal/config/dir" "github.com/ActiveMemory/ctx/internal/config/embed/text" "github.com/ActiveMemory/ctx/internal/config/file" "github.com/ActiveMemory/ctx/internal/config/fs" @@ -99,6 +100,15 @@ func Run( return errFs.Mkdir(contextDir, mkdirErr) } + // Create .context/ subdirectories for steering, hooks, and skills. + // Uses SafeMkdirAll which is a no-op when the directory already exists. 
+ for _, sub := range []string{dir.Steering, dir.Hooks, dir.Skills} { + subDir := filepath.Join(contextDir, sub) + if mkdirErr := ctxIo.SafeMkdirAll(subDir, fs.PermExec); mkdirErr != nil { + return errFs.Mkdir(subDir, mkdirErr) + } + } + // Get the list of templates to create var templatesToCreate []string if minimal { diff --git a/internal/cli/initialize/init_test.go b/internal/cli/initialize/init_test.go index e7ba5c307..9b598cfd2 100644 --- a/internal/cli/initialize/init_test.go +++ b/internal/cli/initialize/init_test.go @@ -63,6 +63,87 @@ func TestInitCommand(t *testing.T) { } } +func TestInitCreatesSteeringHooksSkillsDirs(t *testing.T) { + tmpDir, err := os.MkdirTemp("", "cli-init-dirs-*") + if err != nil { + t.Fatalf("failed to create temp dir: %v", err) + } + defer func() { _ = os.RemoveAll(tmpDir) }() + + origDir, _ := os.Getwd() + if err = os.Chdir(tmpDir); err != nil { + t.Fatalf("failed to chdir: %v", err) + } + defer func() { _ = os.Chdir(origDir) }() + t.Setenv("HOME", tmpDir) + t.Setenv(env.SkipPathCheck, env.True) + + cmd := Cmd() + cmd.SetArgs([]string{}) + if err = cmd.Execute(); err != nil { + t.Fatalf("init command failed: %v", err) + } + + for _, sub := range []string{"steering", "hooks", "skills"} { + dirPath := filepath.Join(tmpDir, ".context", sub) + info, statErr := os.Stat(dirPath) + if statErr != nil { + t.Errorf(".context/%s was not created: %v", sub, statErr) + continue + } + if !info.IsDir() { + t.Errorf(".context/%s should be a directory", sub) + } + } +} + +func TestInitSkipsExistingSteeringHooksSkillsDirs(t *testing.T) { + tmpDir, err := os.MkdirTemp("", "cli-init-dirs-exist-*") + if err != nil { + t.Fatalf("failed to create temp dir: %v", err) + } + defer func() { _ = os.RemoveAll(tmpDir) }() + + origDir, _ := os.Getwd() + if err = os.Chdir(tmpDir); err != nil { + t.Fatalf("failed to chdir: %v", err) + } + defer func() { _ = os.Chdir(origDir) }() + t.Setenv("HOME", tmpDir) + t.Setenv(env.SkipPathCheck, env.True) + + // Pre-create 
the directories with a marker file inside each. + for _, sub := range []string{"steering", "hooks", "skills"} { + dirPath := filepath.Join(tmpDir, ".context", sub) + if mkErr := os.MkdirAll(dirPath, 0755); mkErr != nil { + t.Fatalf("failed to pre-create %s: %v", sub, mkErr) + } + marker := filepath.Join(dirPath, "marker.txt") + if wErr := os.WriteFile(marker, []byte("keep"), 0644); wErr != nil { + t.Fatalf("failed to write marker in %s: %v", sub, wErr) + } + } + + cmd := Cmd() + cmd.SetArgs([]string{"--force"}) + if err = cmd.Execute(); err != nil { + t.Fatalf("init command failed: %v", err) + } + + // Verify directories still exist and marker files are preserved. + for _, sub := range []string{"steering", "hooks", "skills"} { + marker := filepath.Join(tmpDir, ".context", sub, "marker.txt") + content, readErr := os.ReadFile(marker) + if readErr != nil { + t.Errorf(".context/%s/marker.txt was lost: %v", sub, readErr) + continue + } + if string(content) != "keep" { + t.Errorf(".context/%s/marker.txt content changed", sub) + } + } +} + func TestInitMergeInsertsAfterH1(t *testing.T) { tmpDir, err := os.MkdirTemp("", "cli-init-merge-h1-*") if err != nil { diff --git a/internal/cli/resolve/doc.go b/internal/cli/resolve/doc.go new file mode 100644 index 000000000..b639e8e10 --- /dev/null +++ b/internal/cli/resolve/doc.go @@ -0,0 +1,13 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package resolve provides shared CLI flag resolution helpers +// used across multiple command packages. +// +// [Tool] resolves the active tool identifier from the --tool +// flag or the .ctxrc tool field, returning an error when +// neither source provides a value. 
+package resolve diff --git a/internal/cli/resolve/tool.go b/internal/cli/resolve/tool.go new file mode 100644 index 000000000..13472f322 --- /dev/null +++ b/internal/cli/resolve/tool.go @@ -0,0 +1,45 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package resolve + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/config/flag" + errCli "github.com/ActiveMemory/ctx/internal/err/cli" + "github.com/ActiveMemory/ctx/internal/rc" +) + +// Tool returns the active tool identifier from the --tool flag +// or the .ctxrc tool field. +// +// Resolution order: +// 1. --tool flag (if explicitly set on the command) +// 2. rc.Tool() (from .ctxrc) +// +// Returns an error if neither source provides a value. +// +// Parameters: +// - cmd: The cobra command to read the --tool flag from +// +// Returns: +// - string: The resolved tool identifier +// - error: Non-nil when no tool is configured +func Tool(cmd *cobra.Command) (string, error) { + if cmd.Flags().Changed(flag.Tool) { + v, _ := cmd.Flags().GetString(flag.Tool) + if v != "" { + return v, nil + } + } + + if t := rc.Tool(); t != "" { + return t, nil + } + + return "", errCli.NoToolSpecified() +} diff --git a/internal/cli/setup/cmd/root/run.go b/internal/cli/setup/cmd/root/run.go index 257bf4ca4..0d6c4edb6 100644 --- a/internal/cli/setup/cmd/root/run.go +++ b/internal/cli/setup/cmd/root/run.go @@ -14,8 +14,11 @@ import ( "github.com/ActiveMemory/ctx/internal/assets/read/agent" "github.com/ActiveMemory/ctx/internal/assets/read/desc" coreAgents "github.com/ActiveMemory/ctx/internal/cli/setup/core/agents" + coreCline "github.com/ActiveMemory/ctx/internal/cli/setup/core/cline" coreCopilot "github.com/ActiveMemory/ctx/internal/cli/setup/core/copilot" coreCopCLI "github.com/ActiveMemory/ctx/internal/cli/setup/core/copilot_cli" + coreCursor 
"github.com/ActiveMemory/ctx/internal/cli/setup/core/cursor" + coreKiro "github.com/ActiveMemory/ctx/internal/cli/setup/core/kiro" "github.com/ActiveMemory/ctx/internal/config/embed/text" cfgHook "github.com/ActiveMemory/ctx/internal/config/hook" "github.com/ActiveMemory/ctx/internal/err/config" @@ -55,7 +58,22 @@ func Run(cmd *cobra.Command, args []string, writeFile bool) error { writeSetup.InfoTool(cmd, desc.Text(text.DescKeyHookClaude)) case cfgHook.ToolCursor: - writeSetup.InfoTool(cmd, desc.Text(text.DescKeyHookCursor)) + if writeFile { + return coreCursor.Deploy(cmd) + } + writeSetup.InfoCursorIntegration(cmd) + + case cfgHook.ToolKiro: + if writeFile { + return coreKiro.Deploy(cmd) + } + writeSetup.InfoKiroIntegration(cmd) + + case cfgHook.ToolCline: + if writeFile { + return coreCline.Deploy(cmd) + } + writeSetup.InfoClineIntegration(cmd) case cfgHook.ToolAider: writeSetup.InfoTool(cmd, desc.Text(text.DescKeyHookAider)) diff --git a/internal/cli/setup/core/cline/cline.go b/internal/cli/setup/core/cline/cline.go new file mode 100644 index 000000000..45303ffe3 --- /dev/null +++ b/internal/cli/setup/core/cline/cline.go @@ -0,0 +1,38 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package cline generates Cline MCP and steering configuration files. +package cline + +import ( + "github.com/spf13/cobra" + + writeSetup "github.com/ActiveMemory/ctx/internal/write/setup" +) + +// Cline deploy constants. +const ( + // displayName is the display name for Cline. + displayName = "Cline" + // mcpConfigPath is the deployed MCP config path. + mcpConfigPath = ".vscode/mcp.json" + // steeringPath is the deployed steering path. + steeringPath = ".clinerules/" +) + +// Deploy generates Cline integration files: +// 1. .vscode/mcp.json — MCP server configuration (shared with VS Code) +// 2. 
.clinerules/*.md — synced steering files +func Deploy(cmd *cobra.Command) error { + if mcpErr := ensureMCPConfig(cmd); mcpErr != nil { + return mcpErr + } + if steerErr := syncSteering(cmd); steerErr != nil { + return steerErr + } + writeSetup.DeployComplete(cmd, displayName, mcpConfigPath, steeringPath) + return nil +} diff --git a/internal/cli/setup/core/cline/deploy.go b/internal/cli/setup/core/cline/deploy.go new file mode 100644 index 000000000..931172e69 --- /dev/null +++ b/internal/cli/setup/core/cline/deploy.go @@ -0,0 +1,98 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package cline + +import ( + "encoding/json" + "os" + "path/filepath" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/config/fs" + cfgHook "github.com/ActiveMemory/ctx/internal/config/hook" + mcpServer "github.com/ActiveMemory/ctx/internal/config/mcp/server" + "github.com/ActiveMemory/ctx/internal/config/token" + cfgVscode "github.com/ActiveMemory/ctx/internal/config/vscode" + errSetup "github.com/ActiveMemory/ctx/internal/err/setup" + ctxIo "github.com/ActiveMemory/ctx/internal/io" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/steering" + writeSetup "github.com/ActiveMemory/ctx/internal/write/setup" +) + +// ensureMCPConfig creates .vscode/mcp.json with the ctx +// MCP server configuration. Skips if the file exists. 
+func ensureMCPConfig(cmd *cobra.Command) error { + target := filepath.Join( + cfgVscode.Dir, cfgVscode.FileMCPJSON, + ) + + if _, statErr := ctxIo.SafeStat(target); statErr == nil { + writeSetup.DeployFileExists(cmd, target) + return nil + } + + if mkdirErr := ctxIo.SafeMkdirAll( + cfgVscode.Dir, fs.PermExec, + ); mkdirErr != nil { + return errSetup.CreateDir(cfgVscode.Dir, mkdirErr) + } + + cfg := vscodeMCPConfig{ + Servers: map[string]vscodeMCPServer{ + mcpServer.Name: { + Command: mcpServer.Command, + Args: mcpServer.Args(), + }, + }, + } + + data, marshalErr := json.MarshalIndent( + cfg, "", " ", + ) + if marshalErr != nil { + return errSetup.MarshalConfig(marshalErr) + } + data = append(data, token.NewlineLF[0]) + + if writeErr := ctxIo.SafeWriteFile( + target, data, fs.PermFile, + ); writeErr != nil { + return errSetup.WriteFile(target, writeErr) + } + + writeSetup.DeployFileCreated(cmd, target) + return nil +} + +// syncSteering syncs steering files to Cline format +// if a steering directory exists. +func syncSteering(cmd *cobra.Command) error { + steeringDir := rc.SteeringDir() + if _, statErr := ctxIo.SafeStat( + steeringDir, + ); os.IsNotExist(statErr) { + writeSetup.DeployNoSteering(cmd) + return nil + } + + report, syncErr := steering.SyncTool( + steeringDir, token.Dot, cfgHook.ToolCline, + ) + if syncErr != nil { + return errSetup.SyncSteering(syncErr) + } + + for _, name := range report.Written { + writeSetup.DeploySteeringSynced(cmd, name) + } + for _, name := range report.Skipped { + writeSetup.DeploySteeringSkipped(cmd, name) + } + return nil +} diff --git a/internal/cli/setup/core/cline/doc.go b/internal/cli/setup/core/cline/doc.go new file mode 100644 index 000000000..ecb1090db --- /dev/null +++ b/internal/cli/setup/core/cline/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +// Package cline generates Cline editor integration files. +// +// [Deploy] creates .vscode/mcp.json for MCP server +// configuration and syncs steering files to .clinerules/ +// in Cline's native markdown format. +package cline diff --git a/internal/cli/setup/core/cline/types.go b/internal/cli/setup/core/cline/types.go new file mode 100644 index 000000000..8efb7b02a --- /dev/null +++ b/internal/cli/setup/core/cline/types.go @@ -0,0 +1,18 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package cline + +// vscodeMCPConfig is the top-level mcp.json structure for Cline. +type vscodeMCPConfig struct { + Servers map[string]vscodeMCPServer `json:"servers"` +} + +// vscodeMCPServer describes one MCP server entry in mcp.json. +type vscodeMCPServer struct { + Command string `json:"command"` + Args []string `json:"args"` +} diff --git a/internal/cli/setup/core/cursor/cursor.go b/internal/cli/setup/core/cursor/cursor.go new file mode 100644 index 000000000..1d2acf7cf --- /dev/null +++ b/internal/cli/setup/core/cursor/cursor.go @@ -0,0 +1,42 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package cursor generates Cursor MCP and steering configuration files. +package cursor + +import ( + "github.com/spf13/cobra" + + writeSetup "github.com/ActiveMemory/ctx/internal/write/setup" +) + +// Cursor configuration paths. +const ( + // dirCursor is the Cursor editor config directory. + dirCursor = ".cursor" + // fileMCPJSON is the MCP server config file name. + fileMCPJSON = "mcp.json" + // displayName is the display name for Cursor. + displayName = "Cursor" + // mcpConfigPath is the deployed MCP config path. + mcpConfigPath = dirCursor + "/mcp.json" + // steeringPath is the deployed steering path. 
+ steeringPath = dirCursor + "/rules/" +) + +// Deploy generates Cursor integration files: +// 1. .cursor/mcp.json — MCP server configuration +// 2. .cursor/rules/*.mdc — synced steering files +func Deploy(cmd *cobra.Command) error { + if mcpErr := ensureMCPConfig(cmd); mcpErr != nil { + return mcpErr + } + if steerErr := syncSteering(cmd); steerErr != nil { + return steerErr + } + writeSetup.DeployComplete(cmd, displayName, mcpConfigPath, steeringPath) + return nil +} diff --git a/internal/cli/setup/core/cursor/deploy.go b/internal/cli/setup/core/cursor/deploy.go new file mode 100644 index 000000000..0a88c1ecb --- /dev/null +++ b/internal/cli/setup/core/cursor/deploy.go @@ -0,0 +1,95 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package cursor + +import ( + "encoding/json" + "os" + "path/filepath" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/config/fs" + cfgHook "github.com/ActiveMemory/ctx/internal/config/hook" + mcpServer "github.com/ActiveMemory/ctx/internal/config/mcp/server" + "github.com/ActiveMemory/ctx/internal/config/token" + errSetup "github.com/ActiveMemory/ctx/internal/err/setup" + ctxIo "github.com/ActiveMemory/ctx/internal/io" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/steering" + writeSetup "github.com/ActiveMemory/ctx/internal/write/setup" +) + +// ensureMCPConfig creates .cursor/mcp.json with the ctx +// MCP server configuration. Skips if the file exists. 
+func ensureMCPConfig(cmd *cobra.Command) error { + target := filepath.Join(dirCursor, fileMCPJSON) + + if _, statErr := ctxIo.SafeStat(target); statErr == nil { + writeSetup.DeployFileExists(cmd, target) + return nil + } + + if mkdirErr := ctxIo.SafeMkdirAll( + dirCursor, fs.PermExec, + ); mkdirErr != nil { + return errSetup.CreateDir(dirCursor, mkdirErr) + } + + cfg := mcpConfig{ + MCPServers: map[string]serverEntry{ + mcpServer.Name: { + Command: mcpServer.Command, + Args: mcpServer.Args(), + }, + }, + } + + data, marshalErr := json.MarshalIndent( + cfg, "", " ", + ) + if marshalErr != nil { + return errSetup.MarshalConfig(marshalErr) + } + data = append(data, token.NewlineLF[0]) + + if writeErr := ctxIo.SafeWriteFile( + target, data, fs.PermFile, + ); writeErr != nil { + return errSetup.WriteFile(target, writeErr) + } + + writeSetup.DeployFileCreated(cmd, target) + return nil +} + +// syncSteering syncs steering files to Cursor format +// if a steering directory exists. +func syncSteering(cmd *cobra.Command) error { + steeringDir := rc.SteeringDir() + if _, statErr := ctxIo.SafeStat( + steeringDir, + ); os.IsNotExist(statErr) { + writeSetup.DeployNoSteering(cmd) + return nil + } + + report, syncErr := steering.SyncTool( + steeringDir, token.Dot, cfgHook.ToolCursor, + ) + if syncErr != nil { + return errSetup.SyncSteering(syncErr) + } + + for _, name := range report.Written { + writeSetup.DeploySteeringSynced(cmd, name) + } + for _, name := range report.Skipped { + writeSetup.DeploySteeringSkipped(cmd, name) + } + return nil +} diff --git a/internal/cli/setup/core/cursor/doc.go b/internal/cli/setup/core/cursor/doc.go new file mode 100644 index 000000000..0db79cb5b --- /dev/null +++ b/internal/cli/setup/core/cursor/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package cursor generates Cursor editor integration files. 
+// +// [Deploy] creates .cursor/mcp.json for MCP server +// configuration and syncs steering files to .cursor/rules/ +// in Cursor's native MDC format. +package cursor diff --git a/internal/cli/setup/core/cursor/types.go b/internal/cli/setup/core/cursor/types.go new file mode 100644 index 000000000..6765ca1cd --- /dev/null +++ b/internal/cli/setup/core/cursor/types.go @@ -0,0 +1,18 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package cursor + +// mcpConfig is the JSON structure for .cursor/mcp.json. +type mcpConfig struct { + MCPServers map[string]serverEntry `json:"mcpServers"` +} + +// serverEntry describes one MCP server entry in mcp.json. +type serverEntry struct { + Command string `json:"command"` + Args []string `json:"args"` +} diff --git a/internal/cli/setup/core/kiro/deploy.go b/internal/cli/setup/core/kiro/deploy.go new file mode 100644 index 000000000..bda1cf39e --- /dev/null +++ b/internal/cli/setup/core/kiro/deploy.go @@ -0,0 +1,111 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package kiro + +import ( + "encoding/json" + "os" + "path/filepath" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/config/fs" + cfgHook "github.com/ActiveMemory/ctx/internal/config/hook" + mcpServer "github.com/ActiveMemory/ctx/internal/config/mcp/server" + cfgMcpTool "github.com/ActiveMemory/ctx/internal/config/mcp/tool" + "github.com/ActiveMemory/ctx/internal/config/token" + errSetup "github.com/ActiveMemory/ctx/internal/err/setup" + ctxIo "github.com/ActiveMemory/ctx/internal/io" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/steering" + writeSetup "github.com/ActiveMemory/ctx/internal/write/setup" +) + +// ensureMCPConfig creates .kiro/settings/mcp.json with +// the ctx MCP server config. Skips if the file exists. +func ensureMCPConfig(cmd *cobra.Command) error { + settingsDir := filepath.Join(DirKiro, DirSettings) + target := filepath.Join(settingsDir, FileMCPJSON) + + if _, statErr := ctxIo.SafeStat( + target, + ); statErr == nil { + writeSetup.DeployFileExists(cmd, target) + return nil + } + + if mkdirErr := ctxIo.SafeMkdirAll( + settingsDir, fs.PermExec, + ); mkdirErr != nil { + return errSetup.CreateDir(settingsDir, mkdirErr) + } + + cfg := mcpConfig{ + MCPServers: map[string]serverEntry{ + mcpServer.Name: { + Command: mcpServer.Command, + Args: mcpServer.Args(), + Disabled: false, + AutoApprove: []string{ + cfgMcpTool.Status, + cfgMcpTool.SteeringGet, + cfgMcpTool.Search, + cfgMcpTool.SessionStart, + cfgMcpTool.SessionEnd, + cfgMcpTool.Next, + cfgMcpTool.Remind, + }, + }, + }, + } + + data, marshalErr := json.MarshalIndent( + cfg, "", " ", + ) + if marshalErr != nil { + return errSetup.MarshalConfig(marshalErr) + } + data = append(data, token.NewlineLF[0]) + + if writeErr := ctxIo.SafeWriteFile( + target, data, fs.PermFile, + ); writeErr != nil { + return errSetup.WriteFile(target, writeErr) + } + + writeSetup.DeployFileCreated(cmd, target) + return nil 
+} + +// syncSteering syncs steering files to Kiro format +// if a steering directory exists. +func syncSteering(cmd *cobra.Command) error { + steeringDir := rc.SteeringDir() + + if _, statErr := ctxIo.SafeStat( + steeringDir, + ); os.IsNotExist(statErr) { + writeSetup.DeployNoSteering(cmd) + return nil + } + + report, syncErr := steering.SyncTool( + steeringDir, token.Dot, cfgHook.ToolKiro, + ) + if syncErr != nil { + return errSetup.SyncSteering(syncErr) + } + + for _, name := range report.Written { + writeSetup.DeploySteeringSynced(cmd, name) + } + for _, name := range report.Skipped { + writeSetup.DeploySteeringSkipped(cmd, name) + } + + return nil +} diff --git a/internal/cli/setup/core/kiro/doc.go b/internal/cli/setup/core/kiro/doc.go new file mode 100644 index 000000000..1d915235c --- /dev/null +++ b/internal/cli/setup/core/kiro/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package kiro generates Kiro editor integration files. +// +// [Deploy] creates .kiro/settings/mcp.json for MCP server +// configuration and syncs steering files to .kiro/steering/ +// in Kiro's native format. +package kiro diff --git a/internal/cli/setup/core/kiro/kiro.go b/internal/cli/setup/core/kiro/kiro.go new file mode 100644 index 000000000..c4825ddf7 --- /dev/null +++ b/internal/cli/setup/core/kiro/kiro.go @@ -0,0 +1,58 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package kiro generates Kiro MCP and steering configuration files. +package kiro + +import ( + "github.com/spf13/cobra" + + writeSetup "github.com/ActiveMemory/ctx/internal/write/setup" +) + +// Kiro configuration paths. +const ( + // DirKiro is the Kiro editor config directory. + DirKiro = ".kiro" + // DirSettings is the Kiro settings subdirectory. 
+ DirSettings = "settings" + // FileMCPJSON is the MCP server config file name. + FileMCPJSON = "mcp.json" + // displayName is the display name for Kiro. + displayName = "Kiro" + // mcpConfigPath is the deployed MCP config path. + mcpConfigPath = DirKiro + "/settings/mcp.json" + // steeringDeployPath is the deployed steering path. + steeringDeployPath = DirKiro + "/steering/" +) + +// Deploy generates Kiro integration files: +// 1. .kiro/settings/mcp.json — MCP server configuration +// 2. .kiro/steering/*.md — synced steering files +// +// Skips files that already exist to avoid overwriting user customizations. +// +// Parameters: +// - cmd: Cobra command for output messages +// +// Returns: +// - error: Non-nil if directory creation or file write fails +func Deploy(cmd *cobra.Command) error { + if mcpErr := ensureMCPConfig(cmd); mcpErr != nil { + return mcpErr + } + + if steerErr := syncSteering(cmd); steerErr != nil { + return steerErr + } + + writeSetup.DeployComplete( + cmd, displayName, + mcpConfigPath, + steeringDeployPath, + ) + return nil +} diff --git a/internal/cli/setup/core/kiro/types.go b/internal/cli/setup/core/kiro/types.go new file mode 100644 index 000000000..88f56c8e7 --- /dev/null +++ b/internal/cli/setup/core/kiro/types.go @@ -0,0 +1,20 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package kiro + +// mcpConfig is the JSON structure for .kiro/settings/mcp.json. +type mcpConfig struct { + MCPServers map[string]serverEntry `json:"mcpServers"` +} + +// serverEntry describes one MCP server entry in mcp.json. 
+type serverEntry struct { + Command string `json:"command"` + Args []string `json:"args"` + Disabled bool `json:"disabled"` + AutoApprove []string `json:"autoApprove"` +} diff --git a/internal/cli/skill/cmd/install/cmd.go b/internal/cli/skill/cmd/install/cmd.go new file mode 100644 index 000000000..d9a27471b --- /dev/null +++ b/internal/cli/skill/cmd/install/cmd.go @@ -0,0 +1,55 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package install + +import ( + "path/filepath" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/dir" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/skill" + writeSkill "github.com/ActiveMemory/ctx/internal/write/skill" +) + +// Cmd returns the "ctx skill install" subcommand. +// +// Returns: +// - *cobra.Command: Configured install subcommand +func Cmd() *cobra.Command { + short, long := desc.Command(cmd.DescKeySkillInstall) + + return &cobra.Command{ + Use: cmd.UseSkillInstall, + Short: short, + Long: long, + Args: cobra.ExactArgs(1), + RunE: func(c *cobra.Command, args []string) error { + return Run(c, args[0]) + }, + } +} + +// Run installs a skill from the given source directory. 
+// +// Parameters: +// - c: The cobra command for output +// - source: Path to the source directory containing SKILL.md +func Run(c *cobra.Command, source string) error { + skillsDir := filepath.Join(rc.ContextDir(), dir.Skills) + + sk, err := skill.Install(source, skillsDir) + if err != nil { + return err + } + + writeSkill.Installed(c, sk.Name, sk.Dir) + return nil +} diff --git a/internal/cli/skill/cmd/install/doc.go b/internal/cli/skill/cmd/install/doc.go new file mode 100644 index 000000000..cf090311e --- /dev/null +++ b/internal/cli/skill/cmd/install/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package install implements the "ctx skill install" subcommand. +// +// [Run] installs a skill from a source directory containing a +// SKILL.md manifest with YAML frontmatter, copying all files +// into the skills directory under .context/. +package install diff --git a/internal/cli/skill/cmd/list/cmd.go b/internal/cli/skill/cmd/list/cmd.go new file mode 100644 index 000000000..98364e8b9 --- /dev/null +++ b/internal/cli/skill/cmd/list/cmd.go @@ -0,0 +1,66 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package list + +import ( + "path/filepath" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/dir" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/skill" + writeSkill "github.com/ActiveMemory/ctx/internal/write/skill" +) + +// Cmd returns the "ctx skill list" subcommand. 
+// +// Returns: +// - *cobra.Command: Configured list subcommand +func Cmd() *cobra.Command { + short, _ := desc.Command(cmd.DescKeySkillList) + + return &cobra.Command{ + Use: cmd.UseSkillList, + Short: short, + Args: cobra.NoArgs, + RunE: func(c *cobra.Command, _ []string) error { + return Run(c) + }, + } +} + +// Run lists all installed skills with name and description. +// +// Parameters: +// - c: The cobra command for output +func Run(c *cobra.Command) error { + skillsDir := filepath.Join(rc.ContextDir(), dir.Skills) + + skills, err := skill.LoadAll(skillsDir) + if err != nil { + return err + } + + if len(skills) == 0 { + writeSkill.NoSkillsFound(c) + return nil + } + + for _, sk := range skills { + if sk.Description != "" { + writeSkill.EntryWithDesc(c, sk.Name, sk.Description) + } else { + writeSkill.Entry(c, sk.Name) + } + } + + writeSkill.Count(c, len(skills)) + return nil +} diff --git a/internal/cli/skill/cmd/list/doc.go b/internal/cli/skill/cmd/list/doc.go new file mode 100644 index 000000000..5a50594d9 --- /dev/null +++ b/internal/cli/skill/cmd/list/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package list implements the "ctx skill list" subcommand. +// +// [Run] reads all installed skills from the skills directory +// and displays each skill's name and description, with a +// total count at the end. +package list diff --git a/internal/cli/skill/cmd/remove/cmd.go b/internal/cli/skill/cmd/remove/cmd.go new file mode 100644 index 000000000..5accc899a --- /dev/null +++ b/internal/cli/skill/cmd/remove/cmd.go @@ -0,0 +1,54 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package remove + +import ( + "path/filepath" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/dir" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/skill" + writeSkill "github.com/ActiveMemory/ctx/internal/write/skill" +) + +// Cmd returns the "ctx skill remove" subcommand. +// +// Returns: +// - *cobra.Command: Configured remove subcommand +func Cmd() *cobra.Command { + short, long := desc.Command(cmd.DescKeySkillRemove) + + return &cobra.Command{ + Use: cmd.UseSkillRemove, + Short: short, + Long: long, + Args: cobra.ExactArgs(1), + RunE: func(c *cobra.Command, args []string) error { + return Run(c, args[0]) + }, + } +} + +// Run removes an installed skill by name. +// +// Parameters: +// - c: The cobra command for output +// - name: The skill name to remove +func Run(c *cobra.Command, name string) error { + skillsDir := filepath.Join(rc.ContextDir(), dir.Skills) + + if err := skill.Remove(skillsDir, name); err != nil { + return err + } + + writeSkill.Removed(c, name) + return nil +} diff --git a/internal/cli/skill/cmd/remove/doc.go b/internal/cli/skill/cmd/remove/doc.go new file mode 100644 index 000000000..553afd4dc --- /dev/null +++ b/internal/cli/skill/cmd/remove/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package remove implements the "ctx skill remove" subcommand. +// +// [Run] removes an installed skill by name, deleting its +// directory and all contained files from the skills directory +// under .context/. 
+package remove diff --git a/internal/cli/skill/doc.go b/internal/cli/skill/doc.go new file mode 100644 index 000000000..65eb5454d --- /dev/null +++ b/internal/cli/skill/doc.go @@ -0,0 +1,13 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package skill implements the "ctx skill" command group for managing +// reusable instruction bundles that can be installed, listed, and +// removed from the project context. +// +// Subcommands: install (copy from source), list (show all), +// remove (delete by name). +package skill diff --git a/internal/cli/skill/skill.go b/internal/cli/skill/skill.go new file mode 100644 index 000000000..af9d6cf9f --- /dev/null +++ b/internal/cli/skill/skill.go @@ -0,0 +1,29 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package skill + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/cli/parent" + "github.com/ActiveMemory/ctx/internal/cli/skill/cmd/install" + "github.com/ActiveMemory/ctx/internal/cli/skill/cmd/list" + "github.com/ActiveMemory/ctx/internal/cli/skill/cmd/remove" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" +) + +// Cmd returns the "ctx skill" parent command. +// +// Returns: +// - *cobra.Command: Configured skill command with subcommands +func Cmd() *cobra.Command { + return parent.Cmd(cmd.DescKeySkill, cmd.UseSkill, + install.Cmd(), + list.Cmd(), + remove.Cmd(), + ) +} diff --git a/internal/cli/steering/cmd/add/cmd.go b/internal/cli/steering/cmd/add/cmd.go new file mode 100644 index 000000000..2917c0aec --- /dev/null +++ b/internal/cli/steering/cmd/add/cmd.go @@ -0,0 +1,93 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package add + +import ( + "os" + "path/filepath" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" + "github.com/ActiveMemory/ctx/internal/config/file" + "github.com/ActiveMemory/ctx/internal/config/fs" + errSteering "github.com/ActiveMemory/ctx/internal/err/steering" + ctxIo "github.com/ActiveMemory/ctx/internal/io" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/steering" + writeSteering "github.com/ActiveMemory/ctx/internal/write/steering" +) + +// defaultPriority is the default priority for new steering files. +const defaultPriority = 50 + +// Cmd returns the "ctx steering add" subcommand. +// +// Returns: +// - *cobra.Command: Configured add subcommand +func Cmd() *cobra.Command { + short, long := desc.Command(cmd.DescKeySteeringAdd) + + return &cobra.Command{ + Use: cmd.UseSteeringAdd, + Short: short, + Long: long, + Args: cobra.ExactArgs(1), + RunE: func(c *cobra.Command, args []string) error { + return Run(c, args[0]) + }, + } +} + +// Run creates a new steering file with default frontmatter. +// +// Parameters: +// - c: The cobra command for output +// - name: The steering file name (without .md extension) +func Run(c *cobra.Command, name string) error { + contextDir := rc.ContextDir() + + // Check that .context/ directory exists. + if _, statErr := ctxIo.SafeStat(contextDir); os.IsNotExist(statErr) { + return errSteering.ContextDirMissing() + } + + steeringDir := rc.SteeringDir() + + // Ensure the steering directory exists. + if mkdirErr := ctxIo.SafeMkdirAll( + steeringDir, fs.PermRestrictedDir, + ); mkdirErr != nil { + return errSteering.CreateDir(mkdirErr) + } + + filePath := filepath.Join( + steeringDir, name+file.ExtMarkdown, + ) + + // Error if file already exists. 
+ if _, statErr := ctxIo.SafeStat(filePath); statErr == nil { + return errSteering.FileExists(filePath) + } + + sf := &steering.SteeringFile{ + Name: name, + Inclusion: steering.InclusionManual, + Priority: defaultPriority, + } + + data := steering.Print(sf) + if writeErr := ctxIo.SafeWriteFile( + filePath, data, fs.PermFile, + ); writeErr != nil { + return errSteering.Write(writeErr) + } + + writeSteering.Created(c, filePath) + return nil +} diff --git a/internal/cli/steering/cmd/add/doc.go b/internal/cli/steering/cmd/add/doc.go new file mode 100644 index 000000000..2f3b0779a --- /dev/null +++ b/internal/cli/steering/cmd/add/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package add implements the "ctx steering add" subcommand. +// +// [Run] creates a new steering file with default frontmatter +// (manual inclusion, priority 50) in the steering directory. +// Errors if a file with the same name already exists. +package add diff --git a/internal/cli/steering/cmd/initcmd/cmd.go b/internal/cli/steering/cmd/initcmd/cmd.go new file mode 100644 index 000000000..f815fedf9 --- /dev/null +++ b/internal/cli/steering/cmd/initcmd/cmd.go @@ -0,0 +1,106 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package initcmd + +import ( + "os" + "path/filepath" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" + "github.com/ActiveMemory/ctx/internal/config/file" + "github.com/ActiveMemory/ctx/internal/config/fs" + errSteering "github.com/ActiveMemory/ctx/internal/err/steering" + ctxIo "github.com/ActiveMemory/ctx/internal/io" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/steering" + writeSteering "github.com/ActiveMemory/ctx/internal/write/steering" +) + +// Cmd returns the "ctx steering init" subcommand. +// +// Returns: +// - *cobra.Command: Configured init subcommand +func Cmd() *cobra.Command { + short, long := desc.Command(cmd.DescKeySteeringInit) + + return &cobra.Command{ + Use: cmd.UseSteeringInit, + Short: short, + Long: long, + Args: cobra.NoArgs, + RunE: func(c *cobra.Command, _ []string) error { + return Run(c) + }, + } +} + +// Run generates foundation steering files in the steering directory. +// Existing files are skipped and reported. +// +// Parameters: +// - c: The cobra command for output +func Run(c *cobra.Command) error { + contextDir := rc.ContextDir() + + // Check that .context/ directory exists. + if _, statErr := ctxIo.SafeStat( + contextDir, + ); os.IsNotExist(statErr) { + return errSteering.ContextDirMissing() + } + + steeringDir := rc.SteeringDir() + + // Ensure the steering directory exists. 
+ if mkdirErr := ctxIo.SafeMkdirAll( + steeringDir, fs.PermRestrictedDir, + ); mkdirErr != nil { + return errSteering.CreateDir(mkdirErr) + } + + var created, skipped int + + for _, ff := range steering.FoundationFiles { + filePath := filepath.Join( + steeringDir, ff.Name+file.ExtMarkdown, + ) + + if _, statErr := ctxIo.SafeStat( + filePath, + ); statErr == nil { + writeSteering.Skipped(c, filePath) + skipped++ + continue + } + + sf := &steering.SteeringFile{ + Name: ff.Name, + Description: ff.Description, + Inclusion: steering.InclusionAlways, + Priority: 10, + Body: ff.Body, + } + + data := steering.Print(sf) + if writeErr := ctxIo.SafeWriteFile( + filePath, data, fs.PermFile, + ); writeErr != nil { + return errSteering.WriteInitFile( + filePath, writeErr, + ) + } + + writeSteering.Created(c, filePath) + created++ + } + + writeSteering.InitSummary(c, created, skipped) + return nil +} diff --git a/internal/cli/steering/cmd/initcmd/doc.go b/internal/cli/steering/cmd/initcmd/doc.go new file mode 100644 index 000000000..eee2acf18 --- /dev/null +++ b/internal/cli/steering/cmd/initcmd/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package initcmd implements the "ctx steering init" subcommand. +// +// [Run] generates foundation steering files (product, tech, +// structure, workflow) in the steering directory with default +// frontmatter. Existing files are skipped to avoid overwriting. +package initcmd diff --git a/internal/cli/steering/cmd/list/cmd.go b/internal/cli/steering/cmd/list/cmd.go new file mode 100644 index 000000000..73489b66a --- /dev/null +++ b/internal/cli/steering/cmd/list/cmd.go @@ -0,0 +1,74 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package list + +import ( + "strings" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" + "github.com/ActiveMemory/ctx/internal/config/token" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/steering" + writeSteering "github.com/ActiveMemory/ctx/internal/write/steering" +) + +// labelAllTools is the display label when a steering file +// applies to all tools. +const labelAllTools = "all" + +// Cmd returns the "ctx steering list" subcommand. +// +// Returns: +// - *cobra.Command: Configured list subcommand +func Cmd() *cobra.Command { + short, _ := desc.Command(cmd.DescKeySteeringList) + + return &cobra.Command{ + Use: cmd.UseSteeringList, + Short: short, + Args: cobra.NoArgs, + RunE: func(c *cobra.Command, _ []string) error { + return Run(c) + }, + } +} + +// Run lists all steering files with name, inclusion mode, priority, +// and target tools. +// +// Parameters: +// - c: The cobra command for output +func Run(c *cobra.Command) error { + steeringDir := rc.SteeringDir() + + files, err := steering.LoadAll(steeringDir) + if err != nil { + return err + } + + if len(files) == 0 { + writeSteering.NoFilesFound(c) + return nil + } + + for _, sf := range files { + tools := labelAllTools + if len(sf.Tools) > 0 { + tools = strings.Join(sf.Tools, token.CommaSpace) + } + writeSteering.FileEntry( + c, sf.Name, + string(sf.Inclusion), sf.Priority, tools, + ) + } + + writeSteering.FileCount(c, len(files)) + return nil +} diff --git a/internal/cli/steering/cmd/list/doc.go b/internal/cli/steering/cmd/list/doc.go new file mode 100644 index 000000000..0a95e0e1a --- /dev/null +++ b/internal/cli/steering/cmd/list/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +// Package list implements the "ctx steering list" subcommand. +// +// [Run] reads all steering files from the steering directory +// and displays each file's name, inclusion mode, priority, +// and target tools with a total count. +package list diff --git a/internal/cli/steering/cmd/preview/cmd.go b/internal/cli/steering/cmd/preview/cmd.go new file mode 100644 index 000000000..0884aa1b1 --- /dev/null +++ b/internal/cli/steering/cmd/preview/cmd.go @@ -0,0 +1,80 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package preview + +import ( + "strings" + + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" + "github.com/ActiveMemory/ctx/internal/config/token" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/steering" + writeSteering "github.com/ActiveMemory/ctx/internal/write/steering" +) + +// labelAllTools is the display label when a steering file +// applies to all tools. +const labelAllTools = "all" + +// Cmd returns the "ctx steering preview" subcommand. +// +// Returns: +// - *cobra.Command: Configured preview subcommand +func Cmd() *cobra.Command { + short, long := desc.Command(cmd.DescKeySteeringPreview) + + return &cobra.Command{ + Use: cmd.UseSteeringPreview, + Short: short, + Long: long, + Args: cobra.ExactArgs(1), + RunE: func(c *cobra.Command, args []string) error { + return Run(c, args[0]) + }, + } +} + +// Run shows which steering files would be included for the given +// prompt text, respecting inclusion mode rules. 
+// +// Parameters: +// - c: The cobra command for output +// - prompt: The prompt text to match against +func Run(c *cobra.Command, prompt string) error { + steeringDir := rc.SteeringDir() + + files, err := steering.LoadAll(steeringDir) + if err != nil { + return err + } + + // Filter with no manual names — preview only shows always + auto matches. + matched := steering.Filter(files, prompt, nil, "") + + if len(matched) == 0 { + writeSteering.NoFilesMatch(c) + return nil + } + + writeSteering.PreviewHeader(c, prompt) + for _, sf := range matched { + tools := labelAllTools + if len(sf.Tools) > 0 { + tools = strings.Join(sf.Tools, token.CommaSpace) + } + writeSteering.PreviewEntry( + c, sf.Name, + string(sf.Inclusion), sf.Priority, tools, + ) + } + + writeSteering.PreviewCount(c, len(matched)) + return nil +} diff --git a/internal/cli/steering/cmd/preview/doc.go b/internal/cli/steering/cmd/preview/doc.go new file mode 100644 index 000000000..63738e58a --- /dev/null +++ b/internal/cli/steering/cmd/preview/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package preview implements the "ctx steering preview" subcommand. +// +// [Run] shows which steering files would be included for a +// given prompt text, displaying each matching file's name, +// inclusion mode, priority, and target tools. +package preview diff --git a/internal/cli/steering/cmd/synccmd/cmd.go b/internal/cli/steering/cmd/synccmd/cmd.go new file mode 100644 index 000000000..3adff5cbc --- /dev/null +++ b/internal/cli/steering/cmd/synccmd/cmd.go @@ -0,0 +1,87 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package synccmd + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/cli/resolve" + coreSync "github.com/ActiveMemory/ctx/internal/cli/steering/core/sync" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" + embedFlag "github.com/ActiveMemory/ctx/internal/config/embed/flag" + "github.com/ActiveMemory/ctx/internal/config/flag" + "github.com/ActiveMemory/ctx/internal/config/token" + errSteering "github.com/ActiveMemory/ctx/internal/err/steering" + "github.com/ActiveMemory/ctx/internal/flagbind" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/steering" +) + +// Cmd returns the "ctx steering sync" subcommand. +// +// Returns: +// - *cobra.Command: Configured sync subcommand +func Cmd() *cobra.Command { + var syncAll bool + + short, long := desc.Command(cmd.DescKeySteeringSync) + + c := &cobra.Command{ + Use: cmd.UseSteeringSync, + Short: short, + Long: long, + Args: cobra.NoArgs, + RunE: func(c *cobra.Command, _ []string) error { + return Run(c, syncAll) + }, + } + + flagbind.BoolFlag( + c, &syncAll, flag.All, + embedFlag.DescKeySteeringSyncAll, + ) + + return c +} + +// Run syncs steering files to tool-native formats. +// +// Parameters: +// - c: The cobra command for output and flag access +// - syncAll: Whether to sync to all supported tools +func Run(c *cobra.Command, syncAll bool) error { + steeringDir := rc.SteeringDir() + projectRoot := token.Dot + + if syncAll { + report, syncErr := steering.SyncAll( + steeringDir, projectRoot, + ) + if syncErr != nil { + return syncErr + } + coreSync.PrintReport(c, report) + return nil + } + + // Resolve tool from --tool flag or .ctxrc. 
+ tool, resolveErr := resolve.Tool(c) + if resolveErr != nil { + return errSteering.NoTool() + } + + report, syncErr := steering.SyncTool( + steeringDir, projectRoot, tool, + ) + if syncErr != nil { + return syncErr + } + + coreSync.PrintReport(c, report) + return nil +} diff --git a/internal/cli/steering/cmd/synccmd/doc.go b/internal/cli/steering/cmd/synccmd/doc.go new file mode 100644 index 000000000..dacf3a360 --- /dev/null +++ b/internal/cli/steering/cmd/synccmd/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package synccmd implements the "ctx steering sync" subcommand. +// +// [Run] syncs steering files to tool-native formats for one +// tool (resolved from --tool flag or .ctxrc) or all supported +// tools when the --all flag is provided. +package synccmd diff --git a/internal/cli/steering/core/sync/doc.go b/internal/cli/steering/core/sync/doc.go new file mode 100644 index 000000000..9e266f6d2 --- /dev/null +++ b/internal/cli/steering/core/sync/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package sync provides output formatting for the +// steering sync command. +// +// It renders sync reports showing written, skipped, +// and errored files with summary counts. +package sync diff --git a/internal/cli/steering/core/sync/report.go b/internal/cli/steering/core/sync/report.go new file mode 100644 index 000000000..e24339e7f --- /dev/null +++ b/internal/cli/steering/core/sync/report.go @@ -0,0 +1,36 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package sync + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/steering" + writeSteering "github.com/ActiveMemory/ctx/internal/write/steering" +) + +// PrintReport outputs the sync report to the command +// output stream. +func PrintReport( + c *cobra.Command, report steering.SyncReport, +) { + for _, name := range report.Written { + writeSteering.SyncWritten(c, name) + } + for _, name := range report.Skipped { + writeSteering.SyncSkipped(c, name) + } + for _, syncErr := range report.Errors { + writeSteering.SyncError(c, syncErr.Error()) + } + + writeSteering.SyncSummary(c, + len(report.Written), + len(report.Skipped), + len(report.Errors), + ) +} diff --git a/internal/cli/steering/doc.go b/internal/cli/steering/doc.go new file mode 100644 index 000000000..6532b163a --- /dev/null +++ b/internal/cli/steering/doc.go @@ -0,0 +1,13 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package steering implements the "ctx steering" command group for +// managing steering files that define persistent behavioral rules +// for AI coding assistants. +// +// Subcommands: add (create file), list (show all), preview (match +// prompt), init (generate foundation files), sync (tool-native export). +package steering diff --git a/internal/cli/steering/steering.go b/internal/cli/steering/steering.go new file mode 100644 index 000000000..6372c28ed --- /dev/null +++ b/internal/cli/steering/steering.go @@ -0,0 +1,33 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package steering + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/cli/parent" + "github.com/ActiveMemory/ctx/internal/cli/steering/cmd/add" + "github.com/ActiveMemory/ctx/internal/cli/steering/cmd/initcmd" + "github.com/ActiveMemory/ctx/internal/cli/steering/cmd/list" + "github.com/ActiveMemory/ctx/internal/cli/steering/cmd/preview" + "github.com/ActiveMemory/ctx/internal/cli/steering/cmd/synccmd" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" +) + +// Cmd returns the "ctx steering" parent command. +// +// Returns: +// - *cobra.Command: Configured steering command with subcommands +func Cmd() *cobra.Command { + return parent.Cmd(cmd.DescKeySteering, cmd.UseSteering, + add.Cmd(), + list.Cmd(), + preview.Cmd(), + initcmd.Cmd(), + synccmd.Cmd(), + ) +} diff --git a/internal/cli/system/cmd/message/cmd/edit/run.go b/internal/cli/system/cmd/message/cmd/edit/run.go index cee4dbd45..a35577e7a 100644 --- a/internal/cli/system/cmd/message/cmd/edit/run.go +++ b/internal/cli/system/cmd/message/cmd/edit/run.go @@ -17,7 +17,7 @@ import ( "github.com/ActiveMemory/ctx/internal/cli/system/core/message" "github.com/ActiveMemory/ctx/internal/config/file" "github.com/ActiveMemory/ctx/internal/err/fs" - errHook "github.com/ActiveMemory/ctx/internal/err/hook" + errTrigger "github.com/ActiveMemory/ctx/internal/err/trigger" ctxIo "github.com/ActiveMemory/ctx/internal/io" writeMessage "github.com/ActiveMemory/ctx/internal/write/message" ) @@ -35,13 +35,13 @@ import ( func Run(cmd *cobra.Command, hk, variant string) error { info := messages.Lookup(hk, variant) if info == nil { - return errHook.Validate(messages.Variants(hk) != nil, hk, variant) + return errTrigger.Validate(messages.Variants(hk) != nil, hk, variant) } oPath := message.OverridePath(hk, variant) if _, statErr := os.Stat(oPath); statErr == nil { - return errHook.OverrideExists(oPath, hk, variant) + return errTrigger.OverrideExists(oPath, hk, 
variant) } if info.Category == messages.CategoryCtxSpecific { @@ -50,7 +50,7 @@ func Run(cmd *cobra.Command, hk, variant string) error { data, readErr := hook.Message(hk, variant+file.ExtTxt) if readErr != nil { - return errHook.EmbeddedTemplateNotFound(hk, variant) + return errTrigger.EmbeddedTemplateNotFound(hk, variant) } dir := filepath.Dir(oPath) @@ -59,7 +59,7 @@ func Run(cmd *cobra.Command, hk, variant string) error { } if writeErr := ctxIo.SafeWriteFile(oPath, data, 0o600); writeErr != nil { - return errHook.WriteOverride(oPath, writeErr) + return errTrigger.WriteOverride(oPath, writeErr) } writeMessage.OverrideCreated(cmd, oPath) diff --git a/internal/cli/system/cmd/message/cmd/reset/run.go b/internal/cli/system/cmd/message/cmd/reset/run.go index d0faddbcd..5df198463 100644 --- a/internal/cli/system/cmd/message/cmd/reset/run.go +++ b/internal/cli/system/cmd/message/cmd/reset/run.go @@ -15,7 +15,7 @@ import ( "github.com/ActiveMemory/ctx/internal/assets/hooks/messages" "github.com/ActiveMemory/ctx/internal/cli/system/core/message" "github.com/ActiveMemory/ctx/internal/config/warn" - errHook "github.com/ActiveMemory/ctx/internal/err/hook" + errTrigger "github.com/ActiveMemory/ctx/internal/err/trigger" ctxLog "github.com/ActiveMemory/ctx/internal/log/warn" writeMessage "github.com/ActiveMemory/ctx/internal/write/message" ) @@ -32,7 +32,7 @@ import ( func Run(cmd *cobra.Command, hk, variant string) error { info := messages.Lookup(hk, variant) if info == nil { - return errHook.Validate(messages.Variants(hk) != nil, hk, variant) + return errTrigger.Validate(messages.Variants(hk) != nil, hk, variant) } oPath := message.OverridePath(hk, variant) @@ -42,7 +42,7 @@ func Run(cmd *cobra.Command, hk, variant string) error { writeMessage.NoOverride(cmd, hk, variant) return nil } - return errHook.RemoveOverride(oPath, removeErr) + return errTrigger.RemoveOverride(oPath, removeErr) } hookDir := filepath.Dir(oPath) diff --git 
a/internal/cli/system/cmd/message/cmd/show/run.go b/internal/cli/system/cmd/message/cmd/show/run.go index 8fc634025..21c3656ec 100644 --- a/internal/cli/system/cmd/message/cmd/show/run.go +++ b/internal/cli/system/cmd/message/cmd/show/run.go @@ -13,7 +13,7 @@ import ( readHook "github.com/ActiveMemory/ctx/internal/assets/read/hook" "github.com/ActiveMemory/ctx/internal/cli/system/core/message" "github.com/ActiveMemory/ctx/internal/config/file" - errHook "github.com/ActiveMemory/ctx/internal/err/hook" + errTrigger "github.com/ActiveMemory/ctx/internal/err/trigger" "github.com/ActiveMemory/ctx/internal/io" writeMessage "github.com/ActiveMemory/ctx/internal/write/message" ) @@ -30,7 +30,7 @@ import ( func Run(cmd *cobra.Command, hk, variant string) error { info := messages.Lookup(hk, variant) if info == nil { - return errHook.Validate(messages.Variants(hk) != nil, hk, variant) + return errTrigger.Validate(messages.Variants(hk) != nil, hk, variant) } oPath := message.OverridePath(hk, variant) @@ -43,7 +43,7 @@ func Run(cmd *cobra.Command, hk, variant string) error { data, readErr := readHook.Message(hk, variant+file.ExtTxt) if readErr != nil { - return errHook.EmbeddedTemplateNotFound(hk, variant) + return errTrigger.EmbeddedTemplateNotFound(hk, variant) } writeMessage.SourceDefault(cmd) diff --git a/internal/cli/trigger/cmd/add/cmd.go b/internal/cli/trigger/cmd/add/cmd.go new file mode 100644 index 000000000..d5b3970b2 --- /dev/null +++ b/internal/cli/trigger/cmd/add/cmd.go @@ -0,0 +1,120 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0
+
+package add
+
+import (
+	"fmt"
+	"path/filepath"
+	"strings"
+
+	"github.com/spf13/cobra"
+
+	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
+	"github.com/ActiveMemory/ctx/internal/config/embed/cmd"
+	"github.com/ActiveMemory/ctx/internal/config/file"
+	"github.com/ActiveMemory/ctx/internal/config/fs"
+	"github.com/ActiveMemory/ctx/internal/config/token"
+	errTrigger "github.com/ActiveMemory/ctx/internal/err/trigger"
+	ctxIo "github.com/ActiveMemory/ctx/internal/io"
+	"github.com/ActiveMemory/ctx/internal/rc"
+	"github.com/ActiveMemory/ctx/internal/trigger"
+	writeTrigger "github.com/ActiveMemory/ctx/internal/write/trigger"
+)
+
+// scriptTemplate is the shell script template for new trigger scripts.
+const scriptTemplate = `#!/usr/bin/env bash
+# Hook: %s
+# Type: %s
+# Created by: ctx hook add
+
+set -euo pipefail
+
+INPUT=$(cat)
+
+# Parse input fields
+HOOK_TYPE=$(echo "$INPUT" | jq -r '.hookType')
+TOOL=$(echo "$INPUT" | jq -r '.tool // empty')
+
+# Your hook logic here
+
+# Return output
+echo '{"cancel": false, "context": "", "message": ""}'
+`
+
+// Cmd returns the "ctx trigger add" subcommand.
+//
+// Returns:
+// - *cobra.Command: Configured add subcommand
+func Cmd() *cobra.Command {
+	short, long := desc.Command(cmd.DescKeyTriggerAdd)
+
+	return &cobra.Command{
+		Use:   cmd.UseTriggerAdd,
+		Short: short,
+		Long:  long,
+		Args:  cobra.ExactArgs(2),
+		RunE: func(c *cobra.Command, args []string) error {
+			return Run(c, args[0], args[1])
+		},
+	}
+}
+
+// Run creates a new trigger script from a shell template.
+//
+// Parameters:
+// - c: The cobra command for output
+// - hookType: The hook type (e.g., "pre-tool-use")
+// - name: The trigger script name (without .sh extension)
+func Run(c *cobra.Command, hookType, name string) error {
+	// Validate hook type.
+ ht := trigger.HookType(hookType) + valid := trigger.ValidTypes() + + found := false + for _, v := range valid { + if v == ht { + found = true + break + } + } + + if !found { + names := make([]string, len(valid)) + for i, v := range valid { + names[i] = string(v) + } + return errTrigger.InvalidType(hookType, strings.Join(names, token.CommaSpace)) + } + + hooksDir := rc.HooksDir() + typeDir := filepath.Join(hooksDir, hookType) + + // Ensure the type directory exists. + if mkdirErr := ctxIo.SafeMkdirAll( + typeDir, fs.PermRestrictedDir, + ); mkdirErr != nil { + return errTrigger.CreateDir(mkdirErr) + } + + filePath := filepath.Join(typeDir, name+file.ExtSh) + + // Error if file already exists. + if _, statErr := ctxIo.SafeStat(filePath); statErr == nil { + return errTrigger.ScriptExists(filePath) + } + + content := fmt.Sprintf(scriptTemplate, name, hookType) + writeErr := ctxIo.SafeWriteFile( + filePath, []byte(content), fs.PermExec, + ) + if writeErr != nil { + return errTrigger.WriteScript(writeErr) + } + + writeTrigger.Created(c, filePath) + return nil +} diff --git a/internal/cli/trigger/cmd/add/doc.go b/internal/cli/trigger/cmd/add/doc.go new file mode 100644 index 000000000..8b04ae9c5 --- /dev/null +++ b/internal/cli/trigger/cmd/add/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package add implements the "ctx trigger add" subcommand. +// +// [Run] creates a new trigger script from a shell template in +// the appropriate trigger type subdirectory under .context/hooks/, +// with the executable permission bit set. +package add diff --git a/internal/cli/trigger/cmd/disable/cmd.go b/internal/cli/trigger/cmd/disable/cmd.go new file mode 100644 index 000000000..503ccb7a0 --- /dev/null +++ b/internal/cli/trigger/cmd/disable/cmd.go @@ -0,0 +1,70 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? 
+// `.,'\
+//      \   Copyright 2026-present Context contributors.
+// SPDX-License-Identifier: Apache-2.0
+
+package disable
+
+import (
+	"os"
+
+	"github.com/spf13/cobra"
+
+	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
+	"github.com/ActiveMemory/ctx/internal/config/embed/cmd"
+	errTrigger "github.com/ActiveMemory/ctx/internal/err/trigger"
+	"github.com/ActiveMemory/ctx/internal/rc"
+	"github.com/ActiveMemory/ctx/internal/trigger"
+	writeTrigger "github.com/ActiveMemory/ctx/internal/write/trigger"
+)
+
+// Cmd returns the "ctx trigger disable" subcommand.
+//
+// Returns:
+// - *cobra.Command: Configured disable subcommand
+func Cmd() *cobra.Command {
+	short, long := desc.Command(cmd.DescKeyTriggerDisable)
+
+	return &cobra.Command{
+		Use:   cmd.UseTriggerDisable,
+		Short: short,
+		Long:  long,
+		Args:  cobra.ExactArgs(1),
+		RunE: func(c *cobra.Command, args []string) error {
+			return Run(c, args[0])
+		},
+	}
+}
+
+// Run disables a trigger script by removing the executable permission bit.
+//
+// Parameters:
+// - c: The cobra command for output
+// - name: The trigger name to disable
+func Run(c *cobra.Command, name string) error {
+	hooksDir := rc.HooksDir()
+
+	h, findErr := trigger.FindByName(hooksDir, name)
+	if findErr != nil {
+		return findErr
+	}
+
+	if h == nil {
+		return errTrigger.NotFound(name)
+	}
+
+	fi, statErr := os.Stat(h.Path)
+	if statErr != nil {
+		return errTrigger.Stat(statErr)
+	}
+
+	// Remove executable permission bit for user, group, and other.
+	newMode := fi.Mode() &^ 0o111
+	if chmodErr := os.Chmod(h.Path, newMode); chmodErr != nil {
+		return errTrigger.Chmod(chmodErr)
+	}
+
+	writeTrigger.Disabled(c, name, h.Path)
+	return nil
+}
diff --git a/internal/cli/trigger/cmd/disable/doc.go b/internal/cli/trigger/cmd/disable/doc.go
new file mode 100644
index 000000000..2ae31d249
--- /dev/null
+++ b/internal/cli/trigger/cmd/disable/doc.go
@@ -0,0 +1,12 @@
+// /        ctx: https://ctx.ist
+//  ,'`./   do you remember?
+// `.,'\\
+//      \   Copyright 2026-present Context contributors.
+// SPDX-License-Identifier: Apache-2.0
+
+// Package disable implements the "ctx trigger disable" subcommand.
+//
+// [Run] disables a trigger script by removing the executable
+// permission bit, preventing it from running during lifecycle
+// events while preserving the script file.
+package disable
diff --git a/internal/cli/trigger/cmd/enable/cmd.go b/internal/cli/trigger/cmd/enable/cmd.go
new file mode 100644
index 000000000..8287d0405
--- /dev/null
+++ b/internal/cli/trigger/cmd/enable/cmd.go
@@ -0,0 +1,70 @@
+// /        ctx: https://ctx.ist
+//  ,'`./   do you remember?
+// `.,'\
+//      \   Copyright 2026-present Context contributors.
+// SPDX-License-Identifier: Apache-2.0
+
+package enable
+
+import (
+	"os"
+
+	"github.com/spf13/cobra"
+
+	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
+	"github.com/ActiveMemory/ctx/internal/config/embed/cmd"
+	errTrigger "github.com/ActiveMemory/ctx/internal/err/trigger"
+	"github.com/ActiveMemory/ctx/internal/rc"
+	"github.com/ActiveMemory/ctx/internal/trigger"
+	writeTrigger "github.com/ActiveMemory/ctx/internal/write/trigger"
+)
+
+// Cmd returns the "ctx trigger enable" subcommand.
+//
+// Returns:
+// - *cobra.Command: Configured enable subcommand
+func Cmd() *cobra.Command {
+	short, long := desc.Command(cmd.DescKeyTriggerEnable)
+
+	return &cobra.Command{
+		Use:   cmd.UseTriggerEnable,
+		Short: short,
+		Long:  long,
+		Args:  cobra.ExactArgs(1),
+		RunE: func(c *cobra.Command, args []string) error {
+			return Run(c, args[0])
+		},
+	}
+}
+
+// Run enables a trigger script by adding the executable permission bit.
+// +// Parameters: +// - c: The cobra command for output +// - name: The hook name to enable +func Run(c *cobra.Command, name string) error { + hooksDir := rc.HooksDir() + + h, findErr := trigger.FindByName(hooksDir, name) + if findErr != nil { + return findErr + } + + if h == nil { + return errTrigger.NotFound(name) + } + + fi, statErr := os.Stat(h.Path) + if statErr != nil { + return errTrigger.Stat(statErr) + } + + // Add executable permission bit for user, group, and other. + newMode := fi.Mode() | 0o111 + if chmodErr := os.Chmod(h.Path, newMode); chmodErr != nil { + return errTrigger.Chmod(chmodErr) + } + + writeTrigger.Enabled(c, name, h.Path) + return nil +} diff --git a/internal/cli/trigger/cmd/enable/doc.go b/internal/cli/trigger/cmd/enable/doc.go new file mode 100644 index 000000000..0baf0415a --- /dev/null +++ b/internal/cli/trigger/cmd/enable/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package enable implements the "ctx trigger enable" subcommand. +// +// [Run] enables a trigger script by adding the executable +// permission bit, making it eligible for execution by the +// trigger runner during lifecycle events. +package enable diff --git a/internal/cli/trigger/cmd/list/cmd.go b/internal/cli/trigger/cmd/list/cmd.go new file mode 100644 index 000000000..99681b7cd --- /dev/null +++ b/internal/cli/trigger/cmd/list/cmd.go @@ -0,0 +1,83 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0
+
+package list
+
+import (
+	"github.com/spf13/cobra"
+
+	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
+	"github.com/ActiveMemory/ctx/internal/config/embed/cmd"
+	"github.com/ActiveMemory/ctx/internal/rc"
+	"github.com/ActiveMemory/ctx/internal/trigger"
+	writeTrigger "github.com/ActiveMemory/ctx/internal/write/trigger"
+)
+
+// Trigger status labels.
+const (
+	// statusEnabled is the label for an enabled trigger.
+	statusEnabled = "enabled"
+	// statusDisabled is the label for a disabled trigger.
+	statusDisabled = "disabled"
+)
+
+// Cmd returns the "ctx trigger list" subcommand.
+//
+// Returns:
+// - *cobra.Command: Configured list subcommand
+func Cmd() *cobra.Command {
+	short, _ := desc.Command(cmd.DescKeyTriggerList)
+
+	return &cobra.Command{
+		Use:   cmd.UseTriggerList,
+		Short: short,
+		Args:  cobra.NoArgs,
+		RunE: func(c *cobra.Command, _ []string) error {
+			return Run(c)
+		},
+	}
+}
+
+// Run lists all trigger scripts grouped by trigger type with name,
+// enabled/disabled status, and file path.
+//
+// Parameters:
+// - c: The cobra command for output
+func Run(c *cobra.Command) error {
+	hooksDir := rc.HooksDir()
+
+	all, err := trigger.Discover(hooksDir)
+	if err != nil {
+		return err
+	}
+
+	total := 0
+	for _, ht := range trigger.ValidTypes() {
+		hooks := all[ht]
+		if len(hooks) == 0 {
+			continue
+		}
+
+		writeTrigger.TypeHeader(c, string(ht))
+		for _, h := range hooks {
+			status := statusEnabled
+			if !h.Enabled {
+				status = statusDisabled
+			}
+			writeTrigger.Entry(c, h.Name, status, h.Path)
+			total++
+		}
+		writeTrigger.BlankLine(c)
+	}
+
+	if total == 0 {
+		writeTrigger.NoHooksFound(c)
+		return nil
+	}
+
+	writeTrigger.Count(c, total)
+	return nil
+}
diff --git a/internal/cli/trigger/cmd/list/doc.go b/internal/cli/trigger/cmd/list/doc.go
new file mode 100644
index 000000000..87d143154
--- /dev/null
+++ b/internal/cli/trigger/cmd/list/doc.go
@@ -0,0 +1,12 @@
+// /        ctx: https://ctx.ist
+//  ,'`./   do you remember?
+// `.,'\\
+//      \   Copyright 2026-present Context contributors.
+// SPDX-License-Identifier: Apache-2.0
+
+// Package list implements the "ctx trigger list" subcommand.
+//
+// [Run] discovers all trigger scripts grouped by trigger type and
+// displays each trigger's name, enabled/disabled status, and
+// file path in the terminal.
+package list
diff --git a/internal/cli/trigger/cmd/test/cmd.go b/internal/cli/trigger/cmd/test/cmd.go
new file mode 100644
index 000000000..7f1894f23
--- /dev/null
+++ b/internal/cli/trigger/cmd/test/cmd.go
@@ -0,0 +1,146 @@
+// /        ctx: https://ctx.ist
+//  ,'`./   do you remember?
+// `.,'\
+//      \   Copyright 2026-present Context contributors.
+// SPDX-License-Identifier: Apache-2.0
+
+package test
+
+import (
+	"encoding/json"
+	"strings"
+	"time"
+
+	"github.com/spf13/cobra"
+
+	"github.com/ActiveMemory/ctx/internal/assets/read/desc"
+	"github.com/ActiveMemory/ctx/internal/config/embed/cmd"
+	embedFlag "github.com/ActiveMemory/ctx/internal/config/embed/flag"
+	"github.com/ActiveMemory/ctx/internal/config/flag"
+	"github.com/ActiveMemory/ctx/internal/config/token"
+	errTrigger "github.com/ActiveMemory/ctx/internal/err/trigger"
+	"github.com/ActiveMemory/ctx/internal/flagbind"
+	"github.com/ActiveMemory/ctx/internal/rc"
+	"github.com/ActiveMemory/ctx/internal/trigger"
+	writeTrigger "github.com/ActiveMemory/ctx/internal/write/trigger"
+)
+
+// Mock input constants for trigger testing.
+const (
+	// mockSessionID is the session ID used in test trigger input.
+	mockSessionID = "test-session"
+	// mockModel is the model name used in test trigger input.
+	mockModel = "test-model"
+	// mockVersion is the ctx version used in test trigger input.
+	mockVersion = "test"
+)
+
+// Cmd returns the "ctx trigger test" subcommand.
+// +// Returns: +// - *cobra.Command: Configured test subcommand +func Cmd() *cobra.Command { + var toolName string + var path string + + short, long := desc.Command(cmd.DescKeyTriggerTest) + + c := &cobra.Command{ + Use: cmd.UseTriggerTest, + Short: short, + Long: long, + Args: cobra.ExactArgs(1), + RunE: func(c *cobra.Command, args []string) error { + return Run(c, args[0], toolName, path) + }, + } + + flagbind.StringFlag(c, &toolName, flag.Tool, embedFlag.DescKeyTriggerTestTool) + flagbind.StringFlag(c, &path, flag.Path, embedFlag.DescKeyTriggerTestPath) + + return c +} + +// Run tests hooks for a given hook type by constructing a mock input +// and executing all enabled hooks. +// +// Parameters: +// - c: The cobra command for output +// - hookType: The hook type to test +// - toolName: Optional tool name for mock input +// - path: Optional file path for mock input +func Run(c *cobra.Command, hookType, toolName, path string) error { + // Validate hook type. + ht := trigger.HookType(hookType) + valid := trigger.ValidTypes() + + found := false + for _, v := range valid { + if v == ht { + found = true + break + } + } + + if !found { + names := make([]string, len(valid)) + for i, v := range valid { + names[i] = string(v) + } + return errTrigger.InvalidType(hookType, strings.Join(names, token.CommaSpace)) + } + + hooksDir := rc.HooksDir() + timeout := time.Duration(rc.HookTimeout()) * time.Second + + // Build mock input. 
+ params := make(map[string]any) + if path != "" { + params[flag.Path] = path + } + + input := &trigger.HookInput{ + TriggerType: hookType, + Tool: toolName, + Parameters: params, + Session: trigger.HookSession{ + ID: mockSessionID, + Model: mockModel, + }, + Timestamp: time.Now().UTC().Format(time.RFC3339), + CtxVersion: mockVersion, + } + + writeTrigger.TestingHeader(c, hookType) + + inputJSON, _ := json.MarshalIndent(input, "", " ") + writeTrigger.TestInput(c, string(inputJSON)) + + agg, err := trigger.RunAll(hooksDir, ht, input, timeout) + if err != nil { + return err + } + + if agg.Cancelled { + writeTrigger.Cancelled(c, agg.Message) + return nil + } + + if agg.Context != "" { + writeTrigger.ContextOutput(c, agg.Context) + } + + if len(agg.Errors) > 0 { + writeTrigger.ErrorsHeader(c) + for _, e := range agg.Errors { + writeTrigger.ErrorLine(c, e) + } + writeTrigger.BlankLine(c) + } + + if agg.Context == "" && len(agg.Errors) == 0 { + writeTrigger.NoOutput(c) + } + + return nil +} diff --git a/internal/cli/trigger/cmd/test/doc.go b/internal/cli/trigger/cmd/test/doc.go new file mode 100644 index 000000000..26483c7d4 --- /dev/null +++ b/internal/cli/trigger/cmd/test/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package test implements the "ctx trigger test" subcommand. +// +// [Run] constructs a mock TriggerInput and executes all enabled +// hooks for a given trigger type, displaying the aggregated +// output, context, and any errors for debugging. +package test diff --git a/internal/cli/trigger/doc.go b/internal/cli/trigger/doc.go new file mode 100644 index 000000000..d35eff82b --- /dev/null +++ b/internal/cli/trigger/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +// Package trigger implements the "ctx trigger" command group for managing +// lifecycle triggers that fire at specific events during AI sessions. +// +// Subcommands: add (create script), list (show all), test (execute +// with mock input), enable (add exec bit), disable (remove exec bit). +package trigger diff --git a/internal/cli/trigger/trigger.go b/internal/cli/trigger/trigger.go new file mode 100644 index 000000000..2db423e33 --- /dev/null +++ b/internal/cli/trigger/trigger.go @@ -0,0 +1,33 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trigger + +import ( + "github.com/spf13/cobra" + + "github.com/ActiveMemory/ctx/internal/cli/parent" + "github.com/ActiveMemory/ctx/internal/cli/trigger/cmd/add" + "github.com/ActiveMemory/ctx/internal/cli/trigger/cmd/disable" + "github.com/ActiveMemory/ctx/internal/cli/trigger/cmd/enable" + "github.com/ActiveMemory/ctx/internal/cli/trigger/cmd/list" + "github.com/ActiveMemory/ctx/internal/cli/trigger/cmd/test" + "github.com/ActiveMemory/ctx/internal/config/embed/cmd" +) + +// Cmd returns the "ctx trigger" parent command. +// +// Returns: +// - *cobra.Command: Configured trigger command with subcommands +func Cmd() *cobra.Command { + return parent.Cmd(cmd.DescKeyTrigger, cmd.UseTrigger, + add.Cmd(), + list.Cmd(), + test.Cmd(), + enable.Cmd(), + disable.Cmd(), + ) +} diff --git a/internal/compat/compat_test.go b/internal/compat/compat_test.go new file mode 100644 index 000000000..a954842a4 --- /dev/null +++ b/internal/compat/compat_test.go @@ -0,0 +1,195 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package compat + +import ( + "os" + "path/filepath" + "strings" + "testing" + "time" + + "github.com/ActiveMemory/ctx/internal/cli/agent/core/budget" + "github.com/ActiveMemory/ctx/internal/entity" + "github.com/ActiveMemory/ctx/internal/skill" + "github.com/ActiveMemory/ctx/internal/steering" + "github.com/ActiveMemory/ctx/internal/trigger" +) + +// TestBackwardCompat_AssemblePacket_NoSteeringNoSkill verifies that +// AssemblePacket with nil steering and empty skill produces a packet +// without Steering or Skill sections — identical to pre-extension +// behaviour. +// +// Validates: Requirements 14.1 +func TestBackwardCompat_AssemblePacket_NoSteeringNoSkill(t *testing.T) { + ctx := &entity.Context{} + + pkt := budget.AssemblePacket(ctx, 8000, nil, "") + + if len(pkt.Steering) != 0 { + t.Errorf("expected no steering entries, got %d", len(pkt.Steering)) + } + if pkt.Skill != "" { + t.Errorf("expected empty skill, got %q", pkt.Skill) + } + + // Verify the packet still contains the always-present fields. + if pkt.Budget != 8000 { + t.Errorf("expected budget 8000, got %d", pkt.Budget) + } + if pkt.Instruction == "" { + t.Error("expected non-empty instruction") + } + + // Render to markdown and confirm no Steering/Skill sections appear. + md := budget.RenderMarkdownPacket(pkt) + if strings.Contains(md, "## Steering") { + t.Error("rendered markdown should not contain Steering section") + } + if strings.Contains(md, "## Skill") { + t.Error("rendered markdown should not contain Skill section") + } +} + +// TestBackwardCompat_HookRunAll_NonExistentDir verifies that RunAll +// on a non-existent hooks directory returns an empty AggregatedOutput +// without error. 
+// +// Validates: Requirements 14.2 +func TestBackwardCompat_HookRunAll_NonExistentDir(t *testing.T) { + nonexistent := filepath.Join(t.TempDir(), "no-such-hooks") + input := &trigger.HookInput{TriggerType: "pre-tool-use", Tool: "test"} + + agg, err := trigger.RunAll(nonexistent, trigger.PreToolUse, input, 5*time.Second) + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + if agg == nil { + t.Fatal("expected non-nil AggregatedOutput") + } + if agg.Cancelled { + t.Error("expected Cancelled to be false") + } + if agg.Context != "" { + t.Errorf("expected empty context, got %q", agg.Context) + } + if agg.Message != "" { + t.Errorf("expected empty message, got %q", agg.Message) + } + if len(agg.Errors) != 0 { + t.Errorf("expected no errors, got %v", agg.Errors) + } +} + +// TestBackwardCompat_HookDiscover_NonExistentDir verifies that Discover +// on a non-existent hooks directory returns an empty map without error. +// +// Validates: Requirements 14.2 +func TestBackwardCompat_HookDiscover_NonExistentDir(t *testing.T) { + nonexistent := filepath.Join(t.TempDir(), "no-such-hooks") + + result, err := trigger.Discover(nonexistent) + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + if len(result) != 0 { + t.Errorf("expected empty map, got %d entries", len(result)) + } +} + +// TestBackwardCompat_SkillLoadAll_NonExistentDir verifies that LoadAll +// on a non-existent skills directory returns nil without error. 
+// +// Validates: Requirements 14.4 +func TestBackwardCompat_SkillLoadAll_NonExistentDir(t *testing.T) { + nonexistent := filepath.Join(t.TempDir(), "no-such-skills") + + skills, err := skill.LoadAll(nonexistent) + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + if skills != nil { + t.Errorf("expected nil, got %d skills", len(skills)) + } +} + +// TestBackwardCompat_SteeringLoadAll_NonExistentDir verifies that +// LoadAll on a non-existent steering directory returns an error, which +// callers (like ctx agent) handle gracefully by skipping steering. +// +// Validates: Requirements 14.1 +func TestBackwardCompat_SteeringLoadAll_NonExistentDir(t *testing.T) { + nonexistent := filepath.Join(t.TempDir(), "no-such-steering") + + files, err := steering.LoadAll(nonexistent) + if err == nil { + t.Fatal("expected error for non-existent steering directory") + } + if files != nil { + t.Errorf("expected nil files on error, got %d", len(files)) + } + + // The error should be an os-level "not exist" error that callers + // can detect with os.IsNotExist or errors.Is. + if !os.IsNotExist(unwrapPathError(err)) { + t.Errorf("expected not-exist error, got %v", err) + } +} + +// TestBackwardCompat_FullAgentPath_NoExtensions exercises the full +// backward-compatible agent assembly path: no steering directory, +// no skills directory, no hooks directory. The resulting packet should +// be structurally identical to the pre-extension output. +// +// Validates: Requirements 14.1, 14.2, 14.3, 14.4, 14.5 +func TestBackwardCompat_FullAgentPath_NoExtensions(t *testing.T) { + ctx := &entity.Context{} + + // Simulate the agent path: no steering files loaded (directory + // missing → error → caller passes nil), no skill. + pkt := budget.AssemblePacket(ctx, 8000, nil, "") + + // Verify core structure is intact. 
+ if pkt.Budget != 8000 { + t.Errorf("budget = %d, want 8000", pkt.Budget) + } + if pkt.Instruction == "" { + t.Error("instruction should be populated from embedded assets") + } + + // Verify no extension sections are present. + if len(pkt.Steering) != 0 { + t.Errorf("steering should be empty, got %d", len(pkt.Steering)) + } + if pkt.Skill != "" { + t.Errorf("skill should be empty, got %q", pkt.Skill) + } + + // Render and verify the markdown output has no extension sections. + md := budget.RenderMarkdownPacket(pkt) + if !strings.Contains(md, "# Context Packet") { + t.Error("rendered markdown should contain Context Packet header") + } + for _, section := range []string{"## Steering", "## Skill"} { + if strings.Contains(md, section) { + t.Errorf("rendered markdown should not contain %q when no extensions are active", section) + } + } +} + +// unwrapPathError extracts the underlying error from a wrapped path +// error chain for os.IsNotExist checking. +func unwrapPathError(err error) error { + for { + u, ok := err.(interface{ Unwrap() error }) + if !ok { + return err + } + err = u.Unwrap() + } +} diff --git a/internal/compat/doc.go b/internal/compat/doc.go new file mode 100644 index 000000000..76abc3ae6 --- /dev/null +++ b/internal/compat/doc.go @@ -0,0 +1,14 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package compat contains backward-compatibility integration tests +// that verify the hooks-and-steering extensions do not break existing +// ctx workflows when the new directories are absent. +// +// Tests exercise packet assembly, hook discovery and execution, and +// skill/steering loading to confirm graceful degradation when the +// .context/hooks/ and .context/steering/ directories do not exist.
+package compat diff --git a/internal/compat/testmain_test.go b/internal/compat/testmain_test.go new file mode 100644 index 000000000..7a3e096a2 --- /dev/null +++ b/internal/compat/testmain_test.go @@ -0,0 +1,19 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package compat + +import ( + "os" + "testing" + + "github.com/ActiveMemory/ctx/internal/assets/read/lookup" +) + +func TestMain(m *testing.M) { + lookup.Init() + os.Exit(m.Run()) +} diff --git a/internal/config/dir/dir.go b/internal/config/dir/dir.go index ebb79da98..9ecadbcd6 100644 --- a/internal/config/dir/dir.go +++ b/internal/config/dir/dir.go @@ -14,6 +14,8 @@ const ( Claude = ".claude" // Context is the default context directory name. Context = ".context" + // Hooks is the subdirectory for lifecycle hook scripts within .context/. + Hooks = "hooks" // HooksMessages is the subdirectory path for hook message // overrides within .context/. HooksMessages = "hooks/messages" @@ -35,6 +37,10 @@ const ( Projects = "projects" // Sessions is the subdirectory for session summaries within .context/. Sessions = "sessions" + // Skills is the subdirectory for skill definitions within .context/. + Skills = "skills" + // Steering is the subdirectory for steering files within .context/. + Steering = "steering" // Specs is the project-root directory for formalized plans and feature specs. Specs = "specs" // State is the subdirectory for project-scoped runtime state within .context/. diff --git a/internal/config/embed/cmd/skill.go b/internal/config/embed/cmd/skill.go new file mode 100644 index 000000000..da5e2949c --- /dev/null +++ b/internal/config/embed/cmd/skill.go @@ -0,0 +1,23 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package cmd + +// Use strings for skill subcommands. 
+const ( + UseSkill = "skill" + UseSkillInstall = "install " + UseSkillList = "list" + UseSkillRemove = "remove " +) + +// DescKeys for skill subcommands. +const ( + DescKeySkill = "skill" + DescKeySkillInstall = "skill.install" + DescKeySkillList = "skill.list" + DescKeySkillRemove = "skill.remove" +) diff --git a/internal/config/embed/cmd/steering.go b/internal/config/embed/cmd/steering.go new file mode 100644 index 000000000..2810c294d --- /dev/null +++ b/internal/config/embed/cmd/steering.go @@ -0,0 +1,27 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package cmd + +// Use strings for steering subcommands. +const ( + UseSteering = "steering" + UseSteeringAdd = "add " + UseSteeringList = "list" + UseSteeringPreview = "preview " + UseSteeringInit = "init" + UseSteeringSync = "sync" +) + +// DescKeys for steering subcommands. +const ( + DescKeySteering = "steering" + DescKeySteeringAdd = "steering.add" + DescKeySteeringList = "steering.list" + DescKeySteeringPreview = "steering.preview" + DescKeySteeringInit = "steering.init" + DescKeySteeringSync = "steering.sync" +) diff --git a/internal/config/embed/cmd/trigger.go b/internal/config/embed/cmd/trigger.go new file mode 100644 index 000000000..2b0b0b008 --- /dev/null +++ b/internal/config/embed/cmd/trigger.go @@ -0,0 +1,27 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package cmd + +// Use strings for trigger subcommands. +const ( + UseTrigger = "trigger" + UseTriggerAdd = "add " + UseTriggerList = "list" + UseTriggerTest = "test " + UseTriggerEnable = "enable " + UseTriggerDisable = "disable " +) + +// DescKeys for trigger subcommands. 
+const ( + DescKeyTrigger = "trigger" + DescKeyTriggerAdd = "trigger.add" + DescKeyTriggerList = "trigger.list" + DescKeyTriggerTest = "trigger.test" + DescKeyTriggerEnable = "trigger.enable" + DescKeyTriggerDisable = "trigger.disable" +) diff --git a/internal/config/embed/flag/agent.go b/internal/config/embed/flag/agent.go index 7f532f117..c493d8721 100644 --- a/internal/config/embed/flag/agent.go +++ b/internal/config/embed/flag/agent.go @@ -12,4 +12,5 @@ const ( DescKeyAgentCooldown = "agent.cooldown" DescKeyAgentFormat = "agent.format" DescKeyAgentSession = "agent.session" + DescKeyAgentSkill = "agent.skill" ) diff --git a/internal/config/embed/flag/flag.go b/internal/config/embed/flag/flag.go index e28d5313e..1af103756 100644 --- a/internal/config/embed/flag/flag.go +++ b/internal/config/embed/flag/flag.go @@ -13,6 +13,10 @@ const ( DescKeyCompactArchive = "compact.archive" DescKeyContextDir = "context-dir" DescKeyDoctorJson = "doctor.json" + DescKeyTriggerTestPath = "trigger.test.path" + DescKeyTriggerTestTool = "trigger.test.tool" DescKeyInitializeCaller = "initialize.caller" DescKeySetupWrite = "setup.write" + DescKeySteeringSyncAll = "steering.sync.all" + DescKeyTool = "tool" ) diff --git a/internal/config/embed/text/agent.go b/internal/config/embed/text/agent.go index 78c48b69d..bcdedb2b3 100644 --- a/internal/config/embed/text/agent.go +++ b/internal/config/embed/text/agent.go @@ -19,6 +19,9 @@ const ( DescKeyAgentSectionLearnings = "agent.section-learnings" DescKeyAgentSectionSummaries = "agent.section-summaries" + DescKeyAgentSectionSteering = "agent.section-steering" + DescKeyAgentSectionSkill = "agent.section-skill" + DescKeyWriteAgentBulletItem = "write.agent-bullet-item" DescKeyWriteAgentNumberedItem = "write.agent-numbered-item" ) diff --git a/internal/config/embed/text/drift.go b/internal/config/embed/text/drift.go index da71f11c3..331f0a4f3 100644 --- a/internal/config/embed/text/drift.go +++ b/internal/config/embed/text/drift.go @@ -55,6 +55,9 
@@ const ( DescKeyDriftCheckFileAge = "drift.check-file-age" DescKeyDriftStaleHeader = "drift.stale-header" DescKeyDriftCheckTemplateHeader = "drift.check-template-header" + DescKeyDriftInvalidTool = "drift.invalid-tool" + DescKeyDriftHookNoExec = "drift.hook-no-exec" + DescKeyDriftStaleSyncFile = "drift.stale-sync-file" DescKeyVersionDriftRelayMessage = "version-drift.relay-message" DescKeyWriteVersionDriftFallback = "write.version-drift-fallback" ) diff --git a/internal/config/embed/text/err_cli.go b/internal/config/embed/text/err_cli.go new file mode 100644 index 000000000..76ab35cee --- /dev/null +++ b/internal/config/embed/text/err_cli.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package text + +// DescKeys for CLI errors. +const ( + DescKeyErrCliNoToolSpecified = "err.cli.no-tool-specified" +) diff --git a/internal/config/embed/text/err_hook.go b/internal/config/embed/text/err_hook.go index 95be4b9ed..4befa572f 100644 --- a/internal/config/embed/text/err_hook.go +++ b/internal/config/embed/text/err_hook.go @@ -8,10 +8,25 @@ package text // DescKeys for hook execution errors. 
const ( + DescKeyErrHookChmod = "err.hook.chmod" + DescKeyErrHookCreateDir = "err.hook.create-dir" + DescKeyErrHookDiscover = "err.hook.discover" DescKeyErrHookEmbeddedTemplateNotFound = "err.hook.embedded-template-not-found" + DescKeyErrHookExit = "err.hook.exit" + DescKeyErrHookInvalidJSONOutput = "err.hook.invalid-json-output" + DescKeyErrHookInvalidType = "err.hook.invalid-type" + DescKeyErrHookMarshalInput = "err.hook.marshal-input" + DescKeyErrHookNotFound = "err.hook.not-found" DescKeyErrHookOverrideExists = "err.hook.override-exists" DescKeyErrHookRemoveOverride = "err.hook.remove-override" + DescKeyErrHookResolveHooksDir = "err.hook.resolve-hooks-dir" + DescKeyErrHookResolvePath = "err.hook.resolve-path" + DescKeyErrHookScriptExists = "err.hook.script-exists" + DescKeyErrHookStat = "err.hook.stat" + DescKeyErrHookStatPath = "err.hook.stat-path" + DescKeyErrHookTimeout = "err.hook.timeout" DescKeyErrHookUnknownHook = "err.hook.unknown-hook" DescKeyErrHookUnknownVariant = "err.hook.unknown-variant" DescKeyErrHookWriteOverride = "err.hook.write-override" + DescKeyErrHookWriteScript = "err.hook.write-script" ) diff --git a/internal/config/embed/text/err_lifecycle_hook.go b/internal/config/embed/text/err_lifecycle_hook.go new file mode 100644 index 000000000..bc4a99699 --- /dev/null +++ b/internal/config/embed/text/err_lifecycle_hook.go @@ -0,0 +1,14 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package text + +// DescKeys for lifecycle hook error messages. 
+const ( + DescKeyErrLifecycleHookSymlink = "err.lifecycle-hook.symlink" + DescKeyErrLifecycleHookBoundary = "err.lifecycle-hook.boundary" + DescKeyErrLifecycleHookNotExecutable = "err.lifecycle-hook.not-executable" +) diff --git a/internal/config/embed/text/err_setup.go b/internal/config/embed/text/err_setup.go new file mode 100644 index 000000000..0b6175ba3 --- /dev/null +++ b/internal/config/embed/text/err_setup.go @@ -0,0 +1,15 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package text + +// DescKeys for setup operations errors. +const ( + DescKeyErrSetupCreateDir = "err.setup.create-dir" + DescKeyErrSetupMarshalConfig = "err.setup.marshal-config" + DescKeyErrSetupFileWrite = "err.setup.write-file" + DescKeyErrSetupSyncSteering = "err.setup.sync-steering" +) diff --git a/internal/config/embed/text/err_skill.go b/internal/config/embed/text/err_skill.go index db55ea606..022ac8232 100644 --- a/internal/config/embed/text/err_skill.go +++ b/internal/config/embed/text/err_skill.go @@ -8,6 +8,20 @@ package text // DescKeys for skill operations errors. 
const ( - DescKeyErrSkillList = "err.skill.skill-list" - DescKeyErrSkillRead = "err.skill.skill-read" + DescKeyErrSkillCreateDest = "err.skill.create-dest" + DescKeyErrSkillInstall = "err.skill.install" + DescKeyErrSkillInvalidManifest = "err.skill.invalid-manifest" + DescKeyErrSkillInvalidYAML = "err.skill.invalid-yaml" + DescKeyErrSkillList = "err.skill.skill-list" + DescKeyErrSkillLoad = "err.skill.load" + DescKeyErrSkillMissingClosingDelim = "err.skill.missing-closing-delimiter" + DescKeyErrSkillMissingName = "err.skill.missing-name" + DescKeyErrSkillMissingOpeningDelim = "err.skill.missing-opening-delimiter" + DescKeyErrSkillNotFound = "err.skill.not-found" + DescKeyErrSkillNotValidDir = "err.skill.not-valid-dir" + DescKeyErrSkillNotValidSource = "err.skill.not-valid-source" + DescKeyErrSkillRead = "err.skill.skill-read" + DescKeyErrSkillReadDir = "err.skill.read-dir" + DescKeyErrSkillRemove = "err.skill.remove" + DescKeyErrSkillSkillLoad = "err.skill.skill-load" ) diff --git a/internal/config/embed/text/err_steering.go b/internal/config/embed/text/err_steering.go new file mode 100644 index 000000000..c4df1a46e --- /dev/null +++ b/internal/config/embed/text/err_steering.go @@ -0,0 +1,31 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package text + +// DescKeys for steering operations errors. 
+const ( + DescKeyErrSteeringComputeRelPath = "err.steering.compute-rel-path" + DescKeyErrSteeringContextDirMissing = "err.steering.context-dir-missing" + DescKeyErrSteeringCreateDir = "err.steering.create-dir" + DescKeyErrSteeringFileExists = "err.steering.file-exists" + DescKeyErrSteeringInvalidYAML = "err.steering.invalid-yaml" + DescKeyErrSteeringMissingClosingDelim = "err.steering.missing-closing-delimiter" + DescKeyErrSteeringMissingOpeningDelim = "err.steering.missing-opening-delimiter" + DescKeyErrSteeringNoTool = "err.steering.no-tool" + DescKeyErrSteeringOutputEscapesRoot = "err.steering.output-escapes-root" + DescKeyErrSteeringParse = "err.steering.parse" + DescKeyErrSteeringReadDir = "err.steering.read-dir" + DescKeyErrSteeringReadFile = "err.steering.read-file" + DescKeyErrSteeringResolveOutput = "err.steering.resolve-output" + DescKeyErrSteeringResolveRoot = "err.steering.resolve-root" + DescKeyErrSteeringSyncAll = "err.steering.sync-all" + DescKeyErrSteeringSyncName = "err.steering.sync-name" + DescKeyErrSteeringUnsupportedTool = "err.steering.unsupported-tool" + DescKeyErrSteeringWriteFile = "err.steering.write-file" + DescKeyErrSteeringWriteSteeringFile = "err.steering.write-steering-file" + DescKeyErrSteeringWriteInitFile = "err.steering.write-init-file" +) diff --git a/internal/config/embed/text/hook.go b/internal/config/embed/text/hook.go index 20f44b220..755c0c4c9 100644 --- a/internal/config/embed/text/hook.go +++ b/internal/config/embed/text/hook.go @@ -13,7 +13,6 @@ const ( DescKeyHookClaude = "hook.claude" DescKeyHookCopilot = "hook.copilot" DescKeyHookCopilotCLI = "hook.copilot-cli" - DescKeyHookCursor = "hook.cursor" DescKeyHookSupportedTools = "hook.supported-tools" DescKeyHookWindsurf = "hook.windsurf" ) diff --git a/internal/config/embed/text/mcp_err.go b/internal/config/embed/text/mcp_err.go index b22a7c607..7bd6acaa0 100644 --- a/internal/config/embed/text/mcp_err.go +++ b/internal/config/embed/text/mcp_err.go @@ -17,6 +17,7 @@ const 
( DescKeyMCPErrFailedMarshal = "mcp.err-failed-marshal" DescKeyMCPErrTypeContentRequired = "mcp.err-type-content-required" DescKeyMCPErrQueryRequired = "mcp.err-query-required" + DescKeyMCPErrSearchRead = "mcp.err-search-read" DescKeyMCPErrUnknownPrompt = "mcp.err-unknown-prompt" DescKeyMCPErrURIRequired = "mcp.err-uri-required" ) diff --git a/internal/config/embed/text/mcp_tool.go b/internal/config/embed/text/mcp_tool.go index f4f5d3c75..4019df4ec 100644 --- a/internal/config/embed/text/mcp_tool.go +++ b/internal/config/embed/text/mcp_tool.go @@ -37,4 +37,11 @@ const ( DescKeyMCPToolPropRecentAct = "mcp.tool-prop-recent-action" DescKeyMCPToolPropEventType = "mcp.tool-prop-event-type" DescKeyMCPToolPropCaller = "mcp.tool-prop-caller" + DescKeyMCPToolSteeringGetDesc = "mcp.tool-steering-get-desc" + DescKeyMCPToolSearchDesc = "mcp.tool-search-desc" + DescKeyMCPToolSessionStartDesc = "mcp.tool-session-start-desc" + DescKeyMCPToolSessionEndDesc = "mcp.tool-session-end-desc" + DescKeyMCPToolPropPrompt = "mcp.tool-prop-prompt" + DescKeyMCPToolPropSearchQuery = "mcp.tool-prop-search-query" + DescKeyMCPToolPropSummary = "mcp.tool-prop-summary" ) diff --git a/internal/config/flag/flag.go b/internal/config/flag/flag.go index f76e8ff91..2d71fc7a1 100644 --- a/internal/config/flag/flag.go +++ b/internal/config/flag/flag.go @@ -45,6 +45,7 @@ const ( Follow = "follow" Format = "format" Session = "session" + Skill = "skill" ) // Shared flag names used across commands. @@ -84,6 +85,7 @@ const ( NoPluginEnable = "no-plugin-enable" Out = "out" Output = "output" + Path = "path" Prepend = "prepend" Project = "project" Prompt = "prompt" diff --git a/internal/config/fs/perm.go b/internal/config/fs/perm.go index b3072ca78..92d68a4d6 100644 --- a/internal/config/fs/perm.go +++ b/internal/config/fs/perm.go @@ -21,3 +21,10 @@ const ( // directory (owner rwx only). PermKeyDir = 0700 ) + +// Permission bit masks. 
+const ( + // ExecBitMask is the Unix permission bitmask for any + // executable bit (owner, group, other). + ExecBitMask = 0111 +) diff --git a/internal/config/hook/hook.go b/internal/config/hook/hook.go index 338680013..6112737c8 100644 --- a/internal/config/hook/hook.go +++ b/internal/config/hook/hook.go @@ -64,6 +64,8 @@ const ( ToolCopilot = "copilot" ToolCopilotCLI = "copilot-cli" ToolCursor = "cursor" + ToolKiro = "kiro" + ToolCline = "cline" ToolWindsurf = "windsurf" ) diff --git a/internal/config/mcp/field/field.go b/internal/config/mcp/field/field.go index 88491da1e..9ba4dc58f 100644 --- a/internal/config/mcp/field/field.go +++ b/internal/config/mcp/field/field.go @@ -27,4 +27,8 @@ const ( // AttrFile is the metadata key on PendingUpdate recording which // context file was written to (e.g., "DECISIONS.md"). AttrFile = "file" + // Prompt is the optional prompt text for steering file matching. + Prompt = "prompt" + // Summary is the optional session summary for session-end hooks. + Summary = "summary" ) diff --git a/internal/config/mcp/tool/tool.go b/internal/config/mcp/tool/tool.go index 8789de8e6..a43292250 100644 --- a/internal/config/mcp/tool/tool.go +++ b/internal/config/mcp/tool/tool.go @@ -30,4 +30,12 @@ const ( SessionEvent = "ctx_session_event" // Remind is the MCP tool name for listing reminders. Remind = "ctx_remind" + // SteeringGet is the MCP tool name for retrieving steering files. + SteeringGet = "ctx_steering_get" + // Search is the MCP tool name for searching context files. + Search = "ctx_search" + // SessionStart is the MCP tool name for session start hooks. + SessionStart = "ctx_session_start" + // SessionEnd is the MCP tool name for session end hooks. + SessionEnd = "ctx_session_end" ) diff --git a/internal/config/sysinfo/proc.go b/internal/config/sysinfo/proc.go index 12ca8a138..fee37de38 100644 --- a/internal/config/sysinfo/proc.go +++ b/internal/config/sysinfo/proc.go @@ -6,7 +6,9 @@ package sysinfo -// Linux procfs path constants. 
+// Linux procfs path constants. These constants are consumed +// by Linux-specific source files (memory_linux.go, +// load_linux.go) and are not visible on non-Linux builds. const ( // ProcLoadavg is the Linux procfs path for load averages. ProcLoadavg = "/proc/loadavg" @@ -20,7 +22,9 @@ const ( BytesPerKB = 1024 ) -// Meminfo field keys from /proc/meminfo. +// Meminfo field keys from /proc/meminfo. These constants are +// consumed by Linux-specific source files and are not visible +// on non-Linux builds. const ( // FieldMemTotal is the total physical memory field. FieldMemTotal = "MemTotal" diff --git a/internal/drift/check_ext.go b/internal/drift/check_ext.go new file mode 100644 index 000000000..d0e115a7c --- /dev/null +++ b/internal/drift/check_ext.go @@ -0,0 +1,183 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package drift + +import ( + "fmt" + "os" + "path/filepath" + "slices" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/embed/text" + "github.com/ActiveMemory/ctx/internal/config/file" + "github.com/ActiveMemory/ctx/internal/config/fs" + cfgHook "github.com/ActiveMemory/ctx/internal/config/hook" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/steering" + "github.com/ActiveMemory/ctx/internal/trigger" +) + +// supportedTools lists the valid tool identifiers for ctx. +var supportedTools = []string{"claude", "cursor", "cline", "kiro", "codex"} + +// checkSteeringTools validates that all steering files reference only +// supported tool identifiers in their tools list. +// +// Parameters: +// - report: Report to append warnings to (modified in place) +func checkSteeringTools(report *Report) { + steeringDir := rc.SteeringDir() + + files, err := steering.LoadAll(steeringDir) + if err != nil { + // Directory doesn't exist or can't be read — skip silently. 
+ report.Passed = append(report.Passed, CheckSteeringTools) + return + } + + found := false + for _, sf := range files { + for _, tool := range sf.Tools { + if !slices.Contains(supportedTools, tool) { + report.Warnings = append(report.Warnings, Issue{ + File: filepath.Base(sf.Path), + Type: IssueInvalidTool, + Message: fmt.Sprintf( + desc.Text(text.DescKeyDriftInvalidTool), tool, + ), + }) + found = true + } + } + } + + if !found { + report.Passed = append(report.Passed, CheckSteeringTools) + } +} + +// checkHookPerms scans hook directories for scripts that lack the +// executable permission bit. +// +// Parameters: +// - report: Report to append warnings to (modified in place) +func checkHookPerms(report *Report) { + hooksDir := rc.HooksDir() + + // Scan the raw directories to find scripts without the executable bit. + // We don't use trigger.Discover here because it skips non-executable scripts. + found := false + for _, ht := range trigger.ValidTypes() { + typeDir := filepath.Join(hooksDir, string(ht)) + entries, readErr := os.ReadDir(typeDir) + if readErr != nil { + continue + } + for _, e := range entries { + if e.IsDir() { + continue + } + info, infoErr := e.Info() + if infoErr != nil { + continue + } + if info.Mode().Perm()&fs.ExecBitMask == 0 { + report.Warnings = append(report.Warnings, Issue{ + File: filepath.Join(string(ht), e.Name()), + Type: IssueHookNoExec, + Message: desc.Text(text.DescKeyDriftHookNoExec), + Path: filepath.Join(typeDir, e.Name()), + }) + found = true + } + } + } + + if !found { + report.Passed = append(report.Passed, CheckHookPerms) + } +} + +// checkSyncStaleness compares synced tool-native files against what +// steering.SyncTool would produce. If they differ, the synced file +// is stale. 
+// +// Parameters: +// - report: Report to append warnings to (modified in place) +func checkSyncStaleness(report *Report) { + steeringDir := rc.SteeringDir() + + files, err := steering.LoadAll(steeringDir) + if err != nil { + // No steering files — nothing to check. + report.Passed = append(report.Passed, CheckSyncStaleness) + return + } + + if len(files) == 0 { + report.Passed = append(report.Passed, CheckSyncStaleness) + return + } + + cwd, cwdErr := os.Getwd() + if cwdErr != nil { + report.Passed = append(report.Passed, CheckSyncStaleness) + return + } + + found := false + // Check each syncable tool. + syncTools := []string{ + cfgHook.ToolCursor, cfgHook.ToolCline, + cfgHook.ToolKiro, + } + for _, tool := range syncTools { + stale := steering.StaleFiles(steeringDir, cwd, tool) + for _, name := range stale { + report.Warnings = append(report.Warnings, Issue{ + File: name, + Type: IssueStaleSyncFile, + Message: desc.Text(text.DescKeyDriftStaleSyncFile), + Path: fmt.Sprintf("%s (tool: %s)", name, tool), + }) + found = true + } + } + + if !found { + report.Passed = append(report.Passed, CheckSyncStaleness) + } +} + +// checkRCTool validates that the .ctxrc tool field contains a supported +// tool identifier. +// +// Parameters: +// - report: Report to append warnings to (modified in place) +func checkRCTool(report *Report) { + tool := rc.Tool() + + // Empty tool field is valid — it means no tool is configured. 
+ if tool == "" { + report.Passed = append(report.Passed, CheckRCTool) + return + } + + if !slices.Contains(supportedTools, tool) { + report.Warnings = append(report.Warnings, Issue{ + File: file.CtxRC, + Type: IssueInvalidTool, + Message: fmt.Sprintf( + desc.Text(text.DescKeyDriftInvalidTool), tool, + ), + }) + return + } + + report.Passed = append(report.Passed, CheckRCTool) +} diff --git a/internal/drift/check_ext_test.go b/internal/drift/check_ext_test.go new file mode 100644 index 000000000..83bebd18d --- /dev/null +++ b/internal/drift/check_ext_test.go @@ -0,0 +1,427 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package drift + +import ( + "fmt" + "os" + "path/filepath" + "testing" + + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/steering" +) + +// **Validates: Requirements 19.7** + +func TestCheckSteeringTools(t *testing.T) { + tests := []struct { + name string + files map[string]string // steering file name → content + wantWarnings int + wantPassed bool + }{ + { + name: "no steering directory", + files: nil, + wantWarnings: 0, + wantPassed: true, + }, + { + name: "valid tool identifiers", + files: map[string]string{ + "api.md": "---\nname: api\ntools: [claude, cursor]\n---\nBody\n", + }, + wantWarnings: 0, + wantPassed: true, + }, + { + name: "empty tools list (all tools)", + files: map[string]string{ + "api.md": "---\nname: api\n---\nBody\n", + }, + wantWarnings: 0, + wantPassed: true, + }, + { + name: "invalid tool identifier", + files: map[string]string{ + "api.md": "---\nname: api\ntools: [claude, vscode]\n---\nBody\n", + }, + wantWarnings: 1, + wantPassed: false, + }, + { + name: "multiple invalid tools in one file", + files: map[string]string{ + "api.md": "---\nname: api\ntools: [vscode, neovim]\n---\nBody\n", + }, + wantWarnings: 2, + wantPassed: false, + }, + } + + for _, tt := range tests { + 
t.Run(tt.name, func(t *testing.T) { + tmpDir := t.TempDir() + + steeringDir := filepath.Join(tmpDir, ".context", "steering") + if tt.files != nil { + if err := os.MkdirAll(steeringDir, 0o755); err != nil { + t.Fatal(err) + } + for name, content := range tt.files { + if err := os.WriteFile( + filepath.Join(steeringDir, name), + []byte(content), 0o644, + ); err != nil { + t.Fatal(err) + } + } + } + + // chdir so rc reads .ctxrc from tmpDir + origDir := chdir(t, tmpDir) + defer func() { _ = os.Chdir(origDir) }() + + writeCtxRC(t, tmpDir, fmt.Sprintf("steering:\n dir: %s\n", steeringDir)) + rc.Reset() + defer rc.Reset() + + report := &Report{ + Warnings: []Issue{}, + Violations: []Issue{}, + Passed: []CheckName{}, + } + + checkSteeringTools(report) + + if len(report.Warnings) != tt.wantWarnings { + t.Errorf("expected %d warnings, got %d", tt.wantWarnings, len(report.Warnings)) + for _, w := range report.Warnings { + t.Logf(" warning: %s", w.Message) + } + } + + for _, w := range report.Warnings { + if w.Type != IssueInvalidTool { + t.Errorf("expected issue type %q, got %q", IssueInvalidTool, w.Type) + } + } + + passed := checkPassed(report, CheckSteeringTools) + if passed != tt.wantPassed { + t.Errorf("expected passed=%v, got passed=%v", tt.wantPassed, passed) + } + }) + } +} + +func TestCheckHookPerms(t *testing.T) { + tests := []struct { + name string + setup func(t *testing.T, hooksDir string) + wantWarnings int + wantPassed bool + }{ + { + name: "no hooks directory", + setup: func(_ *testing.T, _ string) {}, + wantWarnings: 0, + wantPassed: true, + }, + { + name: "all hooks executable", + setup: func(t *testing.T, hooksDir string) { + t.Helper() + dir := filepath.Join(hooksDir, "pre-tool-use") + mustMkdir(t, dir) + mustWriteFile(t, filepath.Join(dir, "check.sh"), + "#!/bin/bash\necho ok", 0o755) + }, + wantWarnings: 0, + wantPassed: true, + }, + { + name: "hook missing executable bit", + setup: func(t *testing.T, hooksDir string) { + t.Helper() + dir := 
filepath.Join(hooksDir, "session-start") + mustMkdir(t, dir) + mustWriteFile(t, filepath.Join(dir, "init.sh"), + "#!/bin/bash\necho ok", 0o644) + }, + wantWarnings: 1, + wantPassed: false, + }, + { + name: "mixed executable and non-executable", + setup: func(t *testing.T, hooksDir string) { + t.Helper() + dir := filepath.Join(hooksDir, "post-tool-use") + mustMkdir(t, dir) + mustWriteFile(t, filepath.Join(dir, "lint.sh"), + "#!/bin/bash\necho ok", 0o755) + mustWriteFile(t, filepath.Join(dir, "broken.sh"), + "#!/bin/bash\necho ok", 0o644) + }, + wantWarnings: 1, + wantPassed: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tmpDir := t.TempDir() + + hooksDir := filepath.Join(tmpDir, ".context", "hooks") + tt.setup(t, hooksDir) + + origDir := chdir(t, tmpDir) + defer func() { _ = os.Chdir(origDir) }() + + writeCtxRC(t, tmpDir, fmt.Sprintf("hooks:\n dir: %s\n", hooksDir)) + rc.Reset() + defer rc.Reset() + + report := &Report{ + Warnings: []Issue{}, + Violations: []Issue{}, + Passed: []CheckName{}, + } + + checkHookPerms(report) + + if len(report.Warnings) != tt.wantWarnings { + t.Errorf("expected %d warnings, got %d", tt.wantWarnings, len(report.Warnings)) + for _, w := range report.Warnings { + t.Logf(" warning: %s (file=%s)", w.Message, w.File) + } + } + + for _, w := range report.Warnings { + if w.Type != IssueHookNoExec { + t.Errorf("expected issue type %q, got %q", IssueHookNoExec, w.Type) + } + } + + passed := checkPassed(report, CheckHookPerms) + if passed != tt.wantPassed { + t.Errorf("expected passed=%v, got passed=%v", tt.wantPassed, passed) + } + }) + } +} + +func TestCheckSyncStaleness(t *testing.T) { + tests := []struct { + name string + setup func(t *testing.T, tmpDir, steeringDir string) + wantWarnings int + wantPassed bool + }{ + { + name: "no steering files", + setup: func(_ *testing.T, _, _ string) {}, + wantWarnings: 0, + wantPassed: true, + }, + { + name: "synced files up to date", + setup: func(t *testing.T, 
tmpDir, steeringDir string) { + t.Helper() + // Create a steering file. + mustMkdir(t, steeringDir) + mustWriteFile(t, filepath.Join(steeringDir, "api.md"), + "---\nname: api\ndescription: API rules\ninclusion: always\npriority: 50\n---\nAPI body\n", 0o644) + + // Sync to all tools so all native files are up to date. + _, err := steering.SyncAll(steeringDir, tmpDir) + if err != nil { + t.Fatal(err) + } + }, + wantWarnings: 0, + wantPassed: true, + }, + { + name: "synced file is stale", + setup: func(t *testing.T, tmpDir, steeringDir string) { + t.Helper() + mustMkdir(t, steeringDir) + mustWriteFile(t, filepath.Join(steeringDir, "api.md"), + "---\nname: api\ndescription: API rules\ninclusion: always\npriority: 50\n---\nAPI body\n", 0o644) + + // Sync all tools first. + _, err := steering.SyncAll(steeringDir, tmpDir) + if err != nil { + t.Fatal(err) + } + + // Now modify the source steering file — all synced files become stale. + mustWriteFile(t, filepath.Join(steeringDir, "api.md"), + "---\nname: api\ndescription: Updated API rules\ninclusion: always\npriority: 50\n---\nUpdated body\n", 0o644) + }, + // All 3 syncable tools (cursor, cline, kiro) will report stale. 
+ wantWarnings: 3, + wantPassed: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tmpDir := t.TempDir() + + steeringDir := filepath.Join(tmpDir, ".context", "steering") + tt.setup(t, tmpDir, steeringDir) + + origDir := chdir(t, tmpDir) + defer func() { _ = os.Chdir(origDir) }() + + writeCtxRC(t, tmpDir, fmt.Sprintf("steering:\n dir: %s\n", steeringDir)) + rc.Reset() + defer rc.Reset() + + report := &Report{ + Warnings: []Issue{}, + Violations: []Issue{}, + Passed: []CheckName{}, + } + + checkSyncStaleness(report) + + if len(report.Warnings) != tt.wantWarnings { + t.Errorf("expected %d warnings, got %d", tt.wantWarnings, len(report.Warnings)) + for _, w := range report.Warnings { + t.Logf(" warning: %s (file=%s path=%s)", w.Message, w.File, w.Path) + } + } + + for _, w := range report.Warnings { + if w.Type != IssueStaleSyncFile { + t.Errorf("expected issue type %q, got %q", IssueStaleSyncFile, w.Type) + } + } + + passed := checkPassed(report, CheckSyncStaleness) + if passed != tt.wantPassed { + t.Errorf("expected passed=%v, got passed=%v", tt.wantPassed, passed) + } + }) + } +} + +func TestCheckRCTool(t *testing.T) { + tests := []struct { + name string + rcContent string + wantWarnings int + wantPassed bool + }{ + { + name: "no tool configured", + rcContent: "", + wantWarnings: 0, + wantPassed: true, + }, + { + name: "valid tool", + rcContent: "tool: kiro\n", + wantWarnings: 0, + wantPassed: true, + }, + { + name: "invalid tool", + rcContent: "tool: vscode\n", + wantWarnings: 1, + wantPassed: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tmpDir := t.TempDir() + + origDir := chdir(t, tmpDir) + defer func() { _ = os.Chdir(origDir) }() + + writeCtxRC(t, tmpDir, tt.rcContent) + rc.Reset() + defer rc.Reset() + + report := &Report{ + Warnings: []Issue{}, + Violations: []Issue{}, + Passed: []CheckName{}, + } + + checkRCTool(report) + + if len(report.Warnings) != tt.wantWarnings { + 
t.Errorf("expected %d warnings, got %d", tt.wantWarnings, len(report.Warnings)) + for _, w := range report.Warnings { + t.Logf(" warning: %s", w.Message) + } + } + + for _, w := range report.Warnings { + if w.Type != IssueInvalidTool { + t.Errorf("expected issue type %q, got %q", IssueInvalidTool, w.Type) + } + } + + passed := checkPassed(report, CheckRCTool) + if passed != tt.wantPassed { + t.Errorf("expected passed=%v, got passed=%v", tt.wantPassed, passed) + } + }) + } +} + +// --- helpers --- + +func checkPassed(report *Report, check CheckName) bool { + for _, p := range report.Passed { + if p == check { + return true + } + } + return false +} + +func mustMkdir(t *testing.T, path string) { + t.Helper() + if err := os.MkdirAll(path, 0o755); err != nil { + t.Fatal(err) + } +} + +func mustWriteFile(t *testing.T, path, content string, perm os.FileMode) { + t.Helper() + if err := os.WriteFile(path, []byte(content), perm); err != nil { + t.Fatal(err) + } +} + +func writeCtxRC(t *testing.T, dir, content string) { + t.Helper() + if err := os.WriteFile(filepath.Join(dir, ".ctxrc"), []byte(content), 0o644); err != nil { + t.Fatal(err) + } +} + +func chdir(t *testing.T, dir string) string { + t.Helper() + origDir, err := os.Getwd() + if err != nil { + t.Fatal(err) + } + if err := os.Chdir(dir); err != nil { + t.Fatal(err) + } + return origDir +} diff --git a/internal/drift/detector.go b/internal/drift/detector.go index c0f91f5b7..f688cf6a0 100644 --- a/internal/drift/detector.go +++ b/internal/drift/detector.go @@ -65,5 +65,17 @@ func Detect(ctx *entity.Context) *Report { // Check context file comment headers against templates checkTemplateHeaders(ctx, report) + // Check steering files for unsupported tool identifiers + checkSteeringTools(report) + + // Check hook scripts for missing executable permission bit + checkHookPerms(report) + + // Check synced tool-native files are up to date vs source steering files + checkSyncStaleness(report) + + // Check .ctxrc tool field 
for unsupported tool identifier + checkRCTool(report) + return report } diff --git a/internal/drift/types.go b/internal/drift/types.go index a5ec5cdcd..ee105df37 100644 --- a/internal/drift/types.go +++ b/internal/drift/types.go @@ -28,6 +28,15 @@ const ( // IssueStaleHeader indicates a context file whose comment header // doesn't match the embedded template. IssueStaleHeader IssueType = "stale_header" + // IssueInvalidTool indicates an unsupported tool identifier in a + // steering file or .ctxrc configuration. + IssueInvalidTool IssueType = "invalid_tool" + // IssueHookNoExec indicates a hook script missing the executable + // permission bit. + IssueHookNoExec IssueType = "hook_no_exec" + // IssueStaleSyncFile indicates a synced tool-native file that is + // out of date compared to its source steering file. + IssueStaleSyncFile IssueType = "stale_sync_file" ) // StatusType represents the overall status of a drift report. @@ -62,6 +71,15 @@ const ( CheckMissingPackages CheckName = "missing_packages" // CheckTemplateHeaders checks context file comment headers against templates. CheckTemplateHeaders CheckName = "template_headers" + // CheckSteeringTools validates tool identifiers in steering files. + CheckSteeringTools CheckName = "steering_tools" + // CheckHookPerms checks hook scripts for executable permission bits. + CheckHookPerms CheckName = "hook_permissions" + // CheckSyncStaleness compares synced tool-native files + // against source steering files. + CheckSyncStaleness CheckName = "sync_staleness" + // CheckRCTool validates the .ctxrc tool field against supported identifiers. + CheckRCTool CheckName = "rc_tool_field" ) // Constitution rule names referenced in drift violations. diff --git a/internal/entity/trigger.go b/internal/entity/trigger.go new file mode 100644 index 000000000..af4bb0e84 --- /dev/null +++ b/internal/entity/trigger.go @@ -0,0 +1,49 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? 
+// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package entity + +// TriggerType identifies the lifecycle trigger event type. +type TriggerType string + +const ( +	// TriggerPreToolUse fires before an AI tool invocation. +	TriggerPreToolUse TriggerType = "pre-tool-use" +	// TriggerPostToolUse fires after an AI tool invocation. +	TriggerPostToolUse TriggerType = "post-tool-use" +	// TriggerSessionStart fires when an AI session begins. +	TriggerSessionStart TriggerType = "session-start" +	// TriggerSessionEnd fires when an AI session ends. +	TriggerSessionEnd TriggerType = "session-end" +	// TriggerFileSave fires when a file is saved. +	TriggerFileSave TriggerType = "file-save" +	// TriggerContextAdd fires when context is added. +	TriggerContextAdd TriggerType = "context-add" +) + +// TriggerInput is the JSON object sent to trigger scripts via stdin. +// +// Fields: +//   - TriggerType: Lifecycle event category (serialized as "hookType" — NOTE(review): confirm this wire name is intentional after the hook→trigger rename) +//   - Tool: Name of the AI tool being used +//   - Parameters: Tool-specific parameters +//   - Session: Session metadata (id and model) +//   - Timestamp: ISO 8601 timestamp +//   - CtxVersion: Version of ctx +type TriggerInput struct { +	TriggerType string         `json:"hookType"` +	Tool        string         `json:"tool"` +	Parameters  map[string]any `json:"parameters"` +	Session     TriggerSession `json:"session"` +	Timestamp   string         `json:"timestamp"` +	CtxVersion  string         `json:"ctxVersion"` +} + +// TriggerSession contains session metadata sent to trigger scripts.
+type TriggerSession struct { + ID string `json:"id"` + Model string `json:"model"` +} diff --git a/internal/err/cli/cli.go b/internal/err/cli/cli.go index 9f87694bc..4c8d2dc40 100644 --- a/internal/err/cli/cli.go +++ b/internal/err/cli/cli.go @@ -7,6 +7,7 @@ package cli import ( + "errors" "fmt" "github.com/ActiveMemory/ctx/internal/assets/read/desc" @@ -67,3 +68,14 @@ func UnknownDocument(alias string) error { desc.Text(text.DescKeyErrValidateUnknownDocument), alias, ) } + +// NoToolSpecified returns an error when no tool is configured. +// +// Returns: +// - error: "no tool specified: use --tool or set the tool +// field in .ctxrc" +func NoToolSpecified() error { + return errors.New( + desc.Text(text.DescKeyErrCliNoToolSpecified), + ) +} diff --git a/internal/err/hook/hook.go b/internal/err/hook/hook.go deleted file mode 100644 index 637ad8875..000000000 --- a/internal/err/hook/hook.go +++ /dev/null @@ -1,119 +0,0 @@ -// / ctx: https://ctx.ist -// ,'`./ do you remember? -// `.,'\ -// \ Copyright 2026-present Context contributors. -// SPDX-License-Identifier: Apache-2.0 - -package hook - -import ( - "fmt" - - "github.com/ActiveMemory/ctx/internal/assets/read/desc" - "github.com/ActiveMemory/ctx/internal/config/embed/text" -) - -// EmbeddedTemplateNotFound returns an error when an embedded hook -// message template cannot be located. -// -// Parameters: -// - hook: hook name -// - variant: template variant name -// -// Returns: -// - error: "embedded template not found for /" -func EmbeddedTemplateNotFound(hook, variant string) error { - return fmt.Errorf( - desc.Text(text.DescKeyErrHookEmbeddedTemplateNotFound), - hook, variant, - ) -} - -// OverrideExists returns an error when a message override already -// exists and must be reset before editing. -// -// Parameters: -// - path: existing override file path -// - hook: hook name -// - variant: template variant name -// -// Returns: -// - error: "override already exists at ..." 
-func OverrideExists(path, hook, variant string) error { - return fmt.Errorf(desc.Text(text.DescKeyErrHookOverrideExists), - path, hook, variant) -} - -// WriteOverride wraps a message override write failure. -// -// Parameters: -// - path: the override file path -// - cause: the underlying error -// -// Returns: -// - error: "failed to write override : " -func WriteOverride(path string, cause error) error { - return fmt.Errorf( - desc.Text(text.DescKeyErrHookWriteOverride), path, cause, - ) -} - -// RemoveOverride wraps a message override removal failure. -// -// Parameters: -// - path: the override file path -// - cause: the underlying error -// -// Returns: -// - error: "failed to remove override : " -func RemoveOverride(path string, cause error) error { - return fmt.Errorf( - desc.Text(text.DescKeyErrHookRemoveOverride), path, cause, - ) -} - -// Unknown returns an error for an unrecognized hook name. -// -// Parameters: -// - hook: the unknown hook name -// -// Returns: -// - error: "unknown hook: ..." -func Unknown(hook string) error { - return fmt.Errorf( - desc.Text(text.DescKeyErrHookUnknownHook), hook, - ) -} - -// UnknownVariant returns an error for an unrecognized variant within -// a known hook. -// -// Parameters: -// - variant: the unknown variant name -// - hook: the parent hook name -// -// Returns: -// - error: "unknown variant for hook ..." -func UnknownVariant(variant, hook string) error { - return fmt.Errorf( - desc.Text(text.DescKeyErrHookUnknownVariant), variant, hook, - ) -} - -// Validate returns an error for an unknown hook/variant combination. -// It distinguishes between an entirely unknown hook and an unknown -// variant within a known hook. 
-// -// Parameters: -// - hookExists: whether the hook name is recognized -// - hook: the hook name -// - variant: the variant name -// -// Returns: -// - error: descriptive error with guidance to list available options -func Validate(hookExists bool, hook, variant string) error { - if !hookExists { - return Unknown(hook) - } - return UnknownVariant(variant, hook) -} diff --git a/internal/err/mcp/doc.go b/internal/err/mcp/doc.go index 1f5fce538..52091b647 100644 --- a/internal/err/mcp/doc.go +++ b/internal/err/mcp/doc.go @@ -8,5 +8,6 @@ // // Error constructors return structured errors with context for // user-facing messages routed through internal/assets text lookups. -// Exports: [TypeContentRequired], [UnknownEventType]. +// Exports: [QueryRequired], [SearchRead], [TypeContentRequired], +// [UnknownEventType]. package mcp diff --git a/internal/err/mcp/mcp.go b/internal/err/mcp/mcp.go index eb8b28573..7c6f2fb68 100644 --- a/internal/err/mcp/mcp.go +++ b/internal/err/mcp/mcp.go @@ -25,6 +25,32 @@ func TypeContentRequired() error { ) } +// QueryRequired returns an error when query is missing from a search +// tool call. +// +// Returns: +// - error: "query is required" +func QueryRequired() error { + return errors.New( + desc.Text(text.DescKeyMCPErrQueryRequired), + ) +} + +// SearchRead wraps a failure to read the context directory during +// search. +// +// Parameters: +// - dir: the directory path +// - cause: the underlying read error +// +// Returns: +// - error: "search: read : " +func SearchRead(dir string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyMCPErrSearchRead), dir, cause, + ) +} + // UnknownEventType returns an error for an unrecognized session event // type. 
// diff --git a/internal/err/hook/doc.go b/internal/err/setup/doc.go similarity index 61% rename from internal/err/hook/doc.go rename to internal/err/setup/doc.go index 8e58f6efc..f6bd4147a 100644 --- a/internal/err/hook/doc.go +++ b/internal/err/setup/doc.go @@ -4,11 +4,9 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package hook provides error constructors for hook template operations. +// Package setup provides error constructors for tool setup operations. // // Error constructors return structured errors with context for // user-facing messages routed through internal/assets text lookups. -// Exports: [EmbeddedTemplateNotFound], [OverrideExists], -// [WriteOverride], [RemoveOverride], [Unknown], -// [UnknownVariant]. -package hook +// Exports: [CreateDir], [MarshalConfig], [WriteFile], [SyncSteering]. +package setup diff --git a/internal/err/setup/setup.go b/internal/err/setup/setup.go new file mode 100644 index 000000000..5a270643a --- /dev/null +++ b/internal/err/setup/setup.go @@ -0,0 +1,68 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package setup + +import ( + "fmt" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/embed/text" +) + +// CreateDir wraps a failure to create a setup directory. +// +// Parameters: +// - dir: the directory path +// - cause: the underlying OS error +// +// Returns: +// - error: "create : " +func CreateDir(dir string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSetupCreateDir), dir, cause, + ) +} + +// MarshalConfig wraps a failure to marshal MCP configuration JSON. 
+// +// Parameters: +// - cause: the underlying marshal error +// +// Returns: +// - error: "marshal mcp config: " +func MarshalConfig(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSetupMarshalConfig), cause, + ) +} + +// WriteFile wraps a failure to write a setup file. +// +// Parameters: +// - path: the file path +// - cause: the underlying OS error +// +// Returns: +// - error: "write : " +func WriteFile(path string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSetupFileWrite), path, cause, + ) +} + +// SyncSteering wraps a failure during steering sync in setup. +// +// Parameters: +// - cause: the underlying sync error +// +// Returns: +// - error: "sync steering: " +func SyncSteering(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSetupSyncSteering), cause, + ) +} diff --git a/internal/err/skill/doc.go b/internal/err/skill/doc.go index 473fed796..ace31f991 100644 --- a/internal/err/skill/doc.go +++ b/internal/err/skill/doc.go @@ -4,9 +4,13 @@ // \ Copyright 2026-present Context contributors. // SPDX-License-Identifier: Apache-2.0 -// Package skill provides error constructors for skill listing and reading. +// Package skill provides error constructors for skill operations. // // Error constructors return structured errors with context for // user-facing messages routed through internal/assets text lookups. -// Exports: [List], [Read]. +// Exports: [CreateDest], [Install], [InvalidManifest], +// [InvalidYAML], [List], [Load], [MissingClosingDelimiter], +// [MissingName], [MissingOpeningDelimiter], [NotFound], +// [NotValidDir], [NotValidSource], [Read], [ReadDir], +// [Remove], [SkillLoad]. 
package skill diff --git a/internal/err/skill/skill.go b/internal/err/skill/skill.go index 15bf24420..03a98735d 100644 --- a/internal/err/skill/skill.go +++ b/internal/err/skill/skill.go @@ -7,12 +7,68 @@ package skill import ( + "errors" "fmt" "github.com/ActiveMemory/ctx/internal/assets/read/desc" "github.com/ActiveMemory/ctx/internal/config/embed/text" ) +// CreateDest wraps a failure to create the skill destination directory. +// +// Parameters: +// - cause: the underlying OS error +// +// Returns: +// - error: "skill: create destination: " +func CreateDest(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSkillCreateDest), cause, + ) +} + +// Install wraps a skill installation copy failure. +// +// Parameters: +// - name: skill name +// - cause: the underlying error +// +// Returns: +// - error: "skill: install : " +func Install(name string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSkillInstall), name, cause, + ) +} + +// InvalidManifest wraps an invalid skill manifest parse failure. +// +// Parameters: +// - manifest: manifest filename +// - cause: the underlying error +// +// Returns: +// - error: "skill: source has invalid : " +func InvalidManifest(manifest string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSkillInvalidManifest), manifest, cause, + ) +} + +// InvalidYAML wraps a YAML parse failure in a skill manifest. +// +// Parameters: +// - name: skill name +// - cause: the underlying YAML error +// +// Returns: +// - error: "skill: : invalid YAML frontmatter: " +func InvalidYAML(name string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSkillInvalidYAML), name, cause, + ) +} + // List wraps a failure to list embedded skill directories. // // Parameters: @@ -24,6 +80,95 @@ func List(cause error) error { return fmt.Errorf(desc.Text(text.DescKeyErrSkillList), cause) } +// Load wraps a failure to load a skill by name. 
+// +// Parameters: +// - name: skill name +// - cause: the underlying error +// +// Returns: +// - error: "skill: : " +func Load(name string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSkillLoad), name, cause, + ) +} + +// MissingClosingDelimiter returns an error for missing closing +// frontmatter delimiter. +// +// Returns: +// - error: "missing closing frontmatter delimiter (---)" +func MissingClosingDelimiter() error { + return errors.New( + desc.Text(text.DescKeyErrSkillMissingClosingDelim), + ) +} + +// MissingName returns an error for a skill manifest missing the +// required name field. +// +// Parameters: +// - manifest: the manifest filename +// +// Returns: +// - error: "skill: is missing required 'name' field" +func MissingName(manifest string) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSkillMissingName), manifest, + ) +} + +// MissingOpeningDelimiter returns an error for missing opening +// frontmatter delimiter. +// +// Returns: +// - error: "missing opening frontmatter delimiter (---)" +func MissingOpeningDelimiter() error { + return errors.New( + desc.Text(text.DescKeyErrSkillMissingOpeningDelim), + ) +} + +// NotFound returns an error when a skill cannot be found by name. +// +// Parameters: +// - name: the skill name +// +// Returns: +// - error: "skill not found" +func NotFound(name string) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSkillNotFound), name, + ) +} + +// NotValidDir returns an error when a skill path is not a directory. +// +// Parameters: +// - name: the skill name +// +// Returns: +// - error: "skill: is not a valid skill directory" +func NotValidDir(name string) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSkillNotValidDir), name, + ) +} + +// NotValidSource wraps an error when the source is not a valid skill. 
+// +// Parameters: +// - cause: the underlying read error +// +// Returns: +// - error: "skill: source is not a valid skill: " +func NotValidSource(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSkillNotValidSource), cause, + ) +} + // Read wraps a failure to read a skill's content. // // Parameters: @@ -37,3 +182,46 @@ func Read(name string, cause error) error { desc.Text(text.DescKeyErrSkillRead), name, cause, ) } + +// ReadDir wraps a failure to read the skills directory. +// +// Parameters: +// - dir: the directory path +// - cause: the underlying error +// +// Returns: +// - error: "skill: read directory : " +func ReadDir(dir string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSkillReadDir), dir, cause, + ) +} + +// Remove wraps a skill removal failure. +// +// Parameters: +// - name: skill name +// - cause: the underlying error +// +// Returns: +// - error: "skill: remove : " +func Remove(name string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSkillRemove), name, cause, + ) +} + +// LoadQuoted wraps a skill load failure with the skill name +// quoted. +// +// Parameters: +// - name: skill name +// - cause: the underlying error +// +// Returns: +// - error: "skill : " +func LoadQuoted(name string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSkillSkillLoad), name, cause, + ) +} diff --git a/internal/err/steering/doc.go b/internal/err/steering/doc.go new file mode 100644 index 000000000..edf2ba343 --- /dev/null +++ b/internal/err/steering/doc.go @@ -0,0 +1,17 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package steering provides error constructors for steering operations. +// +// Error constructors return structured errors with context for +// user-facing messages routed through internal/assets text lookups. 
+// Exports: [ComputeRelPath], [ContextDirMissing], [CreateDir], +// [FileExists], [InvalidYAML], [MissingClosingDelimiter], +// [MissingOpeningDelimiter], [NoTool], [OutputEscapesRoot], +// [Parse], [ReadDir], [ReadFile], [ResolveOutput], +// [ResolveRoot], [SyncAll], [SyncName], [UnsupportedTool], +// [WriteFile], [WriteSteeringFile], [WriteInitFile]. +package steering diff --git a/internal/err/steering/steering.go b/internal/err/steering/steering.go new file mode 100644 index 000000000..171b4de76 --- /dev/null +++ b/internal/err/steering/steering.go @@ -0,0 +1,280 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package steering + +import ( + "errors" + "fmt" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/embed/text" +) + +// ComputeRelPath wraps a failure to compute a relative path. +// +// Parameters: +// - cause: the underlying error +// +// Returns: +// - error: "compute relative path: " +func ComputeRelPath(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringComputeRelPath), cause, + ) +} + +// ContextDirMissing returns an error when the .context/ directory +// does not exist. +// +// Returns: +// - error: ".context/ directory does not exist; run ctx init first" +func ContextDirMissing() error { + return errors.New( + desc.Text(text.DescKeyErrSteeringContextDirMissing), + ) +} + +// CreateDir wraps a steering directory creation failure. +// +// Parameters: +// - cause: the underlying OS error +// +// Returns: +// - error: "create steering directory: " +func CreateDir(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringCreateDir), cause, + ) +} + +// FileExists returns an error when a steering file already exists. 
+// +// Parameters: +// - path: the existing file path +// +// Returns: +// - error: "steering file already exists: " +func FileExists(path string) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringFileExists), path, + ) +} + +// InvalidYAML wraps an invalid YAML frontmatter parse failure. +// +// Parameters: +// - filePath: path to the steering file +// - cause: the underlying YAML error +// +// Returns: +// - error: "steering: : invalid YAML frontmatter: " +func InvalidYAML(filePath string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringInvalidYAML), filePath, cause, + ) +} + +// MissingClosingDelimiter returns an error for missing closing +// frontmatter delimiter. +// +// Returns: +// - error: "missing closing frontmatter delimiter (---)" +func MissingClosingDelimiter() error { + return errors.New( + desc.Text(text.DescKeyErrSteeringMissingClosingDelim), + ) +} + +// MissingOpeningDelimiter returns an error for missing opening +// frontmatter delimiter. +// +// Returns: +// - error: "missing opening frontmatter delimiter (---)" +func MissingOpeningDelimiter() error { + return errors.New( + desc.Text(text.DescKeyErrSteeringMissingOpeningDelim), + ) +} + +// NoTool returns an error when no tool is specified for sync. +// +// Returns: +// - error: "no tool specified: use --tool , --all, or set +// the tool field in .ctxrc" +func NoTool() error { + return errors.New( + desc.Text(text.DescKeyErrSteeringNoTool), + ) +} + +// OutputEscapesRoot returns an error when an output path escapes +// the project root. +// +// Parameters: +// - outPath: the output path +// - projectRoot: the project root path +// +// Returns: +// - error: "output path escapes project root " +func OutputEscapesRoot(outPath, projectRoot string) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringOutputEscapesRoot), + outPath, projectRoot, + ) +} + +// Parse wraps a steering file parse failure. 
+// +// Parameters: +// - filePath: path to the steering file +// - cause: the underlying error +// +// Returns: +// - error: "steering: : " +func Parse(filePath string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringParse), filePath, cause, + ) +} + +// ReadDir wraps a steering directory read failure. +// +// Parameters: +// - dir: the directory path +// - cause: the underlying error +// +// Returns: +// - error: "steering: read directory : " +func ReadDir(dir string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringReadDir), dir, cause, + ) +} + +// ReadFile wraps a steering file read failure. +// +// Parameters: +// - path: the file path +// - cause: the underlying error +// +// Returns: +// - error: "steering: read file : " +func ReadFile(path string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringReadFile), path, cause, + ) +} + +// ResolveOutput wraps a failure to resolve an output path. +// +// Parameters: +// - cause: the underlying error +// +// Returns: +// - error: "resolve output path: " +func ResolveOutput(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringResolveOutput), cause, + ) +} + +// ResolveRoot wraps a failure to resolve the project root path. +// +// Parameters: +// - cause: the underlying error +// +// Returns: +// - error: "resolve project root: " +func ResolveRoot(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringResolveRoot), cause, + ) +} + +// SyncAll wraps a failure during sync-all for a specific tool. +// +// Parameters: +// - tool: the tool that failed +// - cause: the underlying error +// +// Returns: +// - error: "steering: sync : " +func SyncAll(tool string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringSyncAll), tool, cause, + ) +} + +// SyncName wraps a steering sync error for a named file. 
+// +// Parameters: +// - name: steering file name +// - cause: the underlying error +// +// Returns: +// - error: "steering: : " +func SyncName(name string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringSyncName), name, cause, + ) +} + +// UnsupportedTool returns an error for an unsupported sync tool. +// +// Parameters: +// - tool: the unsupported tool name +// - supported: comma-separated list of supported tools +// +// Returns: +// - error: "steering: unsupported sync tool ; supported: " +func UnsupportedTool(tool, supported string) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringUnsupportedTool), + tool, supported, + ) +} + +// WriteFile wraps a steering file write failure during sync. +// +// Parameters: +// - path: the output path +// - cause: the underlying error +// +// Returns: +// - error: "steering: write : " +func WriteFile(path string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringWriteFile), path, cause, + ) +} + +// Write wraps a steering file write failure. +// +// Parameters: +// - cause: the underlying OS error +// +// Returns: +// - error: "write steering file: " +func Write(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringWriteSteeringFile), cause, + ) +} + +// WriteInitFile wraps a steering init file write failure. +// +// Parameters: +// - path: the file path +// - cause: the underlying OS error +// +// Returns: +// - error: "write : " +func WriteInitFile(path string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrSteeringWriteInitFile), path, cause, + ) +} diff --git a/internal/err/trigger/doc.go b/internal/err/trigger/doc.go new file mode 100644 index 000000000..30cc273aa --- /dev/null +++ b/internal/err/trigger/doc.go @@ -0,0 +1,17 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors.
+// SPDX-License-Identifier: Apache-2.0 + +// Package trigger provides error constructors for trigger operations. +// +// Error constructors return structured errors with context for +// user-facing messages routed through internal/assets text lookups. +// Exports: [Boundary], [Chmod], [CreateDir], [DiscoverFailed], +// [EmbeddedTemplateNotFound], [Exit], [InvalidJSONOutput], +// [InvalidType], [MarshalInput], [NotExecutable], [NotFound], +// [OverrideExists], [RemoveOverride], [ResolveHooksDir], +// [ResolvePath], [ScriptExists], [Stat], [StatPath], [Symlink], +// [Timeout], [Unknown], [UnknownVariant], [Validate], +// [WriteOverride], [WriteScript]. +package trigger diff --git a/internal/err/trigger/trigger.go b/internal/err/trigger/trigger.go new file mode 100644 index 000000000..6caf968a4 --- /dev/null +++ b/internal/err/trigger/trigger.go @@ -0,0 +1,364 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trigger + +import ( + "fmt" + "time" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/embed/text" +) + +// Chmod wraps a failure to change hook file permissions. +// +// Parameters: +// - cause: the underlying OS error +// +// Returns: +// - error: "chmod hook: " +func Chmod(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookChmod), cause, + ) +} + +// CreateDir wraps a hook directory creation failure. +// +// Parameters: +// - cause: the underlying OS error +// +// Returns: +// - error: "create hook directory: " +func CreateDir(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookCreateDir), cause, + ) +} + +// DiscoverFailed wraps a hook discovery failure.
+// +// Parameters: +// - cause: the underlying error +// +// Returns: +// - error: "discover hooks: " +func DiscoverFailed(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookDiscover), cause, + ) +} + +// EmbeddedTemplateNotFound returns an error when an embedded hook +// message template cannot be located. +// +// Parameters: +// - hook: hook name +// - variant: template variant name +// +// Returns: +// - error: "embedded template not found for /" +func EmbeddedTemplateNotFound(hook, variant string) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookEmbeddedTemplateNotFound), + hook, variant, + ) +} + +// Exit wraps a hook script non-zero exit. +// +// Parameters: +// - cause: the underlying exec error +// +// Returns: +// - error: "exit: " +func Exit(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookExit), cause, + ) +} + +// InvalidJSONOutput wraps an invalid JSON output from a hook. +// +// Parameters: +// - cause: the underlying JSON parse error +// +// Returns: +// - error: "invalid JSON output: " +func InvalidJSONOutput(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookInvalidJSONOutput), cause, + ) +} + +// InvalidType returns an error for an unrecognized hook type. +// +// Parameters: +// - hookType: the invalid hook type string +// - valid: comma-separated list of valid types +// +// Returns: +// - error: "invalid hook type ; valid types: " +func InvalidType(hookType, valid string) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookInvalidType), hookType, valid, + ) +} + +// MarshalInput wraps a hook input marshal failure. +// +// Parameters: +// - cause: the underlying marshal error +// +// Returns: +// - error: "marshal hook input: " +func MarshalInput(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookMarshalInput), cause, + ) +} + +// NotFound returns an error when a hook cannot be found by name. 
+// +// Parameters: +// - name: the hook name that was not found +// +// Returns: +// - error: "hook not found: " +func NotFound(name string) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookNotFound), name, + ) +} + +// OverrideExists returns an error when a message override already +// exists and must be reset before editing. +// +// Parameters: +// - path: existing override file path +// - hook: hook name +// - variant: template variant name +// +// Returns: +// - error: "override already exists at ..." +func OverrideExists(path, hook, variant string) error { + return fmt.Errorf(desc.Text(text.DescKeyErrHookOverrideExists), + path, hook, variant) +} + +// RemoveOverride wraps a message override removal failure. +// +// Parameters: +// - path: the override file path +// - cause: the underlying error +// +// Returns: +// - error: "failed to remove override : " +func RemoveOverride(path string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookRemoveOverride), path, cause, + ) +} + +// ResolveHooksDir wraps a failure to resolve the hooks directory path. +// +// Parameters: +// - dir: the hooks directory path +// - cause: the underlying error +// +// Returns: +// - error: "resolve hooks directory : " +func ResolveHooksDir(dir string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookResolveHooksDir), dir, cause, + ) +} + +// ResolvePath wraps a failure to resolve a hook script path. +// +// Parameters: +// - path: the hook path +// - cause: the underlying error +// +// Returns: +// - error: "resolve hook path : " +func ResolvePath(path string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookResolvePath), path, cause, + ) +} + +// ScriptExists returns an error when a hook script already exists. 
+// +// Parameters: +// - path: the existing script path +// +// Returns: +// - error: "hook script already exists: " +func ScriptExists(path string) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookScriptExists), path, + ) +} + +// Stat wraps a hook stat failure. +// +// Parameters: +// - cause: the underlying OS error +// +// Returns: +// - error: "stat hook: " +func Stat(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookStat), cause, + ) +} + +// StatPath wraps a hook path stat failure. +// +// Parameters: +// - path: the hook path +// - cause: the underlying OS error +// +// Returns: +// - error: "stat hook path : " +func StatPath(path string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookStatPath), path, cause, + ) +} + +// Timeout returns an error when a hook exceeds its execution timeout. +// +// Parameters: +// - d: the timeout duration +// +// Returns: +// - error: "timeout after " +func Timeout(d time.Duration) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookTimeout), d, + ) +} + +// Unknown returns an error for an unrecognized hook name. +// +// Parameters: +// - hook: the unknown hook name +// +// Returns: +// - error: "unknown hook: ..." +func Unknown(hook string) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookUnknownHook), hook, + ) +} + +// UnknownVariant returns an error for an unrecognized variant within +// a known hook. +// +// Parameters: +// - variant: the unknown variant name +// - hook: the parent hook name +// +// Returns: +// - error: "unknown variant for hook ..." +func UnknownVariant(variant, hook string) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookUnknownVariant), variant, hook, + ) +} + +// Validate returns an error for an unknown hook/variant combination. +// It distinguishes between an entirely unknown hook and an unknown +// variant within a known hook. 
+// +// Parameters: +// - hookExists: whether the hook name is recognized +// - hook: the hook name +// - variant: the variant name +// +// Returns: +// - error: descriptive error with guidance to list available options +func Validate(hookExists bool, hook, variant string) error { + if !hookExists { + return Unknown(hook) + } + return UnknownVariant(variant, hook) +} + +// WriteScript wraps a hook script write failure. +// +// Parameters: +// - cause: the underlying OS error +// +// Returns: +// - error: "write hook script: " +func WriteScript(cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookWriteScript), cause, + ) +} + +// WriteOverride wraps a message override write failure. +// +// Parameters: +// - path: the override file path +// - cause: the underlying error +// +// Returns: +// - error: "failed to write override : " +func WriteOverride(path string, cause error) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrHookWriteOverride), path, cause, + ) +} + +// Symlink returns an error when a hook path is a symlink. +// +// Parameters: +// - hookPath: the symlink path +// +// Returns: +// - error: "hook is a symlink: " +func Symlink(hookPath string) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrLifecycleHookSymlink), + hookPath, + ) +} + +// Boundary returns an error when a hook path escapes the +// hooks directory boundary. +// +// Parameters: +// - hookPath: the escaping path +// - hooksDir: the hooks root directory +// +// Returns: +// - error: "hook escapes boundary: not in " +func Boundary(hookPath, hooksDir string) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrLifecycleHookBoundary), + hookPath, hooksDir, + ) +} + +// NotExecutable returns an error when a hook script lacks +// the executable permission bit. 
+// +// Parameters: +// - hookPath: the non-executable path +// +// Returns: +// - error: "hook not executable: " +func NotExecutable(hookPath string) error { + return fmt.Errorf( + desc.Text(text.DescKeyErrLifecycleHookNotExecutable), + hookPath, + ) +} diff --git a/internal/exec/sysinfo/sysinfo_darwin.go b/internal/exec/sysinfo/sysinfo_darwin.go index 71e3457a1..f29601d72 100644 --- a/internal/exec/sysinfo/sysinfo_darwin.go +++ b/internal/exec/sysinfo/sysinfo_darwin.go @@ -10,6 +10,14 @@ package sysinfo import "os/exec" +// Command names for macOS system information queries. +const ( + // cmdSysctl is the sysctl command name. + cmdSysctl = "sysctl" + // cmdVMStat is the vm_stat command name. + cmdVMStat = "vm_stat" +) + // Sysctl runs sysctl with the given arguments and returns stdout. // // Parameters: @@ -20,7 +28,7 @@ import "os/exec" // - error: non-nil if the command fails func Sysctl(args ...string) ([]byte, error) { //nolint:gosec // fixed command, no user input - return exec.Command("sysctl", args...).Output() + return exec.Command(cmdSysctl, args...).Output() } // VMStat runs vm_stat and returns stdout. @@ -29,5 +37,5 @@ func Sysctl(args ...string) ([]byte, error) { // - []byte: raw stdout output // - error: non-nil if the command fails func VMStat() ([]byte, error) { - return exec.Command("vm_stat").Output() + return exec.Command(cmdVMStat).Output() } diff --git a/internal/exec/trigger/doc.go b/internal/exec/trigger/doc.go new file mode 100644 index 000000000..5076f45a1 --- /dev/null +++ b/internal/exec/trigger/doc.go @@ -0,0 +1,13 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package trigger centralizes process execution for lifecycle trigger +// scripts. All exec.Command calls for trigger runners live here. 
+// +// [CommandContext] wraps exec.CommandContext to create a hook +// process with the given context and script path, providing +// a single point for testing and security auditing. +package trigger diff --git a/internal/exec/trigger/trigger.go b/internal/exec/trigger/trigger.go new file mode 100644 index 000000000..dd1629bad --- /dev/null +++ b/internal/exec/trigger/trigger.go @@ -0,0 +1,28 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trigger + +import ( + "context" + "os/exec" +) + +// CommandContext returns an exec.Cmd for a hook script path, +// bound to the given context for timeout enforcement. +// +// Parameters: +// - ctx: context for deadline/cancellation +// - path: absolute path to the hook script +// +// Returns: +// - *exec.Cmd: configured command ready for stdin/stdout wiring +func CommandContext( + ctx context.Context, path string, +) *exec.Cmd { + //nolint:gosec // path validated by hook.ValidatePath + return exec.CommandContext(ctx, path) +} diff --git a/internal/mcp/handler/session_hooks.go b/internal/mcp/handler/session_hooks.go new file mode 100644 index 000000000..37b565b78 --- /dev/null +++ b/internal/mcp/handler/session_hooks.go @@ -0,0 +1,120 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package handler + +import ( + "time" + + "github.com/ActiveMemory/ctx/internal/entity" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/trigger" +) + +// Trigger result messages. +const ( + // msgHooksDisabled is returned when triggers are not enabled. + msgHooksDisabled = "Hooks disabled." + // msgSessionStartOK is returned when start triggers produce no + // additional context. + msgSessionStartOK = "Session start hooks executed. " + + "No additional context." 
+ // msgSessionEndOK is returned when end triggers produce no + // additional context. + msgSessionEndOK = "Session end hooks executed." + // paramSummary is the parameter key for session summary. + paramSummary = "summary" +) + +// SessionStartHooks executes session-start triggers and returns +// aggregated context. +// +// Returns success with empty context when no triggers exist or +// triggers are disabled. +// +// Returns: +// - string: aggregated context from trigger outputs +// - error: trigger discovery or execution error +func (h *Handler) SessionStartHooks() (string, error) { + if !rc.HooksEnabled() { + return msgHooksDisabled, nil + } + + hooksDir := rc.HooksDir() + timeout := time.Duration(rc.HookTimeout()) * time.Second + + input := &entity.TriggerInput{ + TriggerType: string(entity.TriggerSessionStart), + Parameters: map[string]any{}, + Timestamp: time.Now().UTC().Format(time.RFC3339), + } + + agg, runErr := trigger.RunAll( + hooksDir, trigger.SessionStart, input, timeout, + ) + if runErr != nil { + return "", runErr + } + + if agg.Cancelled { + return agg.Message, nil + } + + if agg.Context == "" { + return msgSessionStartOK, nil + } + + return agg.Context, nil +} + +// SessionEndHooks executes session-end triggers with the given summary +// in the trigger input parameters. +// +// Returns success with empty context when no triggers exist or +// triggers are disabled. 
+// +// Parameters: +// - summary: optional session summary passed to triggers via parameters +// +// Returns: +// - string: aggregated context from trigger outputs +// - error: trigger discovery or execution error +func (h *Handler) SessionEndHooks(summary string) (string, error) { + if !rc.HooksEnabled() { + return msgHooksDisabled, nil + } + + hooksDir := rc.HooksDir() + timeout := time.Duration(rc.HookTimeout()) * time.Second + + params := map[string]any{} + if summary != "" { + params[paramSummary] = summary + } + + input := &entity.TriggerInput{ + TriggerType: string(entity.TriggerSessionEnd), + Parameters: params, + Timestamp: time.Now().UTC().Format(time.RFC3339), + } + + agg, runErr := trigger.RunAll( + hooksDir, trigger.SessionEnd, input, timeout, + ) + if runErr != nil { + return "", runErr + } + + if agg.Cancelled { + return agg.Message, nil + } + + if agg.Context == "" { + return msgSessionEndOK, nil + } + + return agg.Context, nil +} diff --git a/internal/mcp/handler/steering.go b/internal/mcp/handler/steering.go new file mode 100644 index 000000000..a6c7a82ef --- /dev/null +++ b/internal/mcp/handler/steering.go @@ -0,0 +1,119 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package handler + +import ( + "bufio" + "errors" + "fmt" + "os" + "path/filepath" + "strings" + + errMcp "github.com/ActiveMemory/ctx/internal/err/mcp" + ctxIo "github.com/ActiveMemory/ctx/internal/io" + "github.com/ActiveMemory/ctx/internal/rc" + "github.com/ActiveMemory/ctx/internal/steering" +) + +// Steering result messages. +const ( + // msgNoSteeringFiles is returned when no steering files exist. + msgNoSteeringFiles = "No steering files found." + // msgNoMatchingSteering is returned when no files match. + msgNoMatchingSteering = "No matching steering files." +) + +// SteeringGet returns applicable steering files for the given prompt. 
+// If prompt is empty, returns only "always" inclusion files. +// +// Parameters: +// - prompt: optional prompt text for auto-inclusion matching +// +// Returns: +// - string: formatted list of matching steering files +// - error: steering load error +func (h *Handler) SteeringGet(prompt string) (string, error) { + steeringDir := rc.SteeringDir() + + files, loadErr := steering.LoadAll(steeringDir) + if loadErr != nil { + if errors.Is(loadErr, os.ErrNotExist) { + return msgNoSteeringFiles, nil + } + return "", loadErr + } + + if len(files) == 0 { + return msgNoSteeringFiles, nil + } + + filtered := steering.Filter(files, prompt, nil, "") + + if len(filtered) == 0 { + return msgNoMatchingSteering, nil + } + + var sb strings.Builder + for _, sf := range filtered { + fmt.Fprintf(&sb, "## %s\n\n%s\n\n", sf.Name, sf.Body) + } + + return sb.String(), nil +} + +// Search searches across all .context/ files for the given query. +// Returns matching excerpts with file paths and line numbers. +// +// Parameters: +// - query: search text to find in context files +// +// Returns: +// - string: formatted search results with paths and line numbers +// - error: directory read error +func (h *Handler) Search(query string) (string, error) { + if query == "" { + return "", errMcp.QueryRequired() + } + + entries, readErr := os.ReadDir(h.ContextDir) + if readErr != nil { + return "", errMcp.SearchRead(h.ContextDir, readErr) + } + + queryLower := strings.ToLower(query) + var sb strings.Builder + matches := 0 + + for _, e := range entries { + if e.IsDir() { + continue + } + path := filepath.Join(h.ContextDir, e.Name()) + data, err := ctxIo.SafeReadUserFile(path) + if err != nil { + continue + } + + scanner := bufio.NewScanner(strings.NewReader(string(data))) + lineNum := 0 + for scanner.Scan() { + lineNum++ + line := scanner.Text() + if strings.Contains(strings.ToLower(line), queryLower) { + fmt.Fprintf(&sb, "%s:%d: %s\n", e.Name(), lineNum, line) + matches++ + } + } + } + + if matches 
== 0 { + return fmt.Sprintf("No matches for %q in %s.", query, h.ContextDir), nil + } + + return sb.String(), nil +} diff --git a/internal/mcp/server/def/tool/tool.go b/internal/mcp/server/def/tool/tool.go index 151d4aa26..2a1eedd9d 100644 --- a/internal/mcp/server/def/tool/tool.go +++ b/internal/mcp/server/def/tool/tool.go @@ -17,182 +17,245 @@ import ( "github.com/ActiveMemory/ctx/internal/mcp/proto" ) -// Defs defines all available MCP tools. -var Defs = []proto.Tool{ - { - Name: cfgMcpTool.Status, - Description: desc.Text( - text.DescKeyMCPToolStatusDesc), - InputSchema: proto.InputSchema{Type: schema.Object}, - Annotations: &proto.ToolAnnotations{ReadOnlyHint: true}, - }, - { - Name: cfgMcpTool.Add, - Description: desc.Text( - text.DescKeyMCPToolAddDesc), - InputSchema: proto.InputSchema{ - Type: schema.Object, - Properties: MergeProps(map[string]proto.Property{ - cli.AttrType: { - Type: schema.String, - Description: desc.Text( - text.DescKeyMCPToolPropType), - Enum: []string{ - "task", "decision", - "learning", "convention", +// Defs returns all available MCP tool definitions. +// +// This is a function (not a package-level var) because desc.Text() +// reads from lookup maps that are populated by lookup.Init() in main(). +// Package-level vars are initialized before main(), so desc.Text() +// would return empty strings. 
+func Defs() []proto.Tool { + return []proto.Tool{ + { + Name: cfgMcpTool.Status, + Description: desc.Text( + text.DescKeyMCPToolStatusDesc), + InputSchema: proto.InputSchema{Type: schema.Object}, + Annotations: &proto.ToolAnnotations{ReadOnlyHint: true}, + }, + { + Name: cfgMcpTool.Add, + Description: desc.Text( + text.DescKeyMCPToolAddDesc), + InputSchema: proto.InputSchema{ + Type: schema.Object, + Properties: MergeProps(map[string]proto.Property{ + cli.AttrType: { + Type: schema.String, + Description: desc.Text( + text.DescKeyMCPToolPropType), + Enum: []string{ + entry.Task, entry.Decision, + entry.Learning, entry.Convention, + }, }, - }, - field.Content: { - Type: schema.String, - Description: desc.Text( - text.DescKeyMCPToolPropContent), - }, - field.Priority: { - Type: schema.String, - Description: desc.Text( - text.DescKeyMCPToolPropPriority), - Enum: entry.Priorities, - }, - }, EntryAttrProps( - text.DescKeyMCPToolPropContext)), - Required: []string{cli.AttrType, field.Content}, + field.Content: { + Type: schema.String, + Description: desc.Text( + text.DescKeyMCPToolPropContent), + }, + field.Priority: { + Type: schema.String, + Description: desc.Text( + text.DescKeyMCPToolPropPriority), + Enum: entry.Priorities, + }, + }, EntryAttrProps( + text.DescKeyMCPToolPropContext)), + Required: []string{cli.AttrType, field.Content}, + }, + Annotations: &proto.ToolAnnotations{}, }, - Annotations: &proto.ToolAnnotations{}, - }, - { - Name: cfgMcpTool.Complete, - Description: desc.Text( - text.DescKeyMCPToolCompleteDesc), - InputSchema: proto.InputSchema{ - Type: schema.Object, - Properties: map[string]proto.Property{ - field.Query: { - Type: schema.String, - Description: desc.Text( - text.DescKeyMCPToolPropQuery), + { + Name: cfgMcpTool.Complete, + Description: desc.Text( + text.DescKeyMCPToolCompleteDesc), + InputSchema: proto.InputSchema{ + Type: schema.Object, + Properties: map[string]proto.Property{ + field.Query: { + Type: schema.String, + Description: 
desc.Text( + text.DescKeyMCPToolPropQuery), + }, }, + Required: []string{field.Query}, }, - Required: []string{field.Query}, + Annotations: &proto.ToolAnnotations{IdempotentHint: true}, }, - Annotations: &proto.ToolAnnotations{IdempotentHint: true}, - }, - { - Name: cfgMcpTool.Drift, - Description: desc.Text( - text.DescKeyMCPToolDriftDesc), - InputSchema: proto.InputSchema{Type: schema.Object}, - Annotations: &proto.ToolAnnotations{ReadOnlyHint: true}, - }, - { - Name: cfgMcpTool.JournalSource, - Description: desc.Text( - text.DescKeyMCPToolJournalSourceDesc), - InputSchema: proto.InputSchema{ - Type: schema.Object, - Properties: map[string]proto.Property{ - field.Limit: { - Type: schema.Number, - Description: desc.Text( - text.DescKeyMCPToolPropLimit), - }, - field.Since: { - Type: schema.String, - Description: desc.Text( - text.DescKeyMCPToolPropSince), + { + Name: cfgMcpTool.Drift, + Description: desc.Text( + text.DescKeyMCPToolDriftDesc), + InputSchema: proto.InputSchema{Type: schema.Object}, + Annotations: &proto.ToolAnnotations{ReadOnlyHint: true}, + }, + { + Name: cfgMcpTool.JournalSource, + Description: desc.Text( + text.DescKeyMCPToolJournalSourceDesc), + InputSchema: proto.InputSchema{ + Type: schema.Object, + Properties: map[string]proto.Property{ + field.Limit: { + Type: schema.Number, + Description: desc.Text( + text.DescKeyMCPToolPropLimit), + }, + field.Since: { + Type: schema.String, + Description: desc.Text( + text.DescKeyMCPToolPropSince), + }, }, }, + Annotations: &proto.ToolAnnotations{ReadOnlyHint: true}, + }, + { + Name: cfgMcpTool.WatchUpdate, + Description: desc.Text( + text.DescKeyMCPToolWatchUpdateDesc), + InputSchema: proto.InputSchema{ + Type: schema.Object, + Properties: MergeProps(map[string]proto.Property{ + cli.AttrType: { + Type: schema.String, + Description: desc.Text( + text.DescKeyMCPToolPropEntryType), + }, + field.Content: { + Type: schema.String, + Description: desc.Text( + text.DescKeyMCPToolPropMainContent), + }, + }, 
EntryAttrProps( + text.DescKeyMCPToolPropCtxBg)), + Required: []string{cli.AttrType, field.Content}, + }, + Annotations: &proto.ToolAnnotations{}, }, - Annotations: &proto.ToolAnnotations{ReadOnlyHint: true}, - }, - { - Name: cfgMcpTool.WatchUpdate, - Description: desc.Text( - text.DescKeyMCPToolWatchUpdateDesc), - InputSchema: proto.InputSchema{ - Type: schema.Object, - Properties: MergeProps(map[string]proto.Property{ - cli.AttrType: { - Type: schema.String, - Description: desc.Text( - text.DescKeyMCPToolPropEntryType), + { + Name: cfgMcpTool.Compact, + Description: desc.Text( + text.DescKeyMCPToolCompactDesc), + InputSchema: proto.InputSchema{ + Type: schema.Object, + Properties: map[string]proto.Property{ + field.Archive: { + Type: schema.Boolean, + Description: desc.Text( + text.DescKeyMCPToolPropArchive), + }, }, - field.Content: { - Type: schema.String, - Description: desc.Text( - text.DescKeyMCPToolPropMainContent), + }, + Annotations: &proto.ToolAnnotations{}, + }, + { + Name: cfgMcpTool.Next, + Description: desc.Text( + text.DescKeyMCPToolNextDesc), + InputSchema: proto.InputSchema{Type: schema.Object}, + Annotations: &proto.ToolAnnotations{ReadOnlyHint: true}, + }, + { + Name: cfgMcpTool.CheckTaskCompletion, + Description: desc.Text( + text.DescKeyMCPToolCheckTaskDesc), + InputSchema: proto.InputSchema{ + Type: schema.Object, + Properties: map[string]proto.Property{ + field.RecentAction: { + Type: schema.String, + Description: desc.Text( + text.DescKeyMCPToolPropRecentAct), + }, }, - }, EntryAttrProps( - text.DescKeyMCPToolPropCtxBg)), - Required: []string{cli.AttrType, field.Content}, + }, + Annotations: &proto.ToolAnnotations{ReadOnlyHint: true}, }, - Annotations: &proto.ToolAnnotations{}, - }, - { - Name: cfgMcpTool.Compact, - Description: desc.Text( - text.DescKeyMCPToolCompactDesc), - InputSchema: proto.InputSchema{ - Type: schema.Object, - Properties: map[string]proto.Property{ - field.Archive: { - Type: schema.Boolean, - Description: desc.Text( - 
text.DescKeyMCPToolPropArchive), + { + Name: cfgMcpTool.SessionEvent, + Description: desc.Text( + text.DescKeyMCPToolSessionDesc), + InputSchema: proto.InputSchema{ + Type: schema.Object, + Properties: map[string]proto.Property{ + cli.AttrType: { + Type: schema.String, + Description: desc.Text( + text.DescKeyMCPToolPropEventType), + }, + field.Caller: { + Type: schema.String, + Description: desc.Text( + text.DescKeyMCPToolPropCaller), + }, }, + Required: []string{cli.AttrType}, }, + Annotations: &proto.ToolAnnotations{}, }, - Annotations: &proto.ToolAnnotations{}, - }, - { - Name: cfgMcpTool.Next, - Description: desc.Text( - text.DescKeyMCPToolNextDesc), - InputSchema: proto.InputSchema{Type: schema.Object}, - Annotations: &proto.ToolAnnotations{ReadOnlyHint: true}, - }, - { - Name: cfgMcpTool.CheckTaskCompletion, - Description: desc.Text( - text.DescKeyMCPToolCheckTaskDesc), - InputSchema: proto.InputSchema{ - Type: schema.Object, - Properties: map[string]proto.Property{ - field.RecentAction: { - Type: schema.String, - Description: desc.Text( - text.DescKeyMCPToolPropRecentAct), + { + Name: cfgMcpTool.Remind, + Description: desc.Text( + text.DescKeyMCPToolRemindDesc), + InputSchema: proto.InputSchema{Type: schema.Object}, + Annotations: &proto.ToolAnnotations{ReadOnlyHint: true}, + }, + { + Name: cfgMcpTool.SteeringGet, + Description: desc.Text( + text.DescKeyMCPToolSteeringGetDesc), + InputSchema: proto.InputSchema{ + Type: schema.Object, + Properties: map[string]proto.Property{ + field.Prompt: { + Type: schema.String, + Description: desc.Text( + text.DescKeyMCPToolPropPrompt), + }, }, }, + Annotations: &proto.ToolAnnotations{ReadOnlyHint: true}, }, - Annotations: &proto.ToolAnnotations{ReadOnlyHint: true}, - }, - { - Name: cfgMcpTool.SessionEvent, - Description: desc.Text( - text.DescKeyMCPToolSessionDesc), - InputSchema: proto.InputSchema{ - Type: schema.Object, - Properties: map[string]proto.Property{ - cli.AttrType: { - Type: schema.String, - Description: 
desc.Text( - text.DescKeyMCPToolPropEventType), + { + Name: cfgMcpTool.Search, + Description: desc.Text( + text.DescKeyMCPToolSearchDesc), + InputSchema: proto.InputSchema{ + Type: schema.Object, + Properties: map[string]proto.Property{ + field.Query: { + Type: schema.String, + Description: desc.Text( + text.DescKeyMCPToolPropSearchQuery), + }, }, - field.Caller: { - Type: schema.String, - Description: desc.Text( - text.DescKeyMCPToolPropCaller), + Required: []string{field.Query}, + }, + Annotations: &proto.ToolAnnotations{ReadOnlyHint: true}, + }, + { + Name: cfgMcpTool.SessionStart, + Description: desc.Text( + text.DescKeyMCPToolSessionStartDesc), + InputSchema: proto.InputSchema{Type: schema.Object}, + Annotations: &proto.ToolAnnotations{}, + }, + { + Name: cfgMcpTool.SessionEnd, + Description: desc.Text( + text.DescKeyMCPToolSessionEndDesc), + InputSchema: proto.InputSchema{ + Type: schema.Object, + Properties: map[string]proto.Property{ + field.Summary: { + Type: schema.String, + Description: desc.Text( + text.DescKeyMCPToolPropSummary), + }, }, }, - Required: []string{cli.AttrType}, + Annotations: &proto.ToolAnnotations{}, }, - Annotations: &proto.ToolAnnotations{}, - }, - { - Name: cfgMcpTool.Remind, - Description: desc.Text( - text.DescKeyMCPToolRemindDesc), - InputSchema: proto.InputSchema{Type: schema.Object}, - Annotations: &proto.ToolAnnotations{ReadOnlyHint: true}, - }, + } } diff --git a/internal/mcp/server/route/tool/dispatch.go b/internal/mcp/server/route/tool/dispatch.go index 7a8c724e9..397715cfa 100644 --- a/internal/mcp/server/route/tool/dispatch.go +++ b/internal/mcp/server/route/tool/dispatch.go @@ -27,7 +27,7 @@ import ( // Returns: // - *proto.Response: tool list response func DispatchList(req proto.Request) *proto.Response { - return out.OkResponse(req.ID, proto.ToolListResult{Tools: defTool.Defs}) + return out.OkResponse(req.ID, proto.ToolListResult{Tools: defTool.Defs()}) } // DispatchCall unmarshals tool call params and dispatches to the 
@@ -88,6 +88,14 @@ func DispatchCall( resp = sessionEvent(req.ID, params.Arguments, h.SessionEvent) case tool.Remind: resp = out.Call(req.ID, h.Remind) + case tool.SteeringGet: + resp = steeringGet(h, req.ID, params.Arguments) + case tool.Search: + resp = search(h, req.ID, params.Arguments) + case tool.SessionStart: + resp = out.Call(req.ID, h.SessionStartHooks) + case tool.SessionEnd: + resp = sessionEnd(h, req.ID, params.Arguments) default: return out.ErrResponse( req.ID, proto.ErrCodeNotFound, diff --git a/internal/mcp/server/route/tool/steering.go b/internal/mcp/server/route/tool/steering.go new file mode 100644 index 000000000..8751ef53e --- /dev/null +++ b/internal/mcp/server/route/tool/steering.go @@ -0,0 +1,80 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package tool + +import ( + "encoding/json" + + "github.com/ActiveMemory/ctx/internal/assets/read/desc" + "github.com/ActiveMemory/ctx/internal/config/embed/text" + "github.com/ActiveMemory/ctx/internal/config/mcp/field" + "github.com/ActiveMemory/ctx/internal/mcp/handler" + "github.com/ActiveMemory/ctx/internal/mcp/proto" + "github.com/ActiveMemory/ctx/internal/mcp/server/out" +) + +// steeringGet extracts the optional prompt and delegates to +// handler.SteeringGet. +// +// Parameters: +// - h: handler for domain logic +// - id: JSON-RPC request ID +// - args: MCP tool arguments (prompt) +// +// Returns: +// - *proto.Response: steering files or error +func steeringGet( + h *handler.Handler, id json.RawMessage, + args map[string]interface{}, +) *proto.Response { + prompt, _ := args[field.Prompt].(string) + t, err := h.SteeringGet(prompt) + return out.ToolResult(id, t, err) +} + +// search extracts the required query and delegates to +// handler.Search. 
+// +// Parameters: +// - h: handler for domain logic +// - id: JSON-RPC request ID +// - args: MCP tool arguments (query) +// +// Returns: +// - *proto.Response: search results or error +func search( + h *handler.Handler, id json.RawMessage, + args map[string]interface{}, +) *proto.Response { + query, _ := args[field.Query].(string) + if query == "" { + return out.ToolError( + id, desc.Text(text.DescKeyMCPErrQueryRequired), + ) + } + t, err := h.Search(query) + return out.ToolResult(id, t, err) +} + +// sessionEnd extracts the optional summary and delegates to +// handler.SessionEndHooks. +// +// Parameters: +// - h: handler for domain logic +// - id: JSON-RPC request ID +// - args: MCP tool arguments (summary) +// +// Returns: +// - *proto.Response: session end result or error +func sessionEnd( + h *handler.Handler, id json.RawMessage, + args map[string]interface{}, +) *proto.Response { + summary, _ := args[field.Summary].(string) + t, err := h.SessionEndHooks(summary) + return out.ToolResult(id, t, err) +} diff --git a/internal/mcp/server/server_test.go b/internal/mcp/server/server_test.go index 6a572030a..a376fcf48 100644 --- a/internal/mcp/server/server_test.go +++ b/internal/mcp/server/server_test.go @@ -240,8 +240,8 @@ func TestToolsList(t *testing.T) { if err := json.Unmarshal(raw, &result); err != nil { t.Fatalf("unmarshal: %v", err) } - if len(result.Tools) != 11 { - t.Errorf("tool count = %d, want 11", len(result.Tools)) + if len(result.Tools) != 15 { + t.Errorf("tool count = %d, want 15", len(result.Tools)) } names := make(map[string]bool) for _, tool := range result.Tools { @@ -252,6 +252,8 @@ func TestToolsList(t *testing.T) { "ctx_journal_source", "ctx_watch_update", "ctx_compact", "ctx_next", "ctx_check_task_completion", "ctx_session_event", "ctx_remind", + "ctx_steering_get", "ctx_search", + "ctx_session_start", "ctx_session_end", } { if !names[want] { t.Errorf("missing tool: %s", want) @@ -1164,3 +1166,195 @@ func TestResourcePollerNotification(t 
*testing.T) { srv.poller.Stop() } + +// --- Steering and session hook tool tests --- + +func TestToolSteeringGetWithPrompt(t *testing.T) { + srv, contextDir := newTestServer(t) + + // Create steering directory with test files. + steeringDir := filepath.Join(contextDir, "steering") + if err := os.MkdirAll(steeringDir, 0o755); err != nil { + t.Fatalf("mkdir steering: %v", err) + } + alwaysFile := "---\nname: always-rules\ndescription: Always included\ninclusion: always\npriority: 10\n---\n\nAlways body content.\n" + autoFile := "---\nname: api-rules\ndescription: API design\ninclusion: auto\npriority: 20\n---\n\nAPI body content.\n" + if err := os.WriteFile(filepath.Join(steeringDir, "always-rules.md"), []byte(alwaysFile), 0o644); err != nil { + t.Fatalf("write always: %v", err) + } + if err := os.WriteFile(filepath.Join(steeringDir, "api-rules.md"), []byte(autoFile), 0o644); err != nil { + t.Fatalf("write auto: %v", err) + } + + resp := request(t, srv, "tools/call", proto.CallToolParams{ + Name: "ctx_steering_get", + Arguments: map[string]interface{}{"prompt": "API design"}, + }) + if resp.Error != nil { + t.Fatalf("unexpected error: %v", resp.Error.Message) + } + raw, _ := json.Marshal(resp.Result) + var result proto.CallToolResult + if err := json.Unmarshal(raw, &result); err != nil { + t.Fatalf("unmarshal: %v", err) + } + if result.IsError { + t.Fatalf("unexpected tool error: %s", result.Content[0].Text) + } + text := result.Content[0].Text + // Should include both always and auto-matched files. + if !strings.Contains(text, "always-rules") { + t.Errorf("expected always-rules in response, got: %s", text) + } + if !strings.Contains(text, "api-rules") { + t.Errorf("expected api-rules in response, got: %s", text) + } +} + +func TestToolSteeringGetWithoutPrompt(t *testing.T) { + srv, contextDir := newTestServer(t) + + // Create steering directory with test files. 
+ steeringDir := filepath.Join(contextDir, "steering") + if err := os.MkdirAll(steeringDir, 0o755); err != nil { + t.Fatalf("mkdir steering: %v", err) + } + alwaysFile := "---\nname: always-rules\ndescription: Always included\ninclusion: always\npriority: 10\n---\n\nAlways body.\n" + autoFile := "---\nname: api-rules\ndescription: API design\ninclusion: auto\npriority: 20\n---\n\nAPI body.\n" + if err := os.WriteFile(filepath.Join(steeringDir, "always-rules.md"), []byte(alwaysFile), 0o644); err != nil { + t.Fatalf("write always: %v", err) + } + if err := os.WriteFile(filepath.Join(steeringDir, "api-rules.md"), []byte(autoFile), 0o644); err != nil { + t.Fatalf("write auto: %v", err) + } + + // No prompt — should return only always files. + resp := request(t, srv, "tools/call", proto.CallToolParams{ + Name: "ctx_steering_get", + }) + if resp.Error != nil { + t.Fatalf("unexpected error: %v", resp.Error.Message) + } + raw, _ := json.Marshal(resp.Result) + var result proto.CallToolResult + if err := json.Unmarshal(raw, &result); err != nil { + t.Fatalf("unmarshal: %v", err) + } + if result.IsError { + t.Fatalf("unexpected tool error: %s", result.Content[0].Text) + } + text := result.Content[0].Text + if !strings.Contains(text, "always-rules") { + t.Errorf("expected always-rules in response, got: %s", text) + } + if strings.Contains(text, "api-rules") { + t.Errorf("auto file should not be included without matching prompt, got: %s", text) + } +} + +func TestToolSessionStartNoHooks(t *testing.T) { + srv, contextDir := newTestServer(t) + + // Create empty hooks directory so discovery succeeds. 
+ hooksDir := filepath.Join(contextDir, "hooks") + if err := os.MkdirAll(filepath.Join(hooksDir, "session-start"), 0o755); err != nil { + t.Fatalf("mkdir hooks: %v", err) + } + + resp := request(t, srv, "tools/call", proto.CallToolParams{ + Name: "ctx_session_start", + }) + if resp.Error != nil { + t.Fatalf("unexpected error: %v", resp.Error.Message) + } + raw, _ := json.Marshal(resp.Result) + var result proto.CallToolResult + if err := json.Unmarshal(raw, &result); err != nil { + t.Fatalf("unmarshal: %v", err) + } + if result.IsError { + t.Fatalf("unexpected tool error: %s", result.Content[0].Text) + } + // No hooks exist — should return success message. + text := result.Content[0].Text + if !strings.Contains(text, "Session start hooks executed") && + !strings.Contains(text, "No additional context") { + t.Errorf("expected success message for no hooks, got: %s", text) + } +} + +func TestToolSessionEndWithSummary(t *testing.T) { + srv, contextDir := newTestServer(t) + + // Create empty hooks directory. + hooksDir := filepath.Join(contextDir, "hooks") + if err := os.MkdirAll(filepath.Join(hooksDir, "session-end"), 0o755); err != nil { + t.Fatalf("mkdir hooks: %v", err) + } + + resp := request(t, srv, "tools/call", proto.CallToolParams{ + Name: "ctx_session_end", + Arguments: map[string]interface{}{ + "summary": "Completed MCP server implementation", + }, + }) + if resp.Error != nil { + t.Fatalf("unexpected error: %v", resp.Error.Message) + } + raw, _ := json.Marshal(resp.Result) + var result proto.CallToolResult + if err := json.Unmarshal(raw, &result); err != nil { + t.Fatalf("unmarshal: %v", err) + } + if result.IsError { + t.Fatalf("unexpected tool error: %s", result.Content[0].Text) + } + // No hooks exist — should return success. 
+ text := result.Content[0].Text + if !strings.Contains(text, "Session end hooks executed") { + t.Errorf("expected session end success message, got: %s", text) + } +} + +func TestToolSearch(t *testing.T) { + srv, _ := newTestServer(t) + + resp := request(t, srv, "tools/call", proto.CallToolParams{ + Name: "ctx_search", + Arguments: map[string]interface{}{"query": "Rule 1"}, + }) + if resp.Error != nil { + t.Fatalf("unexpected error: %v", resp.Error.Message) + } + raw, _ := json.Marshal(resp.Result) + var result proto.CallToolResult + if err := json.Unmarshal(raw, &result); err != nil { + t.Fatalf("unmarshal: %v", err) + } + if result.IsError { + t.Fatalf("unexpected tool error: %s", result.Content[0].Text) + } + text := result.Content[0].Text + if !strings.Contains(text, "CONSTITUTION.md") { + t.Errorf("expected match in CONSTITUTION.md, got: %s", text) + } +} + +func TestToolSearchNoQuery(t *testing.T) { + srv, _ := newTestServer(t) + + resp := request(t, srv, "tools/call", proto.CallToolParams{ + Name: "ctx_search", + }) + if resp.Error != nil { + t.Fatalf("unexpected error: %v", resp.Error.Message) + } + raw, _ := json.Marshal(resp.Result) + var result proto.CallToolResult + if err := json.Unmarshal(raw, &result); err != nil { + t.Fatalf("unmarshal: %v", err) + } + if !result.IsError { + t.Error("expected error when query is missing") + } +} diff --git a/internal/rc/default.go b/internal/rc/default.go index d3fb4dc77..28c40d8f7 100644 --- a/internal/rc/default.go +++ b/internal/rc/default.go @@ -31,3 +31,13 @@ const ( // DefaultStaleAgeDays is the default stale entry age. DefaultStaleAgeDays = runtime.DefaultStaleAgeDays ) + +// Hooks & Steering defaults. +const ( + // DefaultSteeringDir is the default steering directory path. + DefaultSteeringDir = ".context/steering" + // DefaultHooksDir is the default hooks directory path. + DefaultHooksDir = ".context/hooks" + // DefaultHookTimeout is the default per-hook execution timeout in seconds. 
+ DefaultHookTimeout = 10 +) diff --git a/internal/rc/rc.go b/internal/rc/rc.go index 0bf96c6a3..b61972c9f 100644 --- a/internal/rc/rc.go +++ b/internal/rc/rc.go @@ -347,6 +347,78 @@ func CompanionCheck() bool { return *cfg.CompanionCheck } +// Tool returns the configured AI tool identifier (e.g., "claude", "cursor", +// "cline", "kiro", "codex"). +// +// Returns an empty string when no tool is configured in .ctxrc. +// +// Returns: +// - string: The tool identifier, or "" if not set +func Tool() string { + return RC().Tool +} + +// SteeringDir returns the configured steering directory path. +// +// Returns the value from .ctxrc steering.dir, or the default +// ".context/steering" when not configured. +// +// Returns: +// - string: The steering directory path +func SteeringDir() string { + cfg := RC() + if cfg.Steering != nil && cfg.Steering.Dir != "" { + return cfg.Steering.Dir + } + return DefaultSteeringDir +} + +// HooksDir returns the configured hooks directory path. +// +// Returns the value from .ctxrc hooks.dir, or the default +// ".context/hooks" when not configured. +// +// Returns: +// - string: The hooks directory path +func HooksDir() string { + cfg := RC() + if cfg.Hooks != nil && cfg.Hooks.Dir != "" { + return cfg.Hooks.Dir + } + return DefaultHooksDir +} + +// HookTimeout returns the configured per-hook execution timeout in seconds. +// +// Returns the value from .ctxrc hooks.timeout, or the default 10 seconds +// when not configured or set to zero. +// +// Returns: +// - int: Timeout in seconds +func HookTimeout() int { + cfg := RC() + if cfg.Hooks != nil && cfg.Hooks.Timeout > 0 { + return cfg.Hooks.Timeout + } + return DefaultHookTimeout +} + +// HooksEnabled returns whether hook execution is enabled. +// +// Returns true (default) when the hooks section is not configured or +// when the enabled field is not explicitly set. Returns false only when +// hooks.enabled is explicitly set to false in .ctxrc. 
+// +// Returns: +// - bool: True if hooks are enabled +func HooksEnabled() bool { + cfg := RC() + if cfg.Hooks != nil && cfg.Hooks.Enabled != nil { + return *cfg.Hooks.Enabled + } + return true +} + // AllowOutsideCwd returns whether boundary validation should be skipped. // // Returns false (default) when the field is not set in .ctxrc. diff --git a/internal/rc/rc_test.go b/internal/rc/rc_test.go index 6bbd33c7f..8b615e960 100644 --- a/internal/rc/rc_test.go +++ b/internal/rc/rc_test.go @@ -694,3 +694,169 @@ func TestGetRC_NegativeEnvBudget(t *testing.T) { ) } } + +// --- Hooks & Steering RC field tests --- +// Validates: Requirements 19.8 + +func TestTool_Default(t *testing.T) { + tempDir := t.TempDir() + origDir, _ := os.Getwd() + _ = os.Chdir(tempDir) + defer func() { _ = os.Chdir(origDir) }() + + Reset() + + // Default is empty string when not configured + tool := Tool() + if tool != "" { + t.Errorf("Tool() = %q, want %q", tool, "") + } +} + +func TestTool_Configured(t *testing.T) { + tempDir := t.TempDir() + origDir, _ := os.Getwd() + _ = os.Chdir(tempDir) + defer func() { _ = os.Chdir(origDir) }() + + rcContent := `tool: kiro` + _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) + + Reset() + + tool := Tool() + if tool != "kiro" { + t.Errorf("Tool() = %q, want %q", tool, "kiro") + } +} + +func TestSteeringDir_Default(t *testing.T) { + tempDir := t.TempDir() + origDir, _ := os.Getwd() + _ = os.Chdir(tempDir) + defer func() { _ = os.Chdir(origDir) }() + + Reset() + + dir := SteeringDir() + if dir != DefaultSteeringDir { + t.Errorf("SteeringDir() = %q, want %q", dir, DefaultSteeringDir) + } +} + +func TestSteeringDir_Configured(t *testing.T) { + tempDir := t.TempDir() + origDir, _ := os.Getwd() + _ = os.Chdir(tempDir) + defer func() { _ = os.Chdir(origDir) }() + + rcContent := `steering: + dir: custom/steering +` + _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) + + Reset() + + dir := SteeringDir() + if dir 
!= "custom/steering" { + t.Errorf("SteeringDir() = %q, want %q", dir, "custom/steering") + } +} + +func TestHooksDir_Default(t *testing.T) { + tempDir := t.TempDir() + origDir, _ := os.Getwd() + _ = os.Chdir(tempDir) + defer func() { _ = os.Chdir(origDir) }() + + Reset() + + dir := HooksDir() + if dir != DefaultHooksDir { + t.Errorf("HooksDir() = %q, want %q", dir, DefaultHooksDir) + } +} + +func TestHooksDir_Configured(t *testing.T) { + tempDir := t.TempDir() + origDir, _ := os.Getwd() + _ = os.Chdir(tempDir) + defer func() { _ = os.Chdir(origDir) }() + + rcContent := `hooks: + dir: custom/hooks +` + _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) + + Reset() + + dir := HooksDir() + if dir != "custom/hooks" { + t.Errorf("HooksDir() = %q, want %q", dir, "custom/hooks") + } +} + +func TestHookTimeout_Default(t *testing.T) { + tempDir := t.TempDir() + origDir, _ := os.Getwd() + _ = os.Chdir(tempDir) + defer func() { _ = os.Chdir(origDir) }() + + Reset() + + timeout := HookTimeout() + if timeout != DefaultHookTimeout { + t.Errorf("HookTimeout() = %d, want %d", timeout, DefaultHookTimeout) + } +} + +func TestHookTimeout_Configured(t *testing.T) { + tempDir := t.TempDir() + origDir, _ := os.Getwd() + _ = os.Chdir(tempDir) + defer func() { _ = os.Chdir(origDir) }() + + rcContent := `hooks: + timeout: 30 +` + _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) + + Reset() + + timeout := HookTimeout() + if timeout != 30 { + t.Errorf("HookTimeout() = %d, want %d", timeout, 30) + } +} + +func TestHooksEnabled_Default(t *testing.T) { + tempDir := t.TempDir() + origDir, _ := os.Getwd() + _ = os.Chdir(tempDir) + defer func() { _ = os.Chdir(origDir) }() + + Reset() + + // Default (nil Hooks pointer) should return true + if !HooksEnabled() { + t.Error("HooksEnabled() = false, want true (default)") + } +} + +func TestHooksEnabled_ExplicitFalse(t *testing.T) { + tempDir := t.TempDir() + origDir, _ := os.Getwd() + _ = 
os.Chdir(tempDir) + defer func() { _ = os.Chdir(origDir) }() + + rcContent := `hooks: + enabled: false +` + _ = os.WriteFile(filepath.Join(tempDir, ".ctxrc"), []byte(rcContent), 0600) + + Reset() + + if HooksEnabled() { + t.Error("HooksEnabled() = true, want false") + } +} diff --git a/internal/rc/types.go b/internal/rc/types.go index 7b06a2dce..39b239e39 100644 --- a/internal/rc/types.go +++ b/internal/rc/types.go @@ -54,8 +54,13 @@ import cfgMemory "github.com/ActiveMemory/ctx/internal/config/memory" // when adding tasks (overrides defaults when set) // - SpecNudgeMinLen: Task content length threshold // for spec nudge (default 150) +// - Tool: Active AI tool identifier (e.g., claude, +// cursor, cline, kiro, codex) +// - Steering: Steering layer configuration overrides +// - Hooks: Hook system configuration overrides type CtxRC struct { Profile string `yaml:"profile"` + Tool string `yaml:"tool"` ContextDir string `yaml:"context_dir"` TokenBudget int `yaml:"token_budget"` PriorityOrder []string `yaml:"priority_order"` @@ -81,6 +86,8 @@ type CtxRC struct { SpecSignalWords []string `yaml:"spec_signal_words"` SpecNudgeMinLen int `yaml:"spec_nudge_min_len"` Notify *NotifyConfig `yaml:"notify"` + Steering *SteeringRC `yaml:"steering"` + Hooks *HooksRC `yaml:"hooks"` } // FreshnessFile describes a source file containing technology-dependent @@ -109,3 +116,34 @@ type NotifyConfig struct { Events []string `yaml:"events"` KeyRotationDays int `yaml:"key_rotation_days"` } + +// SteeringRC holds steering layer configuration from .ctxrc. 
+// +// Fields: +// - Dir: Path override for the steering directory +// (default ".context/steering") +// - DefaultInclusion: Default inclusion mode for new +// steering files (default "manual") +// - DefaultTools: Default tool identifier list for new +// steering files (default: all tools) +type SteeringRC struct { + Dir string `yaml:"dir"` + DefaultInclusion string `yaml:"default_inclusion"` + DefaultTools []string `yaml:"default_tools"` +} + +// HooksRC holds hook system configuration from .ctxrc. +// +// Fields: +// - Dir: Path override for the hooks directory +// (default ".context/hooks") +// - Timeout: Per-hook execution timeout in seconds +// (default 10) +// - Enabled: Whether hook execution is enabled +// (default true). Pointer type distinguishes unset +// (nil → true) from explicitly set to false. +type HooksRC struct { + Dir string `yaml:"dir"` + Timeout int `yaml:"timeout"` + Enabled *bool `yaml:"enabled"` +} diff --git a/internal/skill/copy.go b/internal/skill/copy.go new file mode 100644 index 000000000..80c5e9e14 --- /dev/null +++ b/internal/skill/copy.go @@ -0,0 +1,70 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package skill + +import ( + "io" + "os" + "path/filepath" + + "github.com/ActiveMemory/ctx/internal/config/fs" + ctxIo "github.com/ActiveMemory/ctx/internal/io" +) + +// copyDir recursively copies the contents of src into dst. +// Both directories must already exist. 
+func copyDir(src, dst string) error { + entries, readErr := os.ReadDir(src) + if readErr != nil { + return readErr + } + + for _, entry := range entries { + srcPath := filepath.Join(src, entry.Name()) + dstPath := filepath.Join(dst, entry.Name()) + + if entry.IsDir() { + if mkdirErr := ctxIo.SafeMkdirAll( + dstPath, fs.PermRestrictedDir, + ); mkdirErr != nil { + return mkdirErr + } + if recurseErr := copyDir(srcPath, dstPath); recurseErr != nil { + return recurseErr + } + continue + } + + if cpErr := copyFile(srcPath, dstPath); cpErr != nil { + return cpErr + } + } + return nil +} + +// copyFile copies a single file from src to dst, preserving permissions. +func copyFile(src, dst string) error { + info, statErr := ctxIo.SafeStat(src) + if statErr != nil { + return statErr + } + + in, openErr := ctxIo.SafeOpenUserFile(src) + if openErr != nil { + return openErr + } + defer func() { _ = in.Close() }() + + out, createErr := ctxIo.SafeCreateFile(dst, info.Mode().Perm()) + if createErr != nil { + return createErr + } + defer func() { _ = out.Close() }() + + _, copyErr := io.Copy(out, in) + return copyErr +} diff --git a/internal/skill/doc.go b/internal/skill/doc.go new file mode 100644 index 000000000..94fdf6958 --- /dev/null +++ b/internal/skill/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package skill manages reusable instruction bundles with YAML frontmatter. +// +// Key exports: [Install], [LoadAll], [Load], [Remove]. +// See source files for implementation details. +// Part of the internal subsystem. +package skill diff --git a/internal/skill/install.go b/internal/skill/install.go new file mode 100644 index 000000000..e8a23e06a --- /dev/null +++ b/internal/skill/install.go @@ -0,0 +1,53 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package skill + +import ( + "os" + "path/filepath" + + "github.com/ActiveMemory/ctx/internal/config/fs" + errSkill "github.com/ActiveMemory/ctx/internal/err/skill" + ctxIo "github.com/ActiveMemory/ctx/internal/io" +) + +// Install copies a skill from the source directory into skillsDir//. +// The source must contain a valid SKILL.md with parseable YAML frontmatter. +// The skill name is derived from the parsed frontmatter. +func Install(source, skillsDir string) (*Skill, error) { + manifestPath := filepath.Join(source, skillManifest) + + data, readErr := ctxIo.SafeReadUserFile(manifestPath) + if readErr != nil { + return nil, errSkill.NotValidSource(readErr) + } + + sk, parseErr := parseManifest(data, filepath.Base(source), source) + if parseErr != nil { + return nil, errSkill.InvalidManifest(skillManifest, parseErr) + } + + if sk.Name == "" { + return nil, errSkill.MissingName(skillManifest) + } + + destDir := filepath.Join(skillsDir, sk.Name) + if mkdirErr := ctxIo.SafeMkdirAll( + destDir, fs.PermRestrictedDir, + ); mkdirErr != nil { + return nil, errSkill.CreateDest(mkdirErr) + } + + if copyErr := copyDir(source, destDir); copyErr != nil { + // Clean up partial copy on failure. + _ = os.RemoveAll(destDir) + return nil, errSkill.Install(sk.Name, copyErr) + } + + sk.Dir = destDir + return sk, nil +} diff --git a/internal/skill/install_test.go b/internal/skill/install_test.go new file mode 100644 index 000000000..2e177d37e --- /dev/null +++ b/internal/skill/install_test.go @@ -0,0 +1,154 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package skill + +import ( + "os" + "path/filepath" + "testing" +) + +func TestInstall_ValidSkill(t *testing.T) { + source := t.TempDir() + skillsDir := t.TempDir() + + manifest := `--- +name: test-skill +description: A test skill +--- +# Instructions +Do the thing. 
+` + if err := os.WriteFile(filepath.Join(source, skillManifest), []byte(manifest), 0o644); err != nil { + t.Fatal(err) + } + // Add an extra file to verify full directory copy. + if err := os.WriteFile(filepath.Join(source, "extra.md"), []byte("extra"), 0o644); err != nil { + t.Fatal(err) + } + + sk, err := Install(source, skillsDir) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if sk.Name != "test-skill" { + t.Errorf("expected name 'test-skill', got %q", sk.Name) + } + if sk.Description != "A test skill" { + t.Errorf("expected description 'A test skill', got %q", sk.Description) + } + if sk.Dir != filepath.Join(skillsDir, "test-skill") { + t.Errorf("expected dir %q, got %q", filepath.Join(skillsDir, "test-skill"), sk.Dir) + } + + // Verify SKILL.md was copied. + if _, err := os.Stat(filepath.Join(skillsDir, "test-skill", skillManifest)); err != nil { + t.Errorf("SKILL.md not copied: %v", err) + } + // Verify extra file was copied. + if _, err := os.Stat(filepath.Join(skillsDir, "test-skill", "extra.md")); err != nil { + t.Errorf("extra.md not copied: %v", err) + } +} + +func TestInstall_CopiesSubdirectories(t *testing.T) { + source := t.TempDir() + skillsDir := t.TempDir() + + manifest := `--- +name: nested-skill +--- +Body +` + if err := os.WriteFile(filepath.Join(source, skillManifest), []byte(manifest), 0o644); err != nil { + t.Fatal(err) + } + subDir := filepath.Join(source, "templates") + if err := os.MkdirAll(subDir, 0o755); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(subDir, "tmpl.txt"), []byte("template"), 0o644); err != nil { + t.Fatal(err) + } + + sk, err := Install(source, skillsDir) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + copied := filepath.Join(sk.Dir, "templates", "tmpl.txt") + data, err := os.ReadFile(copied) + if err != nil { + t.Fatalf("subdirectory file not copied: %v", err) + } + if string(data) != "template" { + t.Errorf("expected 'template', got %q", string(data)) + } +} 
+ +func TestInstall_MissingManifest(t *testing.T) { + source := t.TempDir() + skillsDir := t.TempDir() + + _, err := Install(source, skillsDir) + if err == nil { + t.Fatal("expected error for missing SKILL.md") + } +} + +func TestInstall_InvalidFrontmatter(t *testing.T) { + source := t.TempDir() + skillsDir := t.TempDir() + + manifest := `--- +name: [broken yaml +--- +Body +` + if err := os.WriteFile(filepath.Join(source, skillManifest), []byte(manifest), 0o644); err != nil { + t.Fatal(err) + } + + _, err := Install(source, skillsDir) + if err == nil { + t.Fatal("expected error for invalid YAML frontmatter") + } +} + +func TestInstall_MissingName(t *testing.T) { + source := t.TempDir() + skillsDir := t.TempDir() + + manifest := `--- +description: no name field +--- +Body +` + if err := os.WriteFile(filepath.Join(source, skillManifest), []byte(manifest), 0o644); err != nil { + t.Fatal(err) + } + + _, err := Install(source, skillsDir) + if err == nil { + t.Fatal("expected error for missing name field") + } +} + +func TestInstall_NoFrontmatterDelimiters(t *testing.T) { + source := t.TempDir() + skillsDir := t.TempDir() + + if err := os.WriteFile(filepath.Join(source, skillManifest), []byte("just plain text"), 0o644); err != nil { + t.Fatal(err) + } + + _, err := Install(source, skillsDir) + if err == nil { + t.Fatal("expected error for missing frontmatter delimiters") + } +} diff --git a/internal/skill/load.go b/internal/skill/load.go new file mode 100644 index 000000000..79882cd86 --- /dev/null +++ b/internal/skill/load.go @@ -0,0 +1,70 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package skill + +import ( + "errors" + "os" + "path/filepath" + + errSkill "github.com/ActiveMemory/ctx/internal/err/skill" + ctxIo "github.com/ActiveMemory/ctx/internal/io" +) + +// skillManifest is the expected filename inside each skill directory. 
+const skillManifest = "SKILL.md" + +// frontmatterDelimiter is the YAML frontmatter boundary marker. +const frontmatterDelimiter = "---" + +// trimCR is the character set trimmed from the start of +// raw frontmatter content to normalize line endings. +const trimCR = "\n\r" + +// LoadAll reads all installed skills from subdirectories of skillsDir. +// Each subdirectory must contain a SKILL.md file with YAML frontmatter. +// Returns an empty slice without error if the skills directory does not exist. +func LoadAll(skillsDir string) ([]*Skill, error) { + entries, readErr := os.ReadDir(skillsDir) + if readErr != nil { + if errors.Is(readErr, os.ErrNotExist) { + return nil, nil + } + return nil, errSkill.ReadDir(skillsDir, readErr) + } + + var skills []*Skill + for _, entry := range entries { + if !entry.IsDir() { + continue + } + sk, loadErr := Load(skillsDir, entry.Name()) + if loadErr != nil { + return nil, loadErr + } + skills = append(skills, sk) + } + return skills, nil +} + +// Load reads a single skill by name from the given skills directory. +// The name corresponds to a subdirectory containing a SKILL.md file. +func Load(skillsDir, name string) (*Skill, error) { + dir := filepath.Join(skillsDir, name) + manifestPath := filepath.Join(dir, skillManifest) + + data, readErr := ctxIo.SafeReadUserFile(manifestPath) + if readErr != nil { + return nil, errSkill.Load(name, readErr) + } + + sk, parseErr := parseManifest(data, name, dir) + if parseErr != nil { + return nil, parseErr + } + return sk, nil +} diff --git a/internal/skill/load_test.go b/internal/skill/load_test.go new file mode 100644 index 000000000..c923769b2 --- /dev/null +++ b/internal/skill/load_test.go @@ -0,0 +1,187 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package skill + +import ( + "os" + "path/filepath" + "testing" +) + +func writeSkillManifest(t *testing.T, dir, name, content string) { + t.Helper() + skillDir := filepath.Join(dir, name) + if err := os.MkdirAll(skillDir, 0o755); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(skillDir, skillManifest), []byte(content), 0o644); err != nil { + t.Fatal(err) + } +} + +func TestLoadAll_NonExistentDir(t *testing.T) { + skills, err := LoadAll("/nonexistent/path") + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + if len(skills) != 0 { + t.Fatalf("expected empty slice, got %d skills", len(skills)) + } +} + +func TestLoadAll_EmptyDir(t *testing.T) { + dir := t.TempDir() + skills, err := LoadAll(dir) + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + if len(skills) != 0 { + t.Fatalf("expected empty slice, got %d skills", len(skills)) + } +} + +func TestLoadAll_MultipleSkills(t *testing.T) { + dir := t.TempDir() + + writeSkillManifest(t, dir, "alpha", `--- +name: alpha +description: Alpha skill +--- +Alpha body content +`) + writeSkillManifest(t, dir, "beta", `--- +name: beta +description: Beta skill +--- +Beta body content +`) + + skills, err := LoadAll(dir) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(skills) != 2 { + t.Fatalf("expected 2 skills, got %d", len(skills)) + } + + // os.ReadDir returns entries sorted by name. + if skills[0].Name != "alpha" { + t.Errorf("expected first skill name 'alpha', got %q", skills[0].Name) + } + if skills[1].Name != "beta" { + t.Errorf("expected second skill name 'beta', got %q", skills[1].Name) + } +} + +func TestLoadAll_SkipsNonDirectories(t *testing.T) { + dir := t.TempDir() + + writeSkillManifest(t, dir, "valid", `--- +name: valid +description: A valid skill +--- +Body +`) + // Create a regular file at the top level — should be skipped. 
+ if err := os.WriteFile(filepath.Join(dir, "README.md"), []byte("ignore"), 0o644); err != nil { + t.Fatal(err) + } + + skills, err := LoadAll(dir) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(skills) != 1 { + t.Fatalf("expected 1 skill, got %d", len(skills)) + } +} + +func TestLoad_ValidSkill(t *testing.T) { + dir := t.TempDir() + writeSkillManifest(t, dir, "react-patterns", `--- +name: react-patterns +description: React component patterns +--- +# React Patterns +- Use functional components +`) + + sk, err := Load(dir, "react-patterns") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if sk.Name != "react-patterns" { + t.Errorf("expected name 'react-patterns', got %q", sk.Name) + } + if sk.Description != "React component patterns" { + t.Errorf("expected description 'React component patterns', got %q", sk.Description) + } + if sk.Body != "# React Patterns\n- Use functional components\n" { + t.Errorf("unexpected body: %q", sk.Body) + } + expectedDir := filepath.Join(dir, "react-patterns") + if sk.Dir != expectedDir { + t.Errorf("expected dir %q, got %q", expectedDir, sk.Dir) + } +} + +func TestLoad_MissingManifest(t *testing.T) { + dir := t.TempDir() + // Create subdirectory without SKILL.md. 
+ if err := os.MkdirAll(filepath.Join(dir, "empty-skill"), 0o755); err != nil { + t.Fatal(err) + } + + _, err := Load(dir, "empty-skill") + if err == nil { + t.Fatal("expected error for missing SKILL.md") + } +} + +func TestLoad_InvalidFrontmatter(t *testing.T) { + dir := t.TempDir() + writeSkillManifest(t, dir, "bad", `--- +name: [invalid yaml +--- +Body +`) + + _, err := Load(dir, "bad") + if err == nil { + t.Fatal("expected error for invalid YAML") + } +} + +func TestLoad_MissingDelimiters(t *testing.T) { + dir := t.TempDir() + writeSkillManifest(t, dir, "no-fm", `Just plain markdown without frontmatter`) + + _, err := Load(dir, "no-fm") + if err == nil { + t.Fatal("expected error for missing frontmatter delimiters") + } +} + +func TestLoad_EmptyDescription(t *testing.T) { + dir := t.TempDir() + writeSkillManifest(t, dir, "minimal", `--- +name: minimal +--- +Minimal body +`) + + sk, err := Load(dir, "minimal") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if sk.Name != "minimal" { + t.Errorf("expected name 'minimal', got %q", sk.Name) + } + if sk.Description != "" { + t.Errorf("expected empty description, got %q", sk.Description) + } +} diff --git a/internal/skill/manifest.go b/internal/skill/manifest.go new file mode 100644 index 000000000..8c4330ae7 --- /dev/null +++ b/internal/skill/manifest.go @@ -0,0 +1,65 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package skill + +import ( + "strings" + + "gopkg.in/yaml.v3" + + "github.com/ActiveMemory/ctx/internal/config/token" + errSkill "github.com/ActiveMemory/ctx/internal/err/skill" +) + +// parseManifest extracts YAML frontmatter and markdown body from a +// SKILL.md file. The frontmatter is delimited by --- lines. 
+func parseManifest(data []byte, name, dir string) (*Skill, error) { + raw, body, splitErr := splitFrontmatter(data) + if splitErr != nil { + return nil, errSkill.Load(name, splitErr) + } + + sk := &Skill{} + if unmarshalErr := yaml.Unmarshal(raw, sk); unmarshalErr != nil { + return nil, errSkill.InvalidYAML(name, unmarshalErr) + } + + sk.Body = body + sk.Dir = dir + return sk, nil +} + +// splitFrontmatter separates YAML frontmatter from the markdown body. +// Frontmatter must start with a --- line and end with a second --- line. +func splitFrontmatter( + data []byte, +) (frontmatter []byte, body string, err error) { + content := strings.TrimLeft(string(data), trimCR) + + if !strings.HasPrefix(content, frontmatterDelimiter) { + return nil, "", errSkill.MissingOpeningDelimiter() + } + + // Skip the opening delimiter line. + rest := content[len(frontmatterDelimiter):] + rest = strings.TrimPrefix(rest, token.NewlineLF) + + needle := token.NewlineLF + frontmatterDelimiter + idx := strings.Index(rest, needle) + if idx < 0 { + return nil, "", errSkill.MissingClosingDelimiter() + } + + fm := rest[:idx] + + // Skip past the closing delimiter line. + after := rest[idx+1+len(frontmatterDelimiter):] + // Trim exactly one leading newline from the body if present. + after = strings.TrimPrefix(after, token.NewlineLF) + + return []byte(fm), after, nil +} diff --git a/internal/skill/remove.go b/internal/skill/remove.go new file mode 100644 index 000000000..3856a7ad8 --- /dev/null +++ b/internal/skill/remove.go @@ -0,0 +1,39 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package skill + +import ( + "errors" + "os" + "path/filepath" + + errSkill "github.com/ActiveMemory/ctx/internal/err/skill" + ctxIo "github.com/ActiveMemory/ctx/internal/io" +) + +// Remove deletes the skill directory for the given name from skillsDir. +// Returns an error if the skill does not exist. 
+func Remove(skillsDir, name string) error { + dir := filepath.Join(skillsDir, name) + + info, statErr := ctxIo.SafeStat(dir) + if statErr != nil { + if errors.Is(statErr, os.ErrNotExist) { + return errSkill.NotFound(name) + } + return errSkill.Remove(name, statErr) + } + + if !info.IsDir() { + return errSkill.NotValidDir(name) + } + + if removeErr := os.RemoveAll(dir); removeErr != nil { + return errSkill.Remove(name, removeErr) + } + return nil +} diff --git a/internal/skill/remove_test.go b/internal/skill/remove_test.go new file mode 100644 index 000000000..b0f1c1759 --- /dev/null +++ b/internal/skill/remove_test.go @@ -0,0 +1,54 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package skill + +import ( + "os" + "path/filepath" + "testing" +) + +func TestRemove_ExistingSkill(t *testing.T) { + dir := t.TempDir() + writeSkillManifest(t, dir, "to-remove", `--- +name: to-remove +description: Will be removed +--- +Body +`) + + if err := Remove(dir, "to-remove"); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Verify directory was deleted. + if _, err := os.Stat(filepath.Join(dir, "to-remove")); !os.IsNotExist(err) { + t.Error("expected skill directory to be deleted") + } +} + +func TestRemove_NonExistentSkill(t *testing.T) { + dir := t.TempDir() + + err := Remove(dir, "does-not-exist") + if err == nil { + t.Fatal("expected error for non-existent skill") + } +} + +func TestRemove_NotADirectory(t *testing.T) { + dir := t.TempDir() + // Create a regular file instead of a directory. 
+ if err := os.WriteFile(filepath.Join(dir, "not-a-dir"), []byte("file"), 0o644); err != nil { + t.Fatal(err) + } + + err := Remove(dir, "not-a-dir") + if err == nil { + t.Fatal("expected error when target is not a directory") + } +} diff --git a/internal/skill/testmain_test.go b/internal/skill/testmain_test.go new file mode 100644 index 000000000..53732729e --- /dev/null +++ b/internal/skill/testmain_test.go @@ -0,0 +1,19 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package skill + +import ( + "os" + "testing" + + "github.com/ActiveMemory/ctx/internal/assets/read/lookup" +) + +func TestMain(m *testing.M) { + lookup.Init() + os.Exit(m.Run()) +} diff --git a/internal/skill/types.go b/internal/skill/types.go new file mode 100644 index 000000000..f00ebbf8a --- /dev/null +++ b/internal/skill/types.go @@ -0,0 +1,22 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package skill + +// Skill represents a parsed skill manifest (SKILL.md) with YAML +// frontmatter and markdown instruction body. +// +// Fields: +// - Name: Unique identifier from frontmatter +// - Description: Short summary from frontmatter +// - Body: Markdown instruction content after frontmatter +// - Dir: Directory path containing the SKILL.md file +type Skill struct { + Name string `yaml:"name"` + Description string `yaml:"description,omitempty"` + Body string `yaml:"-"` + Dir string `yaml:"-"` +} diff --git a/internal/steering/doc.go b/internal/steering/doc.go new file mode 100644 index 000000000..5bf9d212b --- /dev/null +++ b/internal/steering/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +// Package steering manages behavioral guidance files with YAML frontmatter. +// +// Key exports: [Parse], [Print], [Filter], [LoadAll], [SyncTool], [SyncAll]. +// See source files for implementation details. +// Part of the internal subsystem. +package steering diff --git a/internal/steering/filter.go b/internal/steering/filter.go new file mode 100644 index 000000000..d073ab4bf --- /dev/null +++ b/internal/steering/filter.go @@ -0,0 +1,87 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package steering + +import ( + "os" + "path/filepath" + "sort" + "strings" + + "github.com/ActiveMemory/ctx/internal/config/file" + errSteering "github.com/ActiveMemory/ctx/internal/err/steering" + ctxIo "github.com/ActiveMemory/ctx/internal/io" +) + +// LoadAll reads all .md files from the steering directory and parses +// them into SteeringFile values. Returns an error if the directory +// cannot be read or any file fails to parse. +func LoadAll(steeringDir string) ([]*SteeringFile, error) { + entries, readDirErr := os.ReadDir(steeringDir) + if readDirErr != nil { + return nil, errSteering.ReadDir(steeringDir, readDirErr) + } + + var files []*SteeringFile + for _, e := range entries { + if e.IsDir() || !strings.HasSuffix(e.Name(), file.ExtMarkdown) { + continue + } + path := filepath.Join(steeringDir, e.Name()) + data, readErr := ctxIo.SafeReadUserFile(path) + if readErr != nil { + return nil, errSteering.ReadFile(path, readErr) + } + sf, parseErr := Parse(data, path) + if parseErr != nil { + return nil, parseErr + } + files = append(files, sf) + } + return files, nil +} + +// Filter returns steering files applicable for the given context. 
+// +// Inclusion rules: +// - always: included unconditionally +// - auto: included when prompt contains the file's description +// as a case-insensitive substring +// - manual: included only when the file's name appears in manualNames +// +// When tool is non-empty, files whose Tools list is non-nil and +// non-empty are excluded if the list does not contain the tool. +// When tool is empty, no tool filtering is applied. +// +// Results are sorted by ascending priority, then alphabetically +// by name on tie. +func Filter( + files []*SteeringFile, prompt string, + manualNames []string, tool string, +) []*SteeringFile { + promptLower := strings.ToLower(prompt) + + var result []*SteeringFile + for _, sf := range files { + if !matchInclusion(sf, promptLower, manualNames) { + continue + } + if !matchTool(sf, tool) { + continue + } + result = append(result, sf) + } + + sort.Slice(result, func(i, j int) bool { + if result[i].Priority != result[j].Priority { + return result[i].Priority < result[j].Priority + } + return result[i].Name < result[j].Name + }) + + return result +} diff --git a/internal/steering/filter_test.go b/internal/steering/filter_test.go new file mode 100644 index 000000000..0db3a61e3 --- /dev/null +++ b/internal/steering/filter_test.go @@ -0,0 +1,152 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package steering + +import ( + "testing" +) + +func TestFilter_AlwaysIncludedRegardlessOfPrompt(t *testing.T) { + files := []*SteeringFile{ + {Name: "always-on", Inclusion: InclusionAlways, Priority: 50}, + } + + got := Filter(files, "", nil, "") + if len(got) != 1 || got[0].Name != "always-on" { + t.Errorf("always-inclusion file should be included with empty prompt, got %v", names(got)) + } + + got = Filter(files, "completely unrelated prompt", nil, "") + if len(got) != 1 || got[0].Name != "always-on" { + t.Errorf("always-inclusion file should be included with any prompt, got %v", names(got)) + } +} + +func TestFilter_AutoIncludedWhenPromptMatchesDescription(t *testing.T) { + files := []*SteeringFile{ + {Name: "api-rules", Inclusion: InclusionAuto, Description: "REST API", Priority: 50}, + } + + got := Filter(files, "I need help with REST API design", nil, "") + if len(got) != 1 || got[0].Name != "api-rules" { + t.Errorf("auto file should match when prompt contains description, got %v", names(got)) + } + + // Case-insensitive match. 
+ got = Filter(files, "working on rest api endpoints", nil, "") + if len(got) != 1 { + t.Errorf("auto match should be case-insensitive, got %v", names(got)) + } +} + +func TestFilter_AutoExcludedWhenPromptDoesNotMatch(t *testing.T) { + files := []*SteeringFile{ + {Name: "api-rules", Inclusion: InclusionAuto, Description: "REST API", Priority: 50}, + } + + got := Filter(files, "fix the database migration", nil, "") + if len(got) != 0 { + t.Errorf("auto file should be excluded when prompt doesn't match, got %v", names(got)) + } +} + +func TestFilter_ManualIncludedOnlyWhenNamed(t *testing.T) { + files := []*SteeringFile{ + {Name: "security", Inclusion: InclusionManual, Priority: 50}, + } + + got := Filter(files, "anything", nil, "") + if len(got) != 0 { + t.Errorf("manual file should be excluded without explicit name, got %v", names(got)) + } + + got = Filter(files, "anything", []string{"security"}, "") + if len(got) != 1 || got[0].Name != "security" { + t.Errorf("manual file should be included when named, got %v", names(got)) + } +} + +func TestFilter_PriorityOrdering(t *testing.T) { + files := []*SteeringFile{ + {Name: "low", Inclusion: InclusionAlways, Priority: 90}, + {Name: "high", Inclusion: InclusionAlways, Priority: 10}, + {Name: "mid", Inclusion: InclusionAlways, Priority: 50}, + } + + got := Filter(files, "", nil, "") + if len(got) != 3 { + t.Fatalf("expected 3 files, got %d", len(got)) + } + want := []string{"high", "mid", "low"} + for i, name := range want { + if got[i].Name != name { + t.Errorf("position %d: got %q, want %q", i, got[i].Name, name) + } + } +} + +func TestFilter_AlphabeticalTieBreaking(t *testing.T) { + files := []*SteeringFile{ + {Name: "charlie", Inclusion: InclusionAlways, Priority: 50}, + {Name: "alpha", Inclusion: InclusionAlways, Priority: 50}, + {Name: "bravo", Inclusion: InclusionAlways, Priority: 50}, + } + + got := Filter(files, "", nil, "") + if len(got) != 3 { + t.Fatalf("expected 3 files, got %d", len(got)) + } + want := 
[]string{"alpha", "bravo", "charlie"} + for i, name := range want { + if got[i].Name != name { + t.Errorf("position %d: got %q, want %q", i, got[i].Name, name) + } + } +} + +func TestFilter_ToolFilterExcludesNonMatchingTool(t *testing.T) { + files := []*SteeringFile{ + {Name: "cursor-only", Inclusion: InclusionAlways, Priority: 50, Tools: []string{"claude", "cursor"}}, + } + + got := Filter(files, "", nil, "kiro") + if len(got) != 0 { + t.Errorf("file with tools=[claude,cursor] should be excluded for tool=kiro, got %v", names(got)) + } +} + +func TestFilter_EmptyToolsListIncludedForAnyTool(t *testing.T) { + files := []*SteeringFile{ + {Name: "universal", Inclusion: InclusionAlways, Priority: 50, Tools: nil}, + } + + got := Filter(files, "", nil, "kiro") + if len(got) != 1 || got[0].Name != "universal" { + t.Errorf("file with empty tools should be included for any tool, got %v", names(got)) + } +} + +func TestFilter_EmptyToolParameterSkipsToolFiltering(t *testing.T) { + files := []*SteeringFile{ + {Name: "restricted", Inclusion: InclusionAlways, Priority: 50, Tools: []string{"cursor"}}, + {Name: "universal", Inclusion: InclusionAlways, Priority: 50, Tools: nil}, + } + + got := Filter(files, "", nil, "") + if len(got) != 2 { + t.Errorf("empty tool param should skip tool filtering, got %v", names(got)) + } +} + +// names extracts file names for readable test output. +func names(files []*SteeringFile) []string { + out := make([]string, len(files)) + for i, f := range files { + out[i] = f.Name + } + return out +} diff --git a/internal/steering/format.go b/internal/steering/format.go new file mode 100644 index 000000000..67f9d6c2c --- /dev/null +++ b/internal/steering/format.go @@ -0,0 +1,188 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package steering + +import ( + "bytes" + "path/filepath" + "strings" + + "gopkg.in/yaml.v3" + + "github.com/ActiveMemory/ctx/internal/config/file" + "github.com/ActiveMemory/ctx/internal/config/fs" + cfgHook "github.com/ActiveMemory/ctx/internal/config/hook" + "github.com/ActiveMemory/ctx/internal/config/token" + errSteering "github.com/ActiveMemory/ctx/internal/err/steering" + ctxIo "github.com/ActiveMemory/ctx/internal/io" +) + +// isSyncableTool returns true if the tool supports native-format sync. +func isSyncableTool(tool string) bool { + for _, t := range syncableTools { + if t == tool { + return true + } + } + return false +} + +// nativePath returns the output file path for a steering file in the +// given tool's native format. +func nativePath( + projectRoot, tool, name string, +) string { + switch tool { + case cfgHook.ToolCursor: + return filepath.Join( + projectRoot, dirCursorDot, + dirRules, name+extMDC, + ) + case cfgHook.ToolCline: + return filepath.Join( + projectRoot, dirClinerules, + name+file.ExtMarkdown, + ) + case cfgHook.ToolKiro: + return filepath.Join( + projectRoot, dirKiroDot, + dirSteering, name+file.ExtMarkdown, + ) + default: + return "" + } +} + +// validateOutputPath checks that the output path resolves within the +// project root boundary. This prevents path traversal via crafted +// steering file names. +func validateOutputPath(outPath, projectRoot string) error { + absOut, absOutErr := filepath.Abs(outPath) + if absOutErr != nil { + return errSteering.ResolveOutput(absOutErr) + } + absRoot, absRootErr := filepath.Abs(projectRoot) + if absRootErr != nil { + return errSteering.ResolveRoot(absRootErr) + } + + rel, relErr := filepath.Rel(absRoot, absOut) + if relErr != nil { + return errSteering.ComputeRelPath(relErr) + } + + // Reject paths that escape the project root. 
+ escape := parentDir + string(filepath.Separator) + if strings.HasPrefix(rel, escape) || rel == parentDir { + return errSteering.OutputEscapesRoot(outPath, projectRoot) + } + + return nil +} + +// formatNative converts a steering file to the tool's native format. +func formatNative(tool string, sf *SteeringFile) []byte { + switch tool { + case cfgHook.ToolCursor: + return formatCursor(sf) + case cfgHook.ToolCline: + return formatCline(sf) + case cfgHook.ToolKiro: + return formatKiro(sf) + default: + return nil + } +} + +// formatCursor produces Cursor-compatible .mdc content with frontmatter. +func formatCursor(sf *SteeringFile) []byte { + fm := cursorFrontmatter{ + Description: sf.Description, + Globs: []any{}, + AlwaysApply: sf.Inclusion == InclusionAlways, + } + + raw, _ := yaml.Marshal(fm) + + var buf bytes.Buffer + buf.WriteString(frontmatterDelimiter) + buf.WriteByte(token.NewlineLF[0]) + buf.Write(raw) + buf.WriteString(frontmatterDelimiter) + buf.WriteByte(token.NewlineLF[0]) + if sf.Body != "" { + buf.WriteString(sf.Body) + } + return buf.Bytes() +} + +// formatCline produces Cline-compatible plain markdown (no frontmatter). +func formatCline(sf *SteeringFile) []byte { + var buf bytes.Buffer + buf.WriteString(token.HeadingLevelOneStart) + buf.WriteString(sf.Name) + buf.WriteString(doubleNewline) + if sf.Body != "" { + buf.WriteString(sf.Body) + } + return buf.Bytes() +} + +// formatKiro produces Kiro-compatible steering file with frontmatter. +func formatKiro(sf *SteeringFile) []byte { + fm := kiroFrontmatter{ + Name: sf.Name, + Description: sf.Description, + Mode: mapKiroMode(sf.Inclusion), + } + + raw, _ := yaml.Marshal(fm) + + var buf bytes.Buffer + buf.WriteString(frontmatterDelimiter) + buf.WriteByte(token.NewlineLF[0]) + buf.Write(raw) + buf.WriteString(frontmatterDelimiter) + buf.WriteByte(token.NewlineLF[0]) + if sf.Body != "" { + buf.WriteString(sf.Body) + } + return buf.Bytes() +} + +// mapKiroMode maps ctx inclusion modes to Kiro equivalents. 
+func mapKiroMode(inc InclusionMode) string { + switch inc { + case InclusionAlways: + return string(InclusionAlways) + case InclusionAuto: + return string(InclusionAuto) + case InclusionManual: + return string(InclusionManual) + default: + return string(InclusionManual) + } +} + +// unchanged returns true if the file at path already exists and has +// the same content as data. +func unchanged(path string, data []byte) bool { + existing, err := ctxIo.SafeReadUserFile(path) + if err != nil { + return false + } + return bytes.Equal(existing, data) +} + +// writeFile creates parent directories as needed and writes data to path. +func writeFile(path string, data []byte) error { + dir := filepath.Dir(path) + if mkdirErr := ctxIo.SafeMkdirAll(dir, fs.PermExec); mkdirErr != nil { + return mkdirErr + } + return ctxIo.SafeWriteFile(path, data, fs.PermFile) +} diff --git a/internal/steering/frontmatter.go b/internal/steering/frontmatter.go new file mode 100644 index 000000000..65b823eaf --- /dev/null +++ b/internal/steering/frontmatter.go @@ -0,0 +1,58 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package steering + +import ( + "strings" + + "github.com/ActiveMemory/ctx/internal/config/token" + errSteering "github.com/ActiveMemory/ctx/internal/err/steering" +) + +// splitFrontmatter separates YAML frontmatter from the markdown body. +// Frontmatter must start with a --- line and end with a second --- line. +func splitFrontmatter( + data []byte, +) (frontmatter []byte, body string, err error) { + content := string(data) + content = strings.TrimLeft(content, trimCR) + + if !strings.HasPrefix(content, frontmatterDelimiter) { + return nil, "", errSteering.MissingOpeningDelimiter() + } + + // Skip the opening delimiter line. 
+ rest := content[len(frontmatterDelimiter):] + rest = strings.TrimPrefix(rest, token.NewlineLF) + + needle := token.NewlineLF + frontmatterDelimiter + idx := strings.Index(rest, needle) + if idx < 0 { + return nil, "", errSteering.MissingClosingDelimiter() + } + + fm := rest[:idx] + + // Skip past the closing delimiter line. + after := rest[idx+1+len(frontmatterDelimiter):] + // Trim exactly one leading newline from the body if present. + after = strings.TrimPrefix(after, token.NewlineLF) + + return []byte(fm), after, nil +} + +// applyDefaults sets default values for fields not present in the +// parsed frontmatter. +func applyDefaults(sf *SteeringFile) { + if sf.Inclusion == "" { + sf.Inclusion = defaultInclusion + } + if sf.Priority == 0 { + sf.Priority = defaultPriority + } + // Tools: nil means all tools — no default needed. +} diff --git a/internal/steering/match.go b/internal/steering/match.go new file mode 100644 index 000000000..bb2edebd6 --- /dev/null +++ b/internal/steering/match.go @@ -0,0 +1,46 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package steering + +import ( + "slices" + "strings" +) + +// matchInclusion checks whether a steering file should be included +// based on its inclusion mode. +func matchInclusion( + sf *SteeringFile, promptLower string, + manualNames []string, +) bool { + switch sf.Inclusion { + case InclusionAlways: + return true + case InclusionAuto: + if sf.Description == "" { + return false + } + return strings.Contains(promptLower, strings.ToLower(sf.Description)) + case InclusionManual: + return slices.Contains(manualNames, sf.Name) + default: + return false + } +} + +// matchTool checks whether a steering file applies to the given tool. +// When the file's Tools list is nil or empty, it applies to all tools. +// When tool is empty, no filtering is applied. 
+func matchTool(sf *SteeringFile, tool string) bool { + if tool == "" { + return true + } + if len(sf.Tools) == 0 { + return true + } + return slices.Contains(sf.Tools, tool) +} diff --git a/internal/steering/parse.go b/internal/steering/parse.go new file mode 100644 index 000000000..cb3ff8214 --- /dev/null +++ b/internal/steering/parse.go @@ -0,0 +1,86 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package steering + +import ( + "bytes" + + "gopkg.in/yaml.v3" + + "github.com/ActiveMemory/ctx/internal/config/token" + errSteering "github.com/ActiveMemory/ctx/internal/err/steering" +) + +// frontmatterDelimiter is the YAML frontmatter boundary marker. +const frontmatterDelimiter = "---" + +// defaultInclusion is the default inclusion mode when omitted. +const defaultInclusion = InclusionManual + +// defaultPriority is the default priority when omitted. +const defaultPriority = 50 + +// trimCR is the character set trimmed from the start of +// raw frontmatter content to normalize line endings. +const trimCR = "\n\r" + +// Parse reads a steering file from bytes, extracting YAML frontmatter +// and markdown body. The filePath is stored on the returned SteeringFile +// for error reporting and identification. +// +// Frontmatter must be delimited by --- lines at the top of the file. +// Missing optional fields receive defaults: inclusion → manual, +// tools → nil (all), priority → 50. +// +// Returns an error if frontmatter contains invalid YAML, identifying +// the file path and the parsing failure. 
+func Parse(data []byte, filePath string) (*SteeringFile, error) {
+	raw, body, splitErr := splitFrontmatter(data)
+	if splitErr != nil {
+		return nil, errSteering.Parse(filePath, splitErr)
+	}
+
+	sf := &SteeringFile{}
+	if unmarshalErr := yaml.Unmarshal(raw, sf); unmarshalErr != nil {
+		return nil, errSteering.InvalidYAML(filePath, unmarshalErr)
+	}
+
+	applyDefaults(sf)
+	sf.Body = body
+	sf.Path = filePath
+
+	return sf, nil
+}
+
+// Print serializes a SteeringFile back to frontmatter + markdown bytes.
+//
+// The output format is:
+//
+//	---
+//	<yaml frontmatter>
+//	---
+//	<markdown body>
+//
+// Round-trip property: Parse(Print(Parse(data))) == Parse(data) for all
+// valid inputs.
+func Print(sf *SteeringFile) []byte {
+	var buf bytes.Buffer
+
+	raw, _ := yaml.Marshal(sf)
+
+	buf.WriteString(frontmatterDelimiter)
+	buf.WriteByte(token.NewlineLF[0])
+	buf.Write(raw)
+	buf.WriteString(frontmatterDelimiter)
+	buf.WriteByte(token.NewlineLF[0])
+
+	if sf.Body != "" {
+		buf.WriteString(sf.Body)
+	}
+
+	return buf.Bytes()
+}
diff --git a/internal/steering/parse_prop_test.go b/internal/steering/parse_prop_test.go
new file mode 100644
index 000000000..6e1c95795
--- /dev/null
+++ b/internal/steering/parse_prop_test.go
@@ -0,0 +1,196 @@
+// / ctx: https://ctx.ist
+// ,'`./ do you remember?
+// `.,'\
+// \ Copyright 2026-present Context contributors.
+// SPDX-License-Identifier: Apache-2.0
+
+package steering
+
+import (
+	"math/rand"
+	"reflect"
+	"strings"
+	"testing"
+	"testing/quick"
+)
+
+// validSteeringFile is a wrapper around SteeringFile that implements
+// quick.Generator to produce valid inputs for property testing.
+type validSteeringFile struct {
+	Name        string
+	Description string
+	Inclusion   InclusionMode
+	Tools       []string
+	Priority    int
+	Body        string
+}
+
+var inclusionModes = []InclusionMode{InclusionAlways, InclusionAuto, InclusionManual}
+
+var validTools = []string{"claude", "cursor", "cline", "kiro", "codex"}
+
+// Generate implements quick.Generator for validSteeringFile.
+func (validSteeringFile) Generate(rand *rand.Rand, size int) reflect.Value { + v := validSteeringFile{ + Name: randAlphaName(rand, size), + Inclusion: inclusionModes[rand.Intn(len(inclusionModes))], + Priority: rand.Intn(99) + 1, // 1–99, avoids 0 (which triggers default) + } + + // Optional description. + if rand.Intn(2) == 0 { + v.Description = randSafeString(rand, size) + } + + // Optional tools subset. + if rand.Intn(2) == 0 { + n := rand.Intn(len(validTools)) + 1 + perm := rand.Perm(len(validTools)) + v.Tools = make([]string, n) + for i := 0; i < n; i++ { + v.Tools[i] = validTools[perm[i]] + } + } + + // Optional body — must not contain frontmatter delimiter on its own line. + if rand.Intn(2) == 0 { + v.Body = randSafeBody(rand, size) + } + + return reflect.ValueOf(v) +} + +// TestProperty_RoundTripConsistency verifies the round-trip property: +// Parse(Print(Parse(data))) == Parse(data) for all valid inputs. +// +// **Validates: Requirements 1.8, 19.1** +func TestProperty_RoundTripConsistency(t *testing.T) { + const filePath = "test.md" + + f := func(v validSteeringFile) bool { + // Build a SteeringFile from the generated values. + sf := &SteeringFile{ + Name: v.Name, + Description: v.Description, + Inclusion: v.Inclusion, + Tools: v.Tools, + Priority: v.Priority, + Body: v.Body, + } + + // First trip: Print → Parse. + printed := Print(sf) + parsed1, err := Parse(printed, filePath) + if err != nil { + t.Logf("first Parse failed: %v\ninput bytes:\n%s", err, printed) + return false + } + + // Second trip: Print → Parse again. + printed2 := Print(parsed1) + parsed2, err := Parse(printed2, filePath) + if err != nil { + t.Logf("second Parse failed: %v\ninput bytes:\n%s", err, printed2) + return false + } + + // Structural equality (ignoring Path, which is set from filePath arg). 
+ if parsed1.Name != parsed2.Name { + t.Logf("Name mismatch: %q vs %q", parsed1.Name, parsed2.Name) + return false + } + if parsed1.Description != parsed2.Description { + t.Logf("Description mismatch: %q vs %q", parsed1.Description, parsed2.Description) + return false + } + if parsed1.Inclusion != parsed2.Inclusion { + t.Logf("Inclusion mismatch: %q vs %q", parsed1.Inclusion, parsed2.Inclusion) + return false + } + if parsed1.Priority != parsed2.Priority { + t.Logf("Priority mismatch: %d vs %d", parsed1.Priority, parsed2.Priority) + return false + } + if !toolsEqual(parsed1.Tools, parsed2.Tools) { + t.Logf("Tools mismatch: %v vs %v", parsed1.Tools, parsed2.Tools) + return false + } + if parsed1.Body != parsed2.Body { + t.Logf("Body mismatch:\n first: %q\n second: %q", parsed1.Body, parsed2.Body) + return false + } + + return true + } + + cfg := &quick.Config{MaxCount: 200} + if err := quick.Check(f, cfg); err != nil { + t.Errorf("round-trip property failed: %v", err) + } +} + +// --- helpers --- + +const alphaChars = "abcdefghijklmnopqrstuvwxyz" + +// randAlphaName generates a non-empty lowercase alphabetic name. +func randAlphaName(r *rand.Rand, size int) string { + n := r.Intn(max(size, 1)) + 1 + if n > 20 { + n = 20 + } + b := make([]byte, n) + for i := range b { + b[i] = alphaChars[r.Intn(len(alphaChars))] + } + return string(b) +} + +// safeChars are characters that won't break YAML or frontmatter parsing. +const safeChars = "abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789.,;:!?()-" + +// randSafeString generates a string safe for YAML values (no newlines, no special YAML chars). +func randSafeString(r *rand.Rand, size int) string { + n := r.Intn(max(size, 1)) + 1 + if n > 40 { + n = 40 + } + b := make([]byte, n) + for i := range b { + b[i] = safeChars[r.Intn(len(safeChars))] + } + return string(b) +} + +// randSafeBody generates markdown body content that does not contain +// a frontmatter delimiter (---) on its own line. 
+func randSafeBody(r *rand.Rand, size int) string { + lines := r.Intn(max(size, 1)) + 1 + if lines > 5 { + lines = 5 + } + var sb strings.Builder + for i := 0; i < lines; i++ { + line := randSafeString(r, size) + // Ensure no line is exactly "---" which would break frontmatter. + if strings.TrimSpace(line) == "---" { + line = "safe content" + } + sb.WriteString(line) + sb.WriteByte('\n') + } + return sb.String() +} + +// toolsEqual compares two string slices for equality. +func toolsEqual(a, b []string) bool { + if len(a) != len(b) { + return false + } + for i := range a { + if a[i] != b[i] { + return false + } + } + return true +} diff --git a/internal/steering/parse_test.go b/internal/steering/parse_test.go new file mode 100644 index 000000000..1ef430a95 --- /dev/null +++ b/internal/steering/parse_test.go @@ -0,0 +1,231 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package steering + +import ( + "strings" + "testing" +) + +func TestParse_FullFrontmatter(t *testing.T) { + input := `--- +name: api-standards +description: REST API design conventions +inclusion: auto +tools: + - claude + - cursor +priority: 10 +--- +# API Standards +Use RESTful conventions. 
+` + sf, err := Parse([]byte(input), "steering/api-standards.md") + if err != nil { + t.Fatalf("Parse() error = %v", err) + } + + if sf.Name != "api-standards" { + t.Errorf("Name = %q, want %q", sf.Name, "api-standards") + } + if sf.Description != "REST API design conventions" { + t.Errorf("Description = %q, want %q", sf.Description, "REST API design conventions") + } + if sf.Inclusion != InclusionAuto { + t.Errorf("Inclusion = %q, want %q", sf.Inclusion, InclusionAuto) + } + if len(sf.Tools) != 2 || sf.Tools[0] != "claude" || sf.Tools[1] != "cursor" { + t.Errorf("Tools = %v, want [claude cursor]", sf.Tools) + } + if sf.Priority != 10 { + t.Errorf("Priority = %d, want %d", sf.Priority, 10) + } + if sf.Path != "steering/api-standards.md" { + t.Errorf("Path = %q, want %q", sf.Path, "steering/api-standards.md") + } + if !strings.Contains(sf.Body, "# API Standards") { + t.Errorf("Body missing expected content, got %q", sf.Body) + } +} + +func TestParse_DefaultValues(t *testing.T) { + input := `--- +name: minimal +--- +Some body content. 
+` + sf, err := Parse([]byte(input), "test.md") + if err != nil { + t.Fatalf("Parse() error = %v", err) + } + + if sf.Inclusion != InclusionManual { + t.Errorf("Inclusion = %q, want default %q", sf.Inclusion, InclusionManual) + } + if sf.Tools != nil { + t.Errorf("Tools = %v, want nil (all tools)", sf.Tools) + } + if sf.Priority != 50 { + t.Errorf("Priority = %d, want default %d", sf.Priority, 50) + } +} + +func TestParse_EmptyBody(t *testing.T) { + input := `--- +name: empty-body +--- +` + sf, err := Parse([]byte(input), "test.md") + if err != nil { + t.Fatalf("Parse() error = %v", err) + } + + if sf.Body != "" { + t.Errorf("Body = %q, want empty", sf.Body) + } +} + +func TestParse_InvalidYAML(t *testing.T) { + input := `--- +name: [invalid + yaml: {broken +--- +body +` + _, err := Parse([]byte(input), "bad-file.md") + if err == nil { + t.Fatal("Parse() expected error for invalid YAML, got nil") + } + if !strings.Contains(err.Error(), "bad-file.md") { + t.Errorf("error should identify file path, got: %v", err) + } + if !strings.Contains(err.Error(), "invalid YAML frontmatter") { + t.Errorf("error should describe YAML failure, got: %v", err) + } +} + +func TestParse_MissingOpeningDelimiter(t *testing.T) { + input := `name: no-delimiters +--- +body +` + _, err := Parse([]byte(input), "test.md") + if err == nil { + t.Fatal("Parse() expected error for missing opening delimiter") + } + if !strings.Contains(err.Error(), "missing opening frontmatter delimiter") { + t.Errorf("unexpected error: %v", err) + } +} + +func TestParse_MissingClosingDelimiter(t *testing.T) { + input := `--- +name: no-close +` + _, err := Parse([]byte(input), "test.md") + if err == nil { + t.Fatal("Parse() expected error for missing closing delimiter") + } + if !strings.Contains(err.Error(), "missing closing frontmatter delimiter") { + t.Errorf("unexpected error: %v", err) + } +} + +func TestParse_InclusionAlways(t *testing.T) { + input := `--- +name: always-on +inclusion: always +--- +Always 
included. +` + sf, err := Parse([]byte(input), "test.md") + if err != nil { + t.Fatalf("Parse() error = %v", err) + } + if sf.Inclusion != InclusionAlways { + t.Errorf("Inclusion = %q, want %q", sf.Inclusion, InclusionAlways) + } +} + +func TestPrint_RoundTrip(t *testing.T) { + input := `--- +name: round-trip +description: Test round-trip +inclusion: auto +tools: + - kiro +priority: 25 +--- +# Round Trip +Content here. +` + sf1, err := Parse([]byte(input), "test.md") + if err != nil { + t.Fatalf("first Parse() error = %v", err) + } + + printed := Print(sf1) + + sf2, err := Parse(printed, "test.md") + if err != nil { + t.Fatalf("second Parse() error = %v", err) + } + + if sf1.Name != sf2.Name { + t.Errorf("Name mismatch: %q vs %q", sf1.Name, sf2.Name) + } + if sf1.Description != sf2.Description { + t.Errorf("Description mismatch: %q vs %q", sf1.Description, sf2.Description) + } + if sf1.Inclusion != sf2.Inclusion { + t.Errorf("Inclusion mismatch: %q vs %q", sf1.Inclusion, sf2.Inclusion) + } + if sf1.Priority != sf2.Priority { + t.Errorf("Priority mismatch: %d vs %d", sf1.Priority, sf2.Priority) + } + if len(sf1.Tools) != len(sf2.Tools) { + t.Errorf("Tools length mismatch: %d vs %d", len(sf1.Tools), len(sf2.Tools)) + } + if sf1.Body != sf2.Body { + t.Errorf("Body mismatch:\n got: %q\n want: %q", sf2.Body, sf1.Body) + } +} + +func TestPrint_MinimalFile(t *testing.T) { + sf := &SteeringFile{ + Name: "minimal", + Inclusion: InclusionManual, + Priority: 50, + Body: "Hello.\n", + } + + out := Print(sf) + result := string(out) + + if !strings.HasPrefix(result, "---\n") { + t.Error("Print output should start with ---") + } + if !strings.Contains(result, "name: minimal") { + t.Error("Print output should contain name field") + } + if !strings.HasSuffix(result, "Hello.\n") { + t.Errorf("Print output should end with body, got %q", result) + } +} + +func TestPrint_NilToolsOmitted(t *testing.T) { + sf := &SteeringFile{ + Name: "no-tools", + Inclusion: InclusionManual, + Priority: 
50, + } + + out := string(Print(sf)) + if strings.Contains(out, "tools:") { + t.Error("Print should omit tools field when nil") + } +} diff --git a/internal/steering/sync.go b/internal/steering/sync.go new file mode 100644 index 000000000..840581878 --- /dev/null +++ b/internal/steering/sync.go @@ -0,0 +1,162 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package steering + +import ( + "strings" + + cfgHook "github.com/ActiveMemory/ctx/internal/config/hook" + "github.com/ActiveMemory/ctx/internal/config/token" + errSteering "github.com/ActiveMemory/ctx/internal/err/steering" +) + +// Tool-native directory and extension constants. +const ( + // dirCursorDot is the Cursor configuration directory. + dirCursorDot = ".cursor" + // dirRules is the Cursor rules subdirectory. + dirRules = "rules" + // extMDC is the Cursor MDC rule file extension. + extMDC = ".mdc" + // dirClinerules is the Cline rules directory. + dirClinerules = ".clinerules" + // dirKiroDot is the Kiro configuration directory. + dirKiroDot = ".kiro" + // dirSteering is the Kiro steering subdirectory. + dirSteering = "steering" + // parentDir is the relative parent directory component. + parentDir = ".." +) + +// doubleNewline is the separator between a heading +// and body in Cline-formatted steering output. +const doubleNewline = "\n\n" + +// syncableTools lists the tool identifiers that support +// native-format sync. Claude and Codex use ctx agent +// directly and do not need synced files. +var syncableTools = []string{ + cfgHook.ToolCursor, + cfgHook.ToolCline, + cfgHook.ToolKiro, +} + +// SyncTool writes steering files to the tool-native format directory. 
+// It loads all steering files from steeringDir, filters out files whose +// tools list excludes the target tool, formats each file in the tool's +// native format, and writes it to the appropriate output directory under +// projectRoot. +// +// Files whose content hasn't changed are skipped (idempotent). +// Output paths are validated to resolve within the project root boundary. +// +// Supported tools: cursor, cline, kiro. +func SyncTool( + steeringDir, projectRoot, tool string, +) (SyncReport, error) { + if !isSyncableTool(tool) { + supported := strings.Join( + syncableTools, token.CommaSpace, + ) + return SyncReport{}, errSteering.UnsupportedTool( + tool, supported, + ) + } + + files, loadErr := LoadAll(steeringDir) + if loadErr != nil { + return SyncReport{}, loadErr + } + + var report SyncReport + for _, sf := range files { + if !matchTool(sf, tool) { + report.Skipped = append(report.Skipped, sf.Name) + continue + } + + outPath := nativePath(projectRoot, tool, sf.Name) + + if validateErr := validateOutputPath( + outPath, projectRoot, + ); validateErr != nil { + report.Errors = append( + report.Errors, + errSteering.SyncName(sf.Name, validateErr), + ) + continue + } + + content := formatNative(tool, sf) + + if unchanged(outPath, content) { + report.Skipped = append(report.Skipped, sf.Name) + continue + } + + if writeErr := writeFile(outPath, content); writeErr != nil { + report.Errors = append( + report.Errors, + errSteering.WriteFile(outPath, writeErr), + ) + continue + } + + report.Written = append(report.Written, sf.Name) + } + + return report, nil +} + +// SyncAll syncs steering files to all supported +// tool-native formats. It calls SyncTool for each +// syncable tool and merges the reports. 
+func SyncAll( + steeringDir, projectRoot string, +) (SyncReport, error) { + var merged SyncReport + for _, tool := range syncableTools { + r, err := SyncTool(steeringDir, projectRoot, tool) + if err != nil { + return merged, errSteering.SyncAll(tool, err) + } + merged.Written = append(merged.Written, r.Written...) + merged.Skipped = append(merged.Skipped, r.Skipped...) + merged.Errors = append(merged.Errors, r.Errors...) + } + return merged, nil +} + +// StaleFiles returns the names of steering files whose synced +// tool-native output differs from what SyncTool would produce. +// This is a read-only check — no files are written. +// +// Returns nil if no stale files are found or if the steering +// directory cannot be read. +func StaleFiles(steeringDir, projectRoot, tool string) []string { + if !isSyncableTool(tool) { + return nil + } + + files, err := LoadAll(steeringDir) + if err != nil { + return nil + } + + var stale []string + for _, sf := range files { + if !matchTool(sf, tool) { + continue + } + outPath := nativePath(projectRoot, tool, sf.Name) + content := formatNative(tool, sf) + if !unchanged(outPath, content) { + stale = append(stale, sf.Name) + } + } + return stale +} diff --git a/internal/steering/sync_prop_test.go b/internal/steering/sync_prop_test.go new file mode 100644 index 000000000..f981810f2 --- /dev/null +++ b/internal/steering/sync_prop_test.go @@ -0,0 +1,138 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package steering + +import ( + "math/rand" + "os" + "path/filepath" + "reflect" + "testing" + "testing/quick" +) + +// syncInput bundles one or more valid steering files for property testing +// the sync idempotence property. +type syncInput struct { + Files []validSteeringFile +} + +// Generate implements quick.Generator for syncInput. +// It produces 1–3 steering files with distinct names. 
+func (syncInput) Generate(r *rand.Rand, size int) reflect.Value { + n := r.Intn(3) + 1 + seen := make(map[string]bool) + var files []validSteeringFile + for len(files) < n { + v := validSteeringFile{}.Generate(r, size).Interface().(validSteeringFile) + if seen[v.Name] { + continue + } + seen[v.Name] = true + files = append(files, v) + } + return reflect.ValueOf(syncInput{Files: files}) +} + +// TestProperty_SyncIdempotence verifies the sync idempotence property: +// running SyncTool twice produces identical output files and the second +// run reports zero written files (all skipped as unchanged). +// +// **Validates: Requirements 19.4** +func TestProperty_SyncIdempotence(t *testing.T) { + f := func(input syncInput) bool { + root := t.TempDir() + steeringDir := filepath.Join(root, ".context", "steering") + if err := os.MkdirAll(steeringDir, 0o755); err != nil { + t.Logf("mkdir steering dir: %v", err) + return false + } + + // Write generated steering files to disk. + for _, v := range input.Files { + sf := &SteeringFile{ + Name: v.Name, + Description: v.Description, + Inclusion: v.Inclusion, + Tools: v.Tools, + Priority: v.Priority, + Body: v.Body, + } + data := Print(sf) + path := filepath.Join(steeringDir, v.Name+".md") + if err := os.WriteFile(path, data, 0o644); err != nil { + t.Logf("write steering file %s: %v", v.Name, err) + return false + } + } + + tools := []string{"cursor", "cline", "kiro"} + for _, tool := range tools { + // First sync — writes files. + r1, err := SyncTool(steeringDir, root, tool) + if err != nil { + t.Logf("first SyncTool(%s): %v", tool, err) + return false + } + if len(r1.Errors) > 0 { + t.Logf("first SyncTool(%s) errors: %v", tool, r1.Errors) + return false + } + + // Capture output file contents after first sync. 
+ snapshot := make(map[string][]byte) + for _, name := range r1.Written { + outPath := nativePath(root, tool, name) + data, readErr := os.ReadFile(outPath) + if readErr != nil { + t.Logf("read output %s: %v", outPath, readErr) + return false + } + snapshot[name] = data + } + + // Second sync — should skip all files. + r2, err := SyncTool(steeringDir, root, tool) + if err != nil { + t.Logf("second SyncTool(%s): %v", tool, err) + return false + } + if len(r2.Errors) > 0 { + t.Logf("second SyncTool(%s) errors: %v", tool, r2.Errors) + return false + } + + // Verify second run wrote zero files. + if len(r2.Written) != 0 { + t.Logf("tool %s: second sync wrote %d files, expected 0: %v", + tool, len(r2.Written), r2.Written) + return false + } + + // Verify output files are byte-identical after second sync. + for name, before := range snapshot { + outPath := nativePath(root, tool, name) + after, readErr := os.ReadFile(outPath) + if readErr != nil { + t.Logf("re-read output %s: %v", outPath, readErr) + return false + } + if string(before) != string(after) { + t.Logf("tool %s, file %s: content changed between syncs", tool, name) + return false + } + } + } + + return true + } + + cfg := &quick.Config{MaxCount: 100} + if err := quick.Check(f, cfg); err != nil { + t.Errorf("sync idempotence property failed: %v", err) + } +} diff --git a/internal/steering/sync_test.go b/internal/steering/sync_test.go new file mode 100644 index 000000000..048b8ad61 --- /dev/null +++ b/internal/steering/sync_test.go @@ -0,0 +1,332 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package steering + +import ( + "os" + "path/filepath" + "strings" + "testing" +) + +// writeSteering creates a steering file in dir with the given content. 
+func writeSteering(t *testing.T, dir, name, content string) { + t.Helper() + if err := os.MkdirAll(dir, 0o755); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(dir, name+".md"), []byte(content), 0o644); err != nil { + t.Fatal(err) + } +} + +const steeringAlways = `--- +name: api-rules +description: REST API conventions +inclusion: always +priority: 10 +--- +Use RESTful conventions. +` + +const steeringCursorOnly = `--- +name: cursor-only +description: Cursor-specific rules +inclusion: auto +tools: [cursor] +priority: 50 +--- +Cursor body content. +` + +const steeringManual = `--- +name: manual-rule +description: Manual rule +inclusion: manual +priority: 50 +--- +Manual body. +` + +func TestSyncTool_CursorFormat(t *testing.T) { + root := t.TempDir() + steeringDir := filepath.Join(root, ".context", "steering") + writeSteering(t, steeringDir, "api-rules", steeringAlways) + + report, err := SyncTool(steeringDir, root, "cursor") + if err != nil { + t.Fatalf("SyncTool: %v", err) + } + if len(report.Written) != 1 || report.Written[0] != "api-rules" { + t.Errorf("expected 1 written file, got %v", report.Written) + } + + out := filepath.Join(root, ".cursor", "rules", "api-rules.mdc") + data, err := os.ReadFile(out) + if err != nil { + t.Fatalf("read output: %v", err) + } + content := string(data) + + // Verify Cursor frontmatter. 
+ if !strings.Contains(content, "alwaysApply: true") { + t.Error("cursor output should have alwaysApply: true for always inclusion") + } + if !strings.Contains(content, "description: REST API conventions") { + t.Error("cursor output should contain description") + } + if !strings.Contains(content, "Use RESTful conventions.") { + t.Error("cursor output should contain body") + } +} + +func TestSyncTool_ClineFormat(t *testing.T) { + root := t.TempDir() + steeringDir := filepath.Join(root, ".context", "steering") + writeSteering(t, steeringDir, "api-rules", steeringAlways) + + report, err := SyncTool(steeringDir, root, "cline") + if err != nil { + t.Fatalf("SyncTool: %v", err) + } + if len(report.Written) != 1 { + t.Errorf("expected 1 written, got %v", report.Written) + } + + out := filepath.Join(root, ".clinerules", "api-rules.md") + data, err := os.ReadFile(out) + if err != nil { + t.Fatalf("read output: %v", err) + } + content := string(data) + + // Cline: plain markdown, no frontmatter. + if strings.Contains(content, "---") { + t.Error("cline output should not contain frontmatter delimiters") + } + if !strings.HasPrefix(content, "# api-rules") { + t.Error("cline output should start with # ") + } + if !strings.Contains(content, "Use RESTful conventions.") { + t.Error("cline output should contain body") + } +} + +func TestSyncTool_KiroFormat(t *testing.T) { + root := t.TempDir() + steeringDir := filepath.Join(root, ".context", "steering") + writeSteering(t, steeringDir, "api-rules", steeringAlways) + + report, err := SyncTool(steeringDir, root, "kiro") + if err != nil { + t.Fatalf("SyncTool: %v", err) + } + if len(report.Written) != 1 { + t.Errorf("expected 1 written, got %v", report.Written) + } + + out := filepath.Join(root, ".kiro", "steering", "api-rules.md") + data, err := os.ReadFile(out) + if err != nil { + t.Fatalf("read output: %v", err) + } + content := string(data) + + if !strings.Contains(content, "name: api-rules") { + t.Error("kiro output should contain 
name field") + } + if !strings.Contains(content, "mode: always") { + t.Error("kiro output should map inclusion to mode") + } + if !strings.Contains(content, "Use RESTful conventions.") { + t.Error("kiro output should contain body") + } +} + +func TestSyncTool_SkipsExcludedTool(t *testing.T) { + root := t.TempDir() + steeringDir := filepath.Join(root, ".context", "steering") + writeSteering(t, steeringDir, "cursor-only", steeringCursorOnly) + + report, err := SyncTool(steeringDir, root, "kiro") + if err != nil { + t.Fatalf("SyncTool: %v", err) + } + if len(report.Written) != 0 { + t.Errorf("expected 0 written for excluded tool, got %v", report.Written) + } + if len(report.Skipped) != 1 { + t.Errorf("expected 1 skipped, got %v", report.Skipped) + } +} + +func TestSyncTool_IdempotentSkipsUnchanged(t *testing.T) { + root := t.TempDir() + steeringDir := filepath.Join(root, ".context", "steering") + writeSteering(t, steeringDir, "api-rules", steeringAlways) + + // First sync writes the file. + r1, err := SyncTool(steeringDir, root, "cursor") + if err != nil { + t.Fatalf("first sync: %v", err) + } + if len(r1.Written) != 1 { + t.Fatalf("first sync should write 1 file, got %v", r1.Written) + } + + // Second sync should skip (unchanged). 
+ r2, err := SyncTool(steeringDir, root, "cursor") + if err != nil { + t.Fatalf("second sync: %v", err) + } + if len(r2.Written) != 0 { + t.Errorf("second sync should write 0 files (idempotent), got %v", r2.Written) + } + if len(r2.Skipped) != 1 { + t.Errorf("second sync should skip 1 file, got %v", r2.Skipped) + } +} + +func TestSyncTool_UnsupportedToolReturnsError(t *testing.T) { + root := t.TempDir() + steeringDir := filepath.Join(root, ".context", "steering") + writeSteering(t, steeringDir, "api-rules", steeringAlways) + + _, err := SyncTool(steeringDir, root, "claude") + if err == nil { + t.Fatal("expected error for unsupported sync tool") + } + if !strings.Contains(err.Error(), "unsupported sync tool") { + t.Errorf("error should mention unsupported tool, got: %v", err) + } +} + +func TestSyncTool_PathTraversalRejected(t *testing.T) { + root := t.TempDir() + steeringDir := filepath.Join(root, ".context", "steering") + + // Create a steering file whose name field contains deep path + // traversal that escapes the project root. filepath.Join cleans + // the path, so we need enough ".." segments to escape past + // .cursor/rules/ and the project root itself. 
+ traversalContent := `--- +name: ../../../../etc/evil +description: path traversal attempt +inclusion: always +priority: 50 +--- +evil content +` + writeSteering(t, steeringDir, "evil", traversalContent) + + report, err := SyncTool(steeringDir, root, "cursor") + if err != nil { + t.Fatalf("SyncTool: %v", err) + } + if len(report.Errors) == 0 { + t.Error("path traversal should produce a boundary validation error") + } + if len(report.Written) != 0 { + t.Errorf("path traversal file should not be written, got %v", report.Written) + } +} + +func TestSyncAll_SyncsToAllTools(t *testing.T) { + root := t.TempDir() + steeringDir := filepath.Join(root, ".context", "steering") + writeSteering(t, steeringDir, "api-rules", steeringAlways) + + report, err := SyncAll(steeringDir, root) + if err != nil { + t.Fatalf("SyncAll: %v", err) + } + + // Should write to cursor, cline, and kiro. + if len(report.Written) != 3 { + t.Errorf("expected 3 written (one per tool), got %d: %v", len(report.Written), report.Written) + } + + // Verify all output files exist. + paths := []string{ + filepath.Join(root, ".cursor", "rules", "api-rules.mdc"), + filepath.Join(root, ".clinerules", "api-rules.md"), + filepath.Join(root, ".kiro", "steering", "api-rules.md"), + } + for _, p := range paths { + if _, err := os.Stat(p); os.IsNotExist(err) { + t.Errorf("expected output file to exist: %s", p) + } + } +} + +func TestSyncAll_SkipsToolExcludedFiles(t *testing.T) { + root := t.TempDir() + steeringDir := filepath.Join(root, ".context", "steering") + writeSteering(t, steeringDir, "cursor-only", steeringCursorOnly) + + report, err := SyncAll(steeringDir, root) + if err != nil { + t.Fatalf("SyncAll: %v", err) + } + + // Only cursor should get the file; cline and kiro should skip. 
+ if len(report.Written) != 1 { + t.Errorf("expected 1 written (cursor only), got %d: %v", len(report.Written), report.Written) + } + if len(report.Skipped) != 2 { + t.Errorf("expected 2 skipped (cline + kiro), got %d: %v", len(report.Skipped), report.Skipped) + } +} + +func TestSyncTool_CursorAlwaysApplyFalseForNonAlways(t *testing.T) { + root := t.TempDir() + steeringDir := filepath.Join(root, ".context", "steering") + writeSteering(t, steeringDir, "manual-rule", steeringManual) + + _, err := SyncTool(steeringDir, root, "cursor") + if err != nil { + t.Fatalf("SyncTool: %v", err) + } + + out := filepath.Join(root, ".cursor", "rules", "manual-rule.mdc") + data, err := os.ReadFile(out) + if err != nil { + t.Fatalf("read output: %v", err) + } + if !strings.Contains(string(data), "alwaysApply: false") { + t.Error("cursor output should have alwaysApply: false for manual inclusion") + } +} + +func TestSyncTool_KiroModeMapping(t *testing.T) { + root := t.TempDir() + steeringDir := filepath.Join(root, ".context", "steering") + + autoContent := `--- +name: auto-rule +description: Auto rule +inclusion: auto +priority: 50 +--- +Auto body. +` + writeSteering(t, steeringDir, "auto-rule", autoContent) + + _, err := SyncTool(steeringDir, root, "kiro") + if err != nil { + t.Fatalf("SyncTool: %v", err) + } + + out := filepath.Join(root, ".kiro", "steering", "auto-rule.md") + data, err := os.ReadFile(out) + if err != nil { + t.Fatalf("read output: %v", err) + } + if !strings.Contains(string(data), "mode: auto") { + t.Error("kiro output should map auto inclusion to mode: auto") + } +} diff --git a/internal/steering/testmain_test.go b/internal/steering/testmain_test.go new file mode 100644 index 000000000..16d15eab7 --- /dev/null +++ b/internal/steering/testmain_test.go @@ -0,0 +1,19 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package steering + +import ( + "os" + "testing" + + "github.com/ActiveMemory/ctx/internal/assets/read/lookup" +) + +func TestMain(m *testing.M) { + lookup.Init() + os.Exit(m.Run()) +} diff --git a/internal/steering/types.go b/internal/steering/types.go new file mode 100644 index 000000000..1012ceacd --- /dev/null +++ b/internal/steering/types.go @@ -0,0 +1,108 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package steering + +// InclusionMode determines when a steering file is injected into +// an AI prompt. +type InclusionMode string + +const ( + // InclusionAlways includes the steering file in every context packet. + InclusionAlways InclusionMode = "always" + // InclusionAuto includes the steering file when the prompt matches + // the file's description. + InclusionAuto InclusionMode = "auto" + // InclusionManual includes the steering file only when explicitly + // referenced by name. + InclusionManual InclusionMode = "manual" +) + +// SteeringFile represents a parsed steering file with YAML frontmatter +// and markdown body content. 
+// +// Fields: +// - Name: Unique identifier from frontmatter +// - Description: Used for auto inclusion matching +// - Inclusion: Determines when the file is injected (default: manual) +// - Tools: Tool identifiers this file applies to (nil means all tools) +// - Priority: Injection order; lower values are injected first (default: 50) +// - Body: Markdown content after frontmatter +// - Path: Filesystem path to the steering file +type SteeringFile struct { + Name string `yaml:"name"` + Description string `yaml:"description,omitempty"` + Inclusion InclusionMode `yaml:"inclusion"` + Tools []string `yaml:"tools,omitempty"` + Priority int `yaml:"priority"` + Body string `yaml:"-"` + Path string `yaml:"-"` +} + +// SyncReport summarizes the result of syncing steering files to +// tool-native formats. +// +// Fields: +// - Written: Files that were written or updated +// - Skipped: Files that were skipped (unchanged or excluded) +// - Errors: Errors encountered during sync +type SyncReport struct { + Written []string + Skipped []string + Errors []error +} + +// cursorFrontmatter is the YAML frontmatter for Cursor rule files. +type cursorFrontmatter struct { + Description string `yaml:"description"` + Globs []any `yaml:"globs"` + AlwaysApply bool `yaml:"alwaysApply"` +} + +// kiroFrontmatter is the YAML frontmatter for Kiro steering files. +type kiroFrontmatter struct { + Name string `yaml:"name"` + Description string `yaml:"description,omitempty"` + Mode string `yaml:"mode"` +} + +// FoundationFile describes a foundation steering file to generate. +type FoundationFile struct { + Name string + Description string + Body string +} + +// FoundationFiles defines the set of files created by ctx steering init. 
+var FoundationFiles = []FoundationFile{ + { + Name: "product", + Description: "Product context, goals, and target users", + Body: "# Product Context\n\n" + + "Describe the product, its goals, and target users.\n", + }, + { + Name: "tech", + Description: "Technology stack, constraints, and dependencies", + Body: "# Technology Stack\n\n" + + "Describe the technology stack, " + + "constraints, and key dependencies.\n", + }, + { + Name: "structure", + Description: "Project structure and directory conventions", + Body: "# Project Structure\n\n" + + "Describe the project layout " + + "and directory conventions.\n", + }, + { + Name: "workflow", + Description: "Development workflow and process rules", + Body: "# Development Workflow\n\n" + + "Describe the development workflow, " + + "branching strategy, and process rules.\n", + }, +} diff --git a/internal/sysinfo/load_darwin.go b/internal/sysinfo/load_darwin.go index 8a3385671..1a724445b 100644 --- a/internal/sysinfo/load_darwin.go +++ b/internal/sysinfo/load_darwin.go @@ -16,6 +16,13 @@ import ( execSysinfo "github.com/ActiveMemory/ctx/internal/exec/sysinfo" ) +// sysctl key for load averages on macOS. +const keyVMLoadAvg = "vm.loadavg" + +// loadAvgBraces is the brace wrapper trimmed from +// sysctl vm.loadavg output (e.g. "{ 0.52 0.41 0.38 }"). +const loadAvgBraces = "{ }" + // collectLoad queries system load averages on macOS via sysctl. 
// // Parses the output of `sysctl -n vm.loadavg` (format: "{ 0.52 0.41 0.38 }") @@ -25,12 +32,12 @@ import ( // Returns: // - LoadInfo: System load averages and CPU count func collectLoad() LoadInfo { - out, cmdErr := execSysinfo.Sysctl("-n", "vm.loadavg") + out, cmdErr := execSysinfo.Sysctl(flagNoNewline, keyVMLoadAvg) if cmdErr != nil { return LoadInfo{Supported: false} } // Output: "{ 0.52 0.41 0.38 }" - s := strings.Trim(strings.TrimSpace(string(out)), "{ }") + s := strings.Trim(strings.TrimSpace(string(out)), loadAvgBraces) var load1, load5, load15 float64 _, scanErr := fmt.Sscanf( s, "%f %f %f", &load1, &load5, &load15, diff --git a/internal/sysinfo/memory_darwin.go b/internal/sysinfo/memory_darwin.go index 8dbed618a..a143be672 100644 --- a/internal/sysinfo/memory_darwin.go +++ b/internal/sysinfo/memory_darwin.go @@ -16,6 +16,45 @@ import ( execSysinfo "github.com/ActiveMemory/ctx/internal/exec/sysinfo" ) +// sysctl key and flag constants for macOS memory queries. +const ( + // flagNoNewline suppresses the key name in sysctl output. + flagNoNewline = "-n" + // keyHWMemsize is the sysctl key for total physical memory. + keyHWMemsize = "hw.memsize" + // keyVMSwapUsage is the sysctl key for swap usage. + keyVMSwapUsage = "vm.swapusage" +) + +// vm_stat output parsing constants. +const ( + // markerPageSize is the sentinel substring in vm_stat + // output that precedes the page size value. + markerPageSize = "page size of" + // labelPagesFree is the vm_stat line label for free pages. + labelPagesFree = "Pages free" + // labelPagesInactive is the vm_stat line label for + // inactive pages. + labelPagesInactive = "Pages inactive" +) + +// Swap usage parsing constants. +const ( + // suffixMB is the megabyte suffix in sysctl swap output. + suffixMB = "M" + // labelTotal is the swap usage field name for total swap. + labelTotal = "total" + // labelUsed is the swap usage field name for used swap. 
+ labelUsed = "used" +) + +// defaultPageSize is the default memory page size on Apple +// Silicon (bytes). +const defaultPageSize = 16384 + +// bytesPerKB is the number of bytes in a kilobyte. +const bytesPerKB = 1024 + // collectMemory queries physical and swap memory usage on macOS. // // Uses `sysctl -n hw.memsize` for total RAM, `vm_stat` for page-level @@ -26,7 +65,7 @@ import ( // - MemInfo: Physical and swap memory statistics func collectMemory() MemInfo { // Total physical memory - out, memErr := execSysinfo.Sysctl("-n", "hw.memsize") + out, memErr := execSysinfo.Sysctl(flagNoNewline, keyHWMemsize) if memErr != nil { return MemInfo{Supported: false} } @@ -46,7 +85,7 @@ func collectMemory() MemInfo { // Swap via sysctl var swapTotal, swapUsed uint64 - out, swapErr := execSysinfo.Sysctl("-n", "vm.swapusage") + out, swapErr := execSysinfo.Sysctl(flagNoNewline, keyVMSwapUsage) if swapErr == nil { swapTotal, swapUsed = parseSwapUsage(string(out)) } @@ -73,11 +112,11 @@ func collectMemory() MemInfo { // Returns: // - uint64: Estimated used memory in bytes func parseVMStat(output string, totalBytes uint64) uint64 { - var pageSize uint64 = 16384 // default on Apple Silicon + var pageSize uint64 = defaultPageSize pages := make(map[string]uint64) for _, line := range strings.Split(output, token.NewlineLF) { - if strings.Contains(line, "page size of") { + if strings.Contains(line, markerPageSize) { for _, word := range strings.Fields(line) { n, parseErr := strconv.ParseUint(word, 10, 64) if parseErr == nil && n > 0 { @@ -87,18 +126,21 @@ func parseVMStat(output string, totalBytes uint64) uint64 { } continue } - parts := strings.SplitN(line, ":", 2) + parts := strings.SplitN(line, token.Colon, 2) if len(parts) != 2 { continue } key := strings.TrimSpace(parts[0]) - val := strings.TrimSpace(strings.TrimSuffix(strings.TrimSpace(parts[1]), ".")) + raw := strings.TrimSpace(parts[1]) + val := strings.TrimSpace( + strings.TrimSuffix(raw, token.Dot), + ) if n, parseErr := 
strconv.ParseUint(val, 10, 64); parseErr == nil { pages[key] = n } } - freeBytes := (pages["Pages free"] + pages["Pages inactive"]) * pageSize + freeBytes := (pages[labelPagesFree] + pages[labelPagesInactive]) * pageSize if freeBytes >= totalBytes { return 0 } @@ -121,21 +163,21 @@ func parseVMStat(output string, totalBytes uint64) uint64 { // - used: Used swap space in bytes func parseSwapUsage(output string) (total, used uint64) { parseMB := func(s string) uint64 { - s = strings.TrimSuffix(strings.TrimSpace(s), "M") + s = strings.TrimSuffix(strings.TrimSpace(s), suffixMB) f, parseErr := strconv.ParseFloat(s, 64) if parseErr != nil { return 0 } - return uint64(f * 1024 * 1024) + return uint64(f * bytesPerKB * bytesPerKB) } fields := strings.Fields(output) for i, f := range fields { - if f == "=" && i > 0 && i+1 < len(fields) { + if f == token.KeyValueSep && i > 0 && i+1 < len(fields) { switch fields[i-1] { - case "total": + case labelTotal: total = parseMB(fields[i+1]) - case "used": + case labelUsed: used = parseMB(fields[i+1]) } } diff --git a/internal/trigger/discover.go b/internal/trigger/discover.go new file mode 100644 index 000000000..a210c4e01 --- /dev/null +++ b/internal/trigger/discover.go @@ -0,0 +1,148 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trigger + +import ( + "os" + "path/filepath" + "sort" + + "github.com/ActiveMemory/ctx/internal/config/fs" + "github.com/ActiveMemory/ctx/internal/config/warn" + ctxIo "github.com/ActiveMemory/ctx/internal/io" + ctxLog "github.com/ActiveMemory/ctx/internal/log/warn" +) + +// Discover finds all hook scripts in the hooks directory, grouped +// by type. It iterates over each valid hook type subdirectory, +// validates each file via [ValidatePath], and skips invalid +// entries with a logged warning. Hooks within each type are sorted +// alphabetically by filename. 
+// +// Returns an empty map without error if hooksDir does not exist. +// +// Parameters: +// - hooksDir: root hooks directory (e.g. .context/hooks) +// +// Returns: +// - map[HookType][]HookInfo: discovered hooks grouped by type +// - error: non-nil only on unexpected I/O failures +func Discover(hooksDir string) (map[HookType][]HookInfo, error) { + result := make(map[HookType][]HookInfo) + + if _, statErr := ctxIo.SafeStat(hooksDir); os.IsNotExist(statErr) { + return result, nil + } + + for _, ht := range ValidTypes() { + typeDir := filepath.Join(hooksDir, string(ht)) + + entries, readErr := os.ReadDir(typeDir) + if readErr != nil { + if os.IsNotExist(readErr) { + continue + } + return nil, readErr + } + + var hooks []HookInfo + for _, e := range entries { + if e.IsDir() { + continue + } + + path := filepath.Join(typeDir, e.Name()) + + validateErr := ValidatePath(hooksDir, path) + if validateErr != nil { + ctxLog.Warn("hook skip %s: %v", path, validateErr) + continue + } + + info, infoErr := e.Info() + if infoErr != nil { + ctxLog.Warn(warn.Readdir, typeDir, infoErr) + continue + } + + hooks = append(hooks, HookInfo{ + Name: stripExt(e.Name()), + Type: ht, + Path: path, + Enabled: info.Mode().Perm()&fs.ExecBitMask != 0, + }) + } + + sort.Slice(hooks, func(i, j int) bool { + return hooks[i].Name < hooks[j].Name + }) + + if len(hooks) > 0 { + result[ht] = hooks + } + } + + return result, nil +} + +// FindByName searches all hook type directories for a hook whose +// filename (without extension) matches name. Returns nil without +// error if no match is found. +// +// Unlike Discover, this function includes hooks regardless of their +// executable permission bit, so it can locate disabled hooks for +// enable/disable operations. 
+// +// Parameters: +// - hooksDir: root hooks directory +// - name: hook name to search for (without extension) +// +// Returns: +// - *HookInfo: matched hook, or nil if not found +// - error: non-nil only on unexpected I/O failures +func FindByName(hooksDir, name string) (*HookInfo, error) { + if _, statErr := ctxIo.SafeStat(hooksDir); os.IsNotExist(statErr) { + return nil, nil + } + + for _, ht := range ValidTypes() { + typeDir := filepath.Join(hooksDir, string(ht)) + + entries, readErr := os.ReadDir(typeDir) + if readErr != nil { + if os.IsNotExist(readErr) { + continue + } + return nil, readErr + } + + for _, e := range entries { + if e.IsDir() { + continue + } + if stripExt(e.Name()) == name { + path := filepath.Join(typeDir, e.Name()) + fi, lstatErr := os.Lstat(path) + if lstatErr != nil { + continue + } + // Skip symlinks for security. + if fi.Mode()&os.ModeSymlink != 0 { + continue + } + return &HookInfo{ + Name: name, + Type: ht, + Path: path, + Enabled: fi.Mode().Perm()&fs.ExecBitMask != 0, + }, nil + } + } + } + + return nil, nil +} diff --git a/internal/trigger/discover_test.go b/internal/trigger/discover_test.go new file mode 100644 index 000000000..f11de00c6 --- /dev/null +++ b/internal/trigger/discover_test.go @@ -0,0 +1,235 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trigger + +import ( + "os" + "path/filepath" + "runtime" + "testing" +) + +// TestDiscover_ValidExecutableScripts verifies that Discover returns +// executable scripts grouped by hook type. +// Validates: Requirements 6.1 +func TestDiscover_ValidExecutableScripts(t *testing.T) { + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + + // Create two hook type directories with executable scripts. 
+	for _, ht := range []string{"pre-tool-use", "session-start"} {
+		typeDir := filepath.Join(hooksDir, ht)
+		if err := os.MkdirAll(typeDir, 0o755); err != nil {
+			t.Fatal(err)
+		}
+		if err := os.WriteFile(filepath.Join(typeDir, "check.sh"), []byte("#!/bin/sh\n"), 0o755); err != nil {
+			t.Fatal(err)
+		}
+	}
+
+	result, err := Discover(hooksDir)
+	if err != nil {
+		t.Fatalf("unexpected error: %v", err)
+	}
+
+	if len(result[PreToolUse]) != 1 {
+		t.Fatalf("expected 1 pre-tool-use hook, got %d", len(result[PreToolUse]))
+	}
+	if result[PreToolUse][0].Name != "check" {
+		t.Errorf("expected name %q, got %q", "check", result[PreToolUse][0].Name)
+	}
+	if !result[PreToolUse][0].Enabled {
+		t.Error("expected hook to be enabled")
+	}
+
+	if len(result[SessionStart]) != 1 {
+		t.Fatalf("expected 1 session-start hook, got %d", len(result[SessionStart]))
+	}
+}
+
+// TestDiscover_SkipsNonExecutable verifies that Discover skips
+// non-executable scripts entirely rather than listing them as
+// disabled: ValidatePath rejects files that lack the executable
+// permission bit, so such scripts never appear in the results.
+// Validates: Requirements 6.2
+func TestDiscover_SkipsNonExecutable(t *testing.T) {
+	dir := t.TempDir()
+	hooksDir := filepath.Join(dir, "hooks")
+	typeDir := filepath.Join(hooksDir, "pre-tool-use")
+
+	if err := os.MkdirAll(typeDir, 0o755); err != nil {
+		t.Fatal(err)
+	}
+
+	// Create one executable and one non-executable script.
+ if err := os.WriteFile(filepath.Join(typeDir, "enabled.sh"), []byte("#!/bin/sh\n"), 0o755); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(typeDir, "disabled.sh"), []byte("#!/bin/sh\n"), 0o644); err != nil { + t.Fatal(err) + } + + result, err := Discover(hooksDir) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + hooks := result[PreToolUse] + if len(hooks) != 1 { + t.Fatalf("expected 1 hook (non-executable skipped by validation), got %d", len(hooks)) + } + if hooks[0].Name != "enabled" { + t.Errorf("expected surviving hook name %q, got %q", "enabled", hooks[0].Name) + } +} + +// TestDiscover_SkipsSymlinks verifies that Discover skips symlinked scripts. +// Validates: Requirements 6.1, 15.1 +func TestDiscover_SkipsSymlinks(t *testing.T) { + if runtime.GOOS == "windows" { + t.Skip("symlinks require elevated privileges on Windows") + } + + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + typeDir := filepath.Join(hooksDir, "post-tool-use") + + if err := os.MkdirAll(typeDir, 0o755); err != nil { + t.Fatal(err) + } + + // Create a real executable script. + real := filepath.Join(dir, "real.sh") + if err := os.WriteFile(real, []byte("#!/bin/sh\n"), 0o755); err != nil { + t.Fatal(err) + } + + // Create a symlink inside the hook type directory. + link := filepath.Join(typeDir, "link.sh") + if err := os.Symlink(real, link); err != nil { + t.Fatal(err) + } + + // Also create a valid executable script to confirm it's still found. 
+ if err := os.WriteFile(filepath.Join(typeDir, "valid.sh"), []byte("#!/bin/sh\n"), 0o755); err != nil { + t.Fatal(err) + } + + result, err := Discover(hooksDir) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + hooks := result[PostToolUse] + if len(hooks) != 1 { + t.Fatalf("expected 1 hook (symlink skipped), got %d", len(hooks)) + } + if hooks[0].Name != "valid" { + t.Errorf("expected hook name %q, got %q", "valid", hooks[0].Name) + } +} + +// TestDiscover_MissingHooksDir verifies that Discover returns an empty +// map without error when the hooks directory does not exist. +// Validates: Requirements 6.4 +func TestDiscover_MissingHooksDir(t *testing.T) { + result, err := Discover(filepath.Join(t.TempDir(), "nonexistent")) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(result) != 0 { + t.Fatalf("expected empty map, got %d entries", len(result)) + } +} + +// TestDiscover_AlphabeticalOrder verifies that hooks within each type +// are sorted alphabetically by name. +// Validates: Requirements 6.3 +func TestDiscover_AlphabeticalOrder(t *testing.T) { + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + typeDir := filepath.Join(hooksDir, "file-save") + + if err := os.MkdirAll(typeDir, 0o755); err != nil { + t.Fatal(err) + } + + // Create scripts in non-alphabetical order. 
+ for _, name := range []string{"charlie.sh", "alpha.sh", "bravo.sh"} { + if err := os.WriteFile(filepath.Join(typeDir, name), []byte("#!/bin/sh\n"), 0o755); err != nil { + t.Fatal(err) + } + } + + result, err := Discover(hooksDir) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + hooks := result[FileSave] + if len(hooks) != 3 { + t.Fatalf("expected 3 hooks, got %d", len(hooks)) + } + + expected := []string{"alpha", "bravo", "charlie"} + for i, want := range expected { + if hooks[i].Name != want { + t.Errorf("hooks[%d].Name = %q, want %q", i, hooks[i].Name, want) + } + } +} + +// TestFindByName_Found verifies that FindByName locates a hook by name +// across type directories. +// Validates: Requirements 6.1 +func TestFindByName_Found(t *testing.T) { + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + typeDir := filepath.Join(hooksDir, "context-add") + + if err := os.MkdirAll(typeDir, 0o755); err != nil { + t.Fatal(err) + } + + if err := os.WriteFile(filepath.Join(typeDir, "notify.sh"), []byte("#!/bin/sh\n"), 0o755); err != nil { + t.Fatal(err) + } + + info, err := FindByName(hooksDir, "notify") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if info == nil { + t.Fatal("expected hook info, got nil") + } + if info.Name != "notify" { + t.Errorf("expected name %q, got %q", "notify", info.Name) + } + if info.Type != ContextAdd { + t.Errorf("expected type %q, got %q", ContextAdd, info.Type) + } +} + +// TestFindByName_NotFound verifies that FindByName returns nil when +// no hook matches the given name. 
+// Validates: Requirements 6.1 +func TestFindByName_NotFound(t *testing.T) { + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + + if err := os.MkdirAll(hooksDir, 0o755); err != nil { + t.Fatal(err) + } + + info, err := FindByName(hooksDir, "nonexistent") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if info != nil { + t.Fatalf("expected nil, got %+v", info) + } +} diff --git a/internal/trigger/doc.go b/internal/trigger/doc.go new file mode 100644 index 000000000..0b5c7b621 --- /dev/null +++ b/internal/trigger/doc.go @@ -0,0 +1,13 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package trigger manages lifecycle automation scripts for AI tool events. +// +// Key exports: [Discover], [FindByName], [RunAll], +// [ValidatePath], [ValidTypes]. +// See source files for implementation details. +// Part of the internal subsystem. +package trigger diff --git a/internal/trigger/exec.go b/internal/trigger/exec.go new file mode 100644 index 000000000..c1b66b176 --- /dev/null +++ b/internal/trigger/exec.go @@ -0,0 +1,54 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trigger + +import ( + "bytes" + "context" + "encoding/json" + "strings" + "time" + + errTrigger "github.com/ActiveMemory/ctx/internal/err/trigger" + execTrigger "github.com/ActiveMemory/ctx/internal/exec/trigger" +) + +// runOne executes a single hook script, enforcing the given timeout. +// It writes inputJSON to the script's stdin and reads HookOutput JSON +// from stdout. Returns an error for non-zero exit, timeout, or +// invalid JSON output. 
+func runOne( + h HookInfo, inputJSON []byte, timeout time.Duration, +) (*HookOutput, error) { + ctx, cancel := context.WithTimeout(context.Background(), timeout) + defer cancel() + + cmd := execTrigger.CommandContext(ctx, h.Path) + cmd.Stdin = bytes.NewReader(inputJSON) + + var stdout bytes.Buffer + cmd.Stdout = &stdout + + if runErr := cmd.Run(); runErr != nil { + if ctx.Err() == context.DeadlineExceeded { + return nil, errTrigger.Timeout(timeout) + } + return nil, errTrigger.Exit(runErr) + } + + raw := strings.TrimSpace(stdout.String()) + if raw == "" { + return &HookOutput{}, nil + } + + var out HookOutput + if jsonErr := json.Unmarshal([]byte(raw), &out); jsonErr != nil { + return nil, errTrigger.InvalidJSONOutput(jsonErr) + } + + return &out, nil +} diff --git a/internal/trigger/helpers.go b/internal/trigger/helpers.go new file mode 100644 index 000000000..2c33b366b --- /dev/null +++ b/internal/trigger/helpers.go @@ -0,0 +1,18 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trigger + +import ( + "path/filepath" + "strings" +) + +// stripExt removes the file extension from a filename. +func stripExt(filename string) string { + ext := filepath.Ext(filename) + return strings.TrimSuffix(filename, ext) +} diff --git a/internal/trigger/runner.go b/internal/trigger/runner.go new file mode 100644 index 000000000..6722a7e1b --- /dev/null +++ b/internal/trigger/runner.go @@ -0,0 +1,99 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package trigger + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/ActiveMemory/ctx/internal/config/token" + errTrigger "github.com/ActiveMemory/ctx/internal/err/trigger" + ctxLog "github.com/ActiveMemory/ctx/internal/log/warn" +) + +// RunAll executes all enabled hooks for the given type in alphabetical +// order. It passes input as JSON via stdin and reads HookOutput as JSON +// from stdout. +// +// Behaviour per hook: +// - cancel:true in output → halt, set Cancelled/Message, return +// - non-empty context → append to AggregatedOutput.Context +// - non-zero exit → log error, record in Errors, continue +// - invalid JSON stdout → log warning, record in Errors, continue +// - timeout exceeded → kill process, log warning, continue +// +// Returns an empty AggregatedOutput (not nil) when no hooks exist. +// +// Parameters: +// - hooksDir: root hooks directory (e.g. .context/hooks) +// - hookType: lifecycle event category +// - input: JSON object sent to each hook via stdin +// - timeout: per-hook execution timeout; zero uses DefaultTimeout +// +// Returns: +// - *AggregatedOutput: aggregated results from all hooks +// - error: non-nil only on discovery failure +func RunAll( + hooksDir string, + hookType HookType, + input *HookInput, + timeout time.Duration, +) (*AggregatedOutput, error) { + if timeout <= 0 { + timeout = DefaultTimeout + } + + agg := &AggregatedOutput{} + + all, discoverErr := Discover(hooksDir) + if discoverErr != nil { + return nil, errTrigger.DiscoverFailed(discoverErr) + } + + hooks := all[hookType] + if len(hooks) == 0 { + return agg, nil + } + + inputJSON, marshalErr := json.Marshal(input) + if marshalErr != nil { + return nil, errTrigger.MarshalInput(marshalErr) + } + + for _, h := range hooks { + if !h.Enabled { + continue + } + + out, runErr := runOne(h, inputJSON, timeout) + if runErr != nil { + ctxLog.Warn("hook %s: %v", h.Path, runErr) + agg.Errors = append(agg.Errors, 
fmt.Sprintf("%s: %s", h.Path, runErr)) + continue + } + + if out.Cancel { + agg.Cancelled = true + agg.Message = out.Message + return agg, nil + } + + if out.Context != "" { + if agg.Context != "" { + agg.Context += token.NewlineLF + } + agg.Context += out.Context + } + } + + return agg, nil +} + +// DefaultTimeout is the per-hook execution timeout when none is +// specified by the caller. +const DefaultTimeout = 10 * time.Second diff --git a/internal/trigger/runner_test.go b/internal/trigger/runner_test.go new file mode 100644 index 000000000..918058656 --- /dev/null +++ b/internal/trigger/runner_test.go @@ -0,0 +1,319 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trigger + +import ( + "os" + "path/filepath" + "runtime" + "strings" + "testing" + "time" +) + +// writeHookScript creates an executable shell script in the given hook +// type subdirectory. It creates the directory structure if needed. +func writeHookScript(t *testing.T, hooksDir, hookType, name, body string) { + t.Helper() + typeDir := filepath.Join(hooksDir, hookType) + if err := os.MkdirAll(typeDir, 0o755); err != nil { + t.Fatal(err) + } + script := filepath.Join(typeDir, name) + if err := os.WriteFile(script, []byte(body), 0o755); err != nil { + t.Fatal(err) + } +} + +// skipIfWindows skips the test on Windows where shell scripts cannot run. +func skipIfWindows(t *testing.T) { + t.Helper() + if runtime.GOOS == "windows" { + t.Skip("shell script execution not supported on Windows") + } +} + +// TestRunAll_CancelPropagation verifies that when a hook returns +// cancel:true, subsequent hooks are not executed and the aggregated +// output reflects the cancellation. +// Validates: Requirements 7.3 +func TestRunAll_CancelPropagation(t *testing.T) { + skipIfWindows(t) + + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + + // First hook (alphabetically) cancels. 
+ writeHookScript(t, hooksDir, "pre-tool-use", "01-block.sh", + "#!/bin/sh\necho '{\"cancel\": true, \"message\": \"blocked\"}'") + + // Second hook should never run; if it did it would add context. + writeHookScript(t, hooksDir, "pre-tool-use", "02-context.sh", + "#!/bin/sh\necho '{\"cancel\": false, \"context\": \"should not appear\"}'") + + input := &HookInput{TriggerType: "pre-tool-use", Tool: "test"} + agg, err := RunAll(hooksDir, PreToolUse, input, 5*time.Second) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if !agg.Cancelled { + t.Error("expected Cancelled to be true") + } + if agg.Message != "blocked" { + t.Errorf("expected message %q, got %q", "blocked", agg.Message) + } + if agg.Context != "" { + t.Errorf("expected empty context (second hook should not run), got %q", agg.Context) + } +} + +// TestRunAll_ContextAggregation verifies that non-empty context fields +// from multiple hooks are concatenated with newlines. +// Validates: Requirements 7.4 +func TestRunAll_ContextAggregation(t *testing.T) { + skipIfWindows(t) + + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + + writeHookScript(t, hooksDir, "session-start", "01-first.sh", + "#!/bin/sh\necho '{\"cancel\": false, \"context\": \"extra context\"}'") + + writeHookScript(t, hooksDir, "session-start", "02-second.sh", + "#!/bin/sh\necho '{\"cancel\": false, \"context\": \"more context\"}'") + + input := &HookInput{TriggerType: "session-start", Tool: "test"} + agg, err := RunAll(hooksDir, SessionStart, input, 5*time.Second) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if agg.Cancelled { + t.Error("expected Cancelled to be false") + } + + want := "extra context\nmore context" + if agg.Context != want { + t.Errorf("expected context %q, got %q", want, agg.Context) + } +} + +// TestRunAll_NonZeroExitCode verifies that a hook exiting with a +// non-zero exit code is logged, skipped, and remaining hooks continue. 
+// Validates: Requirements 7.5 +func TestRunAll_NonZeroExitCode(t *testing.T) { + skipIfWindows(t) + + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + + // First hook exits with error. + writeHookScript(t, hooksDir, "post-tool-use", "01-fail.sh", + "#!/bin/sh\nexit 1") + + // Second hook succeeds with context. + writeHookScript(t, hooksDir, "post-tool-use", "02-ok.sh", + "#!/bin/sh\necho '{\"cancel\": false, \"context\": \"survived\"}'") + + input := &HookInput{TriggerType: "post-tool-use", Tool: "test"} + agg, err := RunAll(hooksDir, PostToolUse, input, 5*time.Second) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if agg.Cancelled { + t.Error("expected Cancelled to be false") + } + if len(agg.Errors) == 0 { + t.Error("expected at least one error from the failing hook") + } + if agg.Context != "survived" { + t.Errorf("expected context %q, got %q", "survived", agg.Context) + } +} + +// TestRunAll_InvalidJSONOutput verifies that a hook producing invalid +// JSON on stdout is logged, skipped, and remaining hooks continue. +// Validates: Requirements 7.6 +func TestRunAll_InvalidJSONOutput(t *testing.T) { + skipIfWindows(t) + + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + + // First hook outputs invalid JSON. + writeHookScript(t, hooksDir, "file-save", "01-bad.sh", + "#!/bin/sh\necho 'not json'") + + // Second hook succeeds. + writeHookScript(t, hooksDir, "file-save", "02-good.sh", + "#!/bin/sh\necho '{\"cancel\": false, \"context\": \"valid\"}'") + + input := &HookInput{TriggerType: "file-save", Tool: "test"} + agg, err := RunAll(hooksDir, FileSave, input, 5*time.Second) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if agg.Cancelled { + t.Error("expected Cancelled to be false") + } + if len(agg.Errors) == 0 { + t.Error("expected at least one error from the invalid JSON hook") + } + + // Verify the error mentions invalid JSON. 
+ found := false + for _, e := range agg.Errors { + if strings.Contains(e, "invalid JSON") { + found = true + break + } + } + if !found { + t.Errorf("expected an error containing %q, got %v", "invalid JSON", agg.Errors) + } + + if agg.Context != "valid" { + t.Errorf("expected context %q, got %q", "valid", agg.Context) + } +} + +// TestRunAll_TimeoutEnforcement verifies that a hook exceeding the +// timeout is terminated and remaining hooks continue. +// Validates: Requirements 7.7, 7.8, 19.6 +func TestRunAll_TimeoutEnforcement(t *testing.T) { + skipIfWindows(t) + + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + + // First hook sleeps well beyond the timeout. + // Use exec to replace the shell process with sleep so that + // CommandContext's kill signal reaches the sleeping process + // directly, avoiding orphaned child process issues. + writeHookScript(t, hooksDir, "context-add", "01-slow.sh", + "#!/bin/sh\nexec sleep 30") + + // Second hook succeeds quickly. + writeHookScript(t, hooksDir, "context-add", "02-fast.sh", + "#!/bin/sh\necho '{\"cancel\": false, \"context\": \"fast\"}'") + + input := &HookInput{TriggerType: "context-add", Tool: "test"} + + start := time.Now() + agg, err := RunAll(hooksDir, ContextAdd, input, 1*time.Second) + elapsed := time.Since(start) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Should complete well under 30s (the sleep duration). + if elapsed > 10*time.Second { + t.Errorf("expected timeout enforcement to complete quickly, took %v", elapsed) + } + + if agg.Cancelled { + t.Error("expected Cancelled to be false") + } + if len(agg.Errors) == 0 { + t.Error("expected at least one error from the timed-out hook") + } + + // Verify the error mentions timeout. 
+ found := false + for _, e := range agg.Errors { + if strings.Contains(e, "timeout") { + found = true + break + } + } + if !found { + t.Errorf("expected an error containing %q, got %v", "timeout", agg.Errors) + } + + if agg.Context != "fast" { + t.Errorf("expected context %q from second hook, got %q", "fast", agg.Context) + } +} + +// TestRunAll_NoHooks verifies that RunAll returns an empty +// AggregatedOutput when no hooks exist for the given type. +// Validates: Requirements 7.1 +func TestRunAll_NoHooks(t *testing.T) { + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + + // Create the hooks directory but no type subdirectories. + if err := os.MkdirAll(hooksDir, 0o755); err != nil { + t.Fatal(err) + } + + input := &HookInput{TriggerType: "session-end", Tool: "test"} + agg, err := RunAll(hooksDir, SessionEnd, input, 5*time.Second) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if agg.Cancelled { + t.Error("expected Cancelled to be false") + } + if agg.Context != "" { + t.Errorf("expected empty context, got %q", agg.Context) + } + if len(agg.Errors) != 0 { + t.Errorf("expected no errors, got %v", agg.Errors) + } +} + +// TestRunAll_MissingHooksDir verifies that RunAll returns an empty +// AggregatedOutput when the hooks directory does not exist. +// Validates: Requirements 7.1 +func TestRunAll_MissingHooksDir(t *testing.T) { + input := &HookInput{TriggerType: "pre-tool-use", Tool: "test"} + agg, err := RunAll(filepath.Join(t.TempDir(), "nonexistent"), PreToolUse, input, 5*time.Second) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if agg.Cancelled { + t.Error("expected Cancelled to be false") + } + if agg.Context != "" { + t.Errorf("expected empty context, got %q", agg.Context) + } +} + +// TestRunAll_EmptyStdout verifies that a hook producing no output +// is treated as a no-op (no cancel, no context, no error). 
+// Validates: Requirements 7.2 +func TestRunAll_EmptyStdout(t *testing.T) { + skipIfWindows(t) + + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + + writeHookScript(t, hooksDir, "session-end", "01-silent.sh", + "#!/bin/sh\n# produces no output") + + input := &HookInput{TriggerType: "session-end", Tool: "test"} + agg, err := RunAll(hooksDir, SessionEnd, input, 5*time.Second) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if agg.Cancelled { + t.Error("expected Cancelled to be false") + } + if agg.Context != "" { + t.Errorf("expected empty context, got %q", agg.Context) + } + if len(agg.Errors) != 0 { + t.Errorf("expected no errors, got %v", agg.Errors) + } +} diff --git a/internal/trigger/security.go b/internal/trigger/security.go new file mode 100644 index 000000000..f84fd4661 --- /dev/null +++ b/internal/trigger/security.go @@ -0,0 +1,73 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trigger + +import ( + "os" + "path/filepath" + + "github.com/ActiveMemory/ctx/internal/config/fs" + errTrigger "github.com/ActiveMemory/ctx/internal/err/trigger" +) + +// parentDir is the relative parent directory component used +// in boundary checks. +const parentDir = ".." + +// ValidatePath checks that a hook script path: +// 1. Is not a symlink +// 2. Resolves within the hooksDir boundary +// 3. Has the executable permission bit set +// +// Returns a descriptive error if any check fails. +// +// Parameters: +// - hooksDir: the root hooks directory (e.g. .context/hooks) +// - hookPath: the path to the hook script to validate +// +// Returns: +// - error: non-nil if the path is a symlink, escapes the boundary, +// or lacks the executable bit +func ValidatePath(hooksDir, hookPath string) error { + // 1. Symlink check via os.Lstat (does not follow symlinks). 
+ fi, lstatErr := os.Lstat(hookPath) + if lstatErr != nil { + return errTrigger.StatPath(hookPath, lstatErr) + } + + if fi.Mode()&os.ModeSymlink != 0 { + return errTrigger.Symlink(hookPath) + } + + // 2. Boundary check — hookPath must resolve within hooksDir. + absHooksDir, absHooksDirErr := filepath.Abs(hooksDir) + if absHooksDirErr != nil { + return errTrigger.ResolveHooksDir(hooksDir, absHooksDirErr) + } + + absHookPath, absHookPathErr := filepath.Abs(hookPath) + if absHookPathErr != nil { + return errTrigger.ResolvePath(hookPath, absHookPathErr) + } + + rel, relErr := filepath.Rel(absHooksDir, absHookPath) + if relErr != nil { + return errTrigger.Boundary(hookPath, hooksDir) + } + + sep := string(filepath.Separator) + if rel == parentDir || len(rel) >= 3 && rel[:3] == parentDir+sep { + return errTrigger.Boundary(hookPath, hooksDir) + } + + // 3. Executable permission bit check. + if fi.Mode().Perm()&fs.ExecBitMask == 0 { + return errTrigger.NotExecutable(hookPath) + } + + return nil +} diff --git a/internal/trigger/security_test.go b/internal/trigger/security_test.go new file mode 100644 index 000000000..6afa8402d --- /dev/null +++ b/internal/trigger/security_test.go @@ -0,0 +1,146 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package trigger + +import ( + "os" + "path/filepath" + "runtime" + "testing" +) + +func TestValidatePath_Valid(t *testing.T) { + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + typeDir := filepath.Join(hooksDir, "pre-tool-use") + + if err := os.MkdirAll(typeDir, 0o755); err != nil { + t.Fatal(err) + } + + script := filepath.Join(typeDir, "check.sh") + if err := os.WriteFile(script, []byte("#!/bin/sh\n"), 0o755); err != nil { + t.Fatal(err) + } + + if err := ValidatePath(hooksDir, script); err != nil { + t.Fatalf("expected no error, got %v", err) + } +} + +func TestValidatePath_Symlink(t *testing.T) { + if runtime.GOOS == "windows" { + t.Skip("symlinks require elevated privileges on Windows") + } + + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + typeDir := filepath.Join(hooksDir, "pre-tool-use") + + if err := os.MkdirAll(typeDir, 0o755); err != nil { + t.Fatal(err) + } + + // Create a real file and a symlink to it. 
+ real := filepath.Join(dir, "real.sh") + if err := os.WriteFile(real, []byte("#!/bin/sh\n"), 0o755); err != nil { + t.Fatal(err) + } + + link := filepath.Join(typeDir, "link.sh") + if err := os.Symlink(real, link); err != nil { + t.Fatal(err) + } + + err := ValidatePath(hooksDir, link) + if err == nil { + t.Fatal("expected symlink error, got nil") + } + + if got := err.Error(); !contains(got, "symlink") { + t.Fatalf("expected symlink error, got %q", got) + } +} + +func TestValidatePath_BoundaryEscape(t *testing.T) { + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + outsideDir := filepath.Join(dir, "outside") + + if err := os.MkdirAll(hooksDir, 0o755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(outsideDir, 0o755); err != nil { + t.Fatal(err) + } + + script := filepath.Join(outsideDir, "evil.sh") + if err := os.WriteFile(script, []byte("#!/bin/sh\n"), 0o755); err != nil { + t.Fatal(err) + } + + err := ValidatePath(hooksDir, script) + if err == nil { + t.Fatal("expected boundary error, got nil") + } + + if got := err.Error(); !contains(got, "escapes") { + t.Fatalf("expected boundary error, got %q", got) + } +} + +func TestValidatePath_NotExecutable(t *testing.T) { + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + typeDir := filepath.Join(hooksDir, "session-start") + + if err := os.MkdirAll(typeDir, 0o755); err != nil { + t.Fatal(err) + } + + script := filepath.Join(typeDir, "noexec.sh") + if err := os.WriteFile(script, []byte("#!/bin/sh\n"), 0o644); err != nil { + t.Fatal(err) + } + + err := ValidatePath(hooksDir, script) + if err == nil { + t.Fatal("expected not-executable error, got nil") + } + + if got := err.Error(); !contains(got, "not executable") { + t.Fatalf("expected not-executable error, got %q", got) + } +} + +func TestValidatePath_NonExistent(t *testing.T) { + dir := t.TempDir() + hooksDir := filepath.Join(dir, "hooks") + + if err := os.MkdirAll(hooksDir, 0o755); err != nil { + t.Fatal(err) + } + + err := 
ValidatePath(hooksDir, filepath.Join(hooksDir, "missing.sh")) + if err == nil { + t.Fatal("expected error for non-existent path, got nil") + } +} + +// contains is a small helper to check substring presence. +func contains(s, substr string) bool { + return len(s) >= len(substr) && searchSubstring(s, substr) +} + +func searchSubstring(s, substr string) bool { + for i := 0; i <= len(s)-len(substr); i++ { + if s[i:i+len(substr)] == substr { + return true + } + } + return false +} diff --git a/internal/trigger/testmain_test.go b/internal/trigger/testmain_test.go new file mode 100644 index 000000000..77a654086 --- /dev/null +++ b/internal/trigger/testmain_test.go @@ -0,0 +1,19 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trigger + +import ( + "os" + "testing" + + "github.com/ActiveMemory/ctx/internal/assets/read/lookup" +) + +func TestMain(m *testing.M) { + lookup.Init() + os.Exit(m.Run()) +} diff --git a/internal/trigger/types.go b/internal/trigger/types.go new file mode 100644 index 000000000..0c3391600 --- /dev/null +++ b/internal/trigger/types.go @@ -0,0 +1,86 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trigger + +import "github.com/ActiveMemory/ctx/internal/entity" + +// HookType is an alias for entity.TriggerType within the trigger package. +type HookType = entity.TriggerType + +// Lifecycle event constants re-exported from entity for convenience. +const ( + // PreToolUse fires before an AI tool invocation. + PreToolUse = entity.TriggerPreToolUse + // PostToolUse fires after an AI tool invocation. + PostToolUse = entity.TriggerPostToolUse + // SessionStart fires when an AI session begins. + SessionStart = entity.TriggerSessionStart + // SessionEnd fires when an AI session ends. 
+ SessionEnd = entity.TriggerSessionEnd + // FileSave fires when a file is saved. + FileSave = entity.TriggerFileSave + // ContextAdd fires when context is added. + ContextAdd = entity.TriggerContextAdd +) + +// ValidTypes returns all valid trigger types in declaration order. +func ValidTypes() []HookType { + return []HookType{ + PreToolUse, + PostToolUse, + SessionStart, + SessionEnd, + FileSave, + ContextAdd, + } +} + +// HookSession is an alias for entity.TriggerSession. +type HookSession = entity.TriggerSession + +// HookInput is an alias for entity.TriggerInput. +type HookInput = entity.TriggerInput + +// HookOutput is the JSON object returned by trigger scripts via stdout. +// +// Fields: +// - Cancel: If true, halt execution of subsequent triggers +// - Context: Optional text to append to AI conversation context +// - Message: Optional user-visible message +type HookOutput struct { + Cancel bool `json:"cancel"` + Context string `json:"context,omitempty"` + Message string `json:"message,omitempty"` +} + +// HookInfo describes a discovered trigger script. +// +// Fields: +// - Name: Script filename without extension +// - Type: Lifecycle event category +// - Path: Filesystem path to the script +// - Enabled: True if the executable permission bit is set +type HookInfo struct { + Name string + Type HookType + Path string + Enabled bool +} + +// AggregatedOutput collects results from all triggers in a run. 
+// +// Fields: +// - Cancelled: True if a trigger returned cancel:true +// - Message: Cancellation or summary message +// - Context: Concatenated context from all triggers +// - Errors: Warnings from failed triggers +type AggregatedOutput struct { + Cancelled bool + Message string + Context string + Errors []string +} diff --git a/internal/write/setup/hook.go b/internal/write/setup/hook.go index f2a4623d0..5b26fd46d 100644 --- a/internal/write/setup/hook.go +++ b/internal/write/setup/hook.go @@ -243,3 +243,134 @@ func Content(cmd *cobra.Command, content string) { } cmd.Print(content) } + +// Integration instruction lines for ctx setup output. +const ( + // infoCursorHead is the Cursor section header. + infoCursorHead = "Cursor integration:" + // infoCursorRun is the run command hint. + infoCursorRun = " Run: ctx setup cursor --write" + // infoCursorMCP is the MCP config path. + infoCursorMCP = " Creates: .cursor/mcp.json" + + " (MCP server config)" + // infoCursorSync is the steering sync path. + infoCursorSync = " Syncs: .cursor/rules/" + + " (steering files)" + // infoKiroHead is the Kiro section header. + infoKiroHead = "Kiro integration:" + // infoKiroRun is the run command hint. + infoKiroRun = " Run: ctx setup kiro --write" + // infoKiroMCP is the MCP config path. + infoKiroMCP = " Creates: .kiro/settings/mcp.json" + + " (MCP server config)" + // infoKiroSync is the steering sync path. + infoKiroSync = " Syncs: .kiro/steering/" + + " (steering files)" + // infoClineHead is the Cline section header. + infoClineHead = "Cline integration:" + // infoClineRun is the run command hint. + infoClineRun = " Run: ctx setup cline --write" + // infoClineMCP is the MCP config path. + infoClineMCP = " Creates: .vscode/mcp.json" + + " (MCP server config)" + // infoClineSync is the steering sync path. + infoClineSync = " Syncs: .clinerules/" + + " (steering files)" +) + +// InfoCursorIntegration prints Cursor integration instructions. 
+// +// Parameters: +// - cmd: Cobra command for output +func InfoCursorIntegration(cmd *cobra.Command) { + cmd.Println(infoCursorHead) + cmd.Println(infoCursorRun) + cmd.Println(infoCursorMCP) + cmd.Println(infoCursorSync) +} + +// InfoKiroIntegration prints Kiro integration instructions. +// +// Parameters: +// - cmd: Cobra command for output +func InfoKiroIntegration(cmd *cobra.Command) { + cmd.Println(infoKiroHead) + cmd.Println(infoKiroRun) + cmd.Println(infoKiroMCP) + cmd.Println(infoKiroSync) +} + +// InfoClineIntegration prints Cline integration instructions. +// +// Parameters: +// - cmd: Cobra command for output +func InfoClineIntegration(cmd *cobra.Command) { + cmd.Println(infoClineHead) + cmd.Println(infoClineRun) + cmd.Println(infoClineMCP) + cmd.Println(infoClineSync) +} + +// DeployComplete prints the completion message for a tool setup. +// +// Parameters: +// - cmd: Cobra command for output +// - tool: Tool name (e.g., "Cursor", "Kiro", "Cline") +// - mcpPath: Path to the MCP config file +// - steeringPath: Path to the steering directory +func DeployComplete(cmd *cobra.Command, tool, mcpPath, steeringPath string) { + cmd.Println() + cmd.Println(fmt.Sprintf("%s setup complete.", tool)) + cmd.Println(fmt.Sprintf(" MCP server: %s", mcpPath)) + cmd.Println(fmt.Sprintf(" Steering: %s", steeringPath)) +} + +// DeployFileExists prints that a file already exists and was skipped. +// +// Parameters: +// - cmd: Cobra command for output +// - path: Path to the existing file +func DeployFileExists(cmd *cobra.Command, path string) { + cmd.Println(fmt.Sprintf("\u2713 %s already exists (skipped)", path)) +} + +// DeployFileCreated prints that a file was created. +// +// Parameters: +// - cmd: Cobra command for output +// - path: Path to the created file +func DeployFileCreated(cmd *cobra.Command, path string) { + cmd.Println(fmt.Sprintf("\u2713 Created %s", path)) +} + +// DeploySteeringSynced prints that a steering file was synced. 
+// +// Parameters: +// - cmd: Cobra command for output +// - name: Name of the synced file +func DeploySteeringSynced(cmd *cobra.Command, name string) { + cmd.Println(fmt.Sprintf("\u2713 Synced steering: %s", name)) +} + +// DeploySteeringSkipped prints that a steering file was skipped. +// +// Parameters: +// - cmd: Cobra command for output +// - name: Name of the skipped file +func DeploySteeringSkipped(cmd *cobra.Command, name string) { + cmd.Println(fmt.Sprintf(" Skipped steering: %s (unchanged)", name)) +} + +// msgNoSteeringToSync is the message when no steering files +// are available for sync. +const msgNoSteeringToSync = " No steering files to sync" + + " (run ctx steering init first)" + +// DeployNoSteering prints that no steering files are +// available to sync. +// +// Parameters: +// - cmd: Cobra command for output +func DeployNoSteering(cmd *cobra.Command) { + cmd.Println(msgNoSteeringToSync) +} diff --git a/internal/write/skill/doc.go b/internal/write/skill/doc.go new file mode 100644 index 000000000..a08f79b10 --- /dev/null +++ b/internal/write/skill/doc.go @@ -0,0 +1,12 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package skill provides formatted output helpers for skill commands. +// +// All functions take *cobra.Command for output routing. +// Exports: [Installed], [NoSkillsFound], [EntryWithDesc], +// [Entry], [Count], [Removed]. +package skill diff --git a/internal/write/skill/skill.go b/internal/write/skill/skill.go new file mode 100644 index 000000000..cbe922922 --- /dev/null +++ b/internal/write/skill/skill.go @@ -0,0 +1,71 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. 
+// SPDX-License-Identifier: Apache-2.0 + +package skill + +import ( + "fmt" + + "github.com/spf13/cobra" +) + +// Installed prints confirmation that a skill was installed. +// +// Parameters: +// - cmd: Cobra command for output +// - name: Skill name +// - dir: Installation directory +func Installed(cmd *cobra.Command, name, dir string) { + cmd.Println(fmt.Sprintf("Installed %s → %s", name, dir)) +} + +// msgNoSkills is shown when no skills are installed. +const msgNoSkills = "No skills installed." + +// NoSkillsFound prints a message indicating no skills are installed. +// +// Parameters: +// - cmd: Cobra command for output +func NoSkillsFound(cmd *cobra.Command) { + cmd.Println(msgNoSkills) +} + +// EntryWithDesc prints a skill entry with name and description. +// +// Parameters: +// - cmd: Cobra command for output +// - name: Skill name +// - description: Skill description +func EntryWithDesc(cmd *cobra.Command, name, description string) { + cmd.Println(fmt.Sprintf(" %-20s %s", name, description)) +} + +// Entry prints a skill entry with name only. +// +// Parameters: +// - cmd: Cobra command for output +// - name: Skill name +func Entry(cmd *cobra.Command, name string) { + cmd.Println(fmt.Sprintf(" %s", name)) +} + +// Count prints the total skill count. +// +// Parameters: +// - cmd: Cobra command for output +// - count: Number of skills +func Count(cmd *cobra.Command, count int) { + cmd.Println(fmt.Sprintf("\n%d skill(s)", count)) +} + +// Removed prints confirmation that a skill was removed. +// +// Parameters: +// - cmd: Cobra command for output +// - name: Skill name +func Removed(cmd *cobra.Command, name string) { + cmd.Println(fmt.Sprintf("Removed %s", name)) +} diff --git a/internal/write/steering/doc.go b/internal/write/steering/doc.go new file mode 100644 index 000000000..695466c0f --- /dev/null +++ b/internal/write/steering/doc.go @@ -0,0 +1,15 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? 
+// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package steering provides formatted output helpers for steering commands. +// +// All functions take *cobra.Command for output routing. +// Exports: [Created], [Skipped], [InitSummary], +// [NoFilesFound], [FileEntry], [FileCount], +// [NoFilesMatch], [PreviewHeader], [PreviewEntry], +// [PreviewCount], [SyncWritten], [SyncSkipped], +// [SyncError], [SyncSummary]. +package steering diff --git a/internal/write/steering/steering.go b/internal/write/steering/steering.go new file mode 100644 index 000000000..e42c23b76 --- /dev/null +++ b/internal/write/steering/steering.go @@ -0,0 +1,164 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package steering + +import ( + "fmt" + + "github.com/spf13/cobra" +) + +// User-facing messages for steering commands. +const ( + // msgNoFiles is shown when no steering files exist. + msgNoFiles = "No steering files found." + // msgNoMatch is shown when no files match the prompt. + msgNoMatch = "No steering files match the given prompt." +) + +// Created prints confirmation that a steering file was created. +// +// Parameters: +// - cmd: Cobra command for output +// - path: Path to the created file +func Created(cmd *cobra.Command, path string) { + cmd.Println(fmt.Sprintf("Created %s", path)) +} + +// Skipped prints that a steering file was skipped because it exists. +// +// Parameters: +// - cmd: Cobra command for output +// - path: Path to the existing file +func Skipped(cmd *cobra.Command, path string) { + cmd.Println(fmt.Sprintf("Skipped %s (already exists)", path)) +} + +// InitSummary prints the summary after steering init. 
+// +// Parameters: +// - cmd: Cobra command for output +// - created: Number of files created +// - skipped: Number of files skipped +func InitSummary(cmd *cobra.Command, created, skipped int) { + cmd.Println(fmt.Sprintf("\n%d created, %d skipped", created, skipped)) +} + +// NoFilesFound prints a message indicating no steering files exist. +// +// Parameters: +// - cmd: Cobra command for output +func NoFilesFound(cmd *cobra.Command) { + cmd.Println(msgNoFiles) +} + +// FileEntry prints a single steering file entry with metadata. +// +// Parameters: +// - cmd: Cobra command for output +// - name: File name +// - inclusion: Inclusion mode +// - priority: Priority value +// - tools: Comma-separated tool list or "all" +func FileEntry( + cmd *cobra.Command, name, inclusion string, + priority int, tools string, +) { + cmd.Println(fmt.Sprintf("%-20s inclusion=%-7s priority=%-3d tools=%s", + name, inclusion, priority, tools)) +} + +// FileCount prints the total steering file count. +// +// Parameters: +// - cmd: Cobra command for output +// - count: Number of steering files +func FileCount(cmd *cobra.Command, count int) { + cmd.Println(fmt.Sprintf("\n%d steering file(s)", count)) +} + +// NoFilesMatch prints a message indicating no files match the prompt. +// +// Parameters: +// - cmd: Cobra command for output +func NoFilesMatch(cmd *cobra.Command) { + cmd.Println(msgNoMatch) +} + +// PreviewHeader prints the header for steering preview output. +// +// Parameters: +// - cmd: Cobra command for output +// - prompt: The prompt being matched against +func PreviewHeader(cmd *cobra.Command, prompt string) { + cmd.Println(fmt.Sprintf("Steering files matching prompt %q:", prompt)) + cmd.Println() +} + +// PreviewEntry prints a single preview match entry. 
+// +// Parameters: +// - cmd: Cobra command for output +// - name: File name +// - inclusion: Inclusion mode +// - priority: Priority value +// - tools: Comma-separated tool list or "all" +func PreviewEntry( + cmd *cobra.Command, name, inclusion string, + priority int, tools string, +) { + cmd.Println(fmt.Sprintf(" %-20s inclusion=%-7s priority=%-3d tools=%s", + name, inclusion, priority, tools)) +} + +// PreviewCount prints the count of files that would be included. +// +// Parameters: +// - cmd: Cobra command for output +// - count: Number of matched files +func PreviewCount(cmd *cobra.Command, count int) { + cmd.Println(fmt.Sprintf("\n%d file(s) would be included", count)) +} + +// SyncWritten prints that a file was written during sync. +// +// Parameters: +// - cmd: Cobra command for output +// - name: Name of the written file +func SyncWritten(cmd *cobra.Command, name string) { + cmd.Println(fmt.Sprintf("Written: %s", name)) +} + +// SyncSkipped prints that a file was skipped during sync. +// +// Parameters: +// - cmd: Cobra command for output +// - name: Name of the skipped file +func SyncSkipped(cmd *cobra.Command, name string) { + cmd.Println(fmt.Sprintf("Skipped: %s", name)) +} + +// SyncError prints a sync error. +// +// Parameters: +// - cmd: Cobra command for output +// - errMsg: The error message +func SyncError(cmd *cobra.Command, errMsg string) { + cmd.Println(fmt.Sprintf("Error: %s", errMsg)) +} + +// SyncSummary prints the sync summary with counts. 
+// +// Parameters: +// - cmd: Cobra command for output +// - written: Number of files written +// - skipped: Number of files skipped +// - errors: Number of errors +func SyncSummary(cmd *cobra.Command, written, skipped, errors int) { + cmd.Println(fmt.Sprintf("\n%d written, %d skipped, %d errors", + written, skipped, errors)) +} diff --git a/internal/write/trigger/doc.go b/internal/write/trigger/doc.go new file mode 100644 index 000000000..ee3d2db6e --- /dev/null +++ b/internal/write/trigger/doc.go @@ -0,0 +1,14 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +// Package trigger provides formatted output helpers for trigger commands. +// +// All functions take *cobra.Command for output routing. +// Exports: [Created], [Disabled], [Enabled], [TypeHeader], +// [Entry], [BlankLine], [NoHooksFound], [Count], +// [TestingHeader], [TestInput], [Cancelled], [ContextOutput], +// [ErrorsHeader], [ErrorLine], [NoOutput]. +package trigger diff --git a/internal/write/trigger/trigger.go b/internal/write/trigger/trigger.go new file mode 100644 index 000000000..93611ed52 --- /dev/null +++ b/internal/write/trigger/trigger.go @@ -0,0 +1,164 @@ +// / ctx: https://ctx.ist +// ,'`./ do you remember? +// `.,'\ +// \ Copyright 2026-present Context contributors. +// SPDX-License-Identifier: Apache-2.0 + +package trigger + +import ( + "fmt" + + "github.com/spf13/cobra" +) + +// User-facing messages for hook list and test output. +const ( + // msgNoHooksFound is shown when no hooks are discovered. + msgNoHooksFound = "No hooks found." + // msgErrors is the section header for hook errors. + msgErrors = "Errors:" + // msgNoOutput is shown when hooks produce no output. + msgNoOutput = "No output from hooks." +) + +// Created prints confirmation that a hook script was created. 
+// +// Parameters: +// - cmd: Cobra command for output +// - path: Path to the created hook script +func Created(cmd *cobra.Command, path string) { + cmd.Println(fmt.Sprintf("Created %s", path)) +} + +// Disabled prints confirmation that a hook was disabled. +// +// Parameters: +// - cmd: Cobra command for output +// - name: Hook name +// - path: Path to the hook script +func Disabled(cmd *cobra.Command, name, path string) { + cmd.Println(fmt.Sprintf("Disabled %s (%s)", name, path)) +} + +// Enabled prints confirmation that a hook was enabled. +// +// Parameters: +// - cmd: Cobra command for output +// - name: Hook name +// - path: Path to the hook script +func Enabled(cmd *cobra.Command, name, path string) { + cmd.Println(fmt.Sprintf("Enabled %s (%s)", name, path)) +} + +// TypeHeader prints a hook type section header. +// +// Parameters: +// - cmd: Cobra command for output +// - hookType: The hook type name +func TypeHeader(cmd *cobra.Command, hookType string) { + cmd.Println(fmt.Sprintf("[%s]", hookType)) +} + +// Entry prints a single hook entry with name, status, and path. +// +// Parameters: +// - cmd: Cobra command for output +// - name: Hook name +// - status: "enabled" or "disabled" +// - path: Path to the hook script +func Entry(cmd *cobra.Command, name, status, path string) { + cmd.Println(fmt.Sprintf(" %-20s %-8s %s", name, status, path)) +} + +// BlankLine prints a blank line. Nil cmd is a no-op. +// +// Parameters: +// - cmd: Cobra command for output +func BlankLine(cmd *cobra.Command) { + if cmd == nil { + return + } + cmd.Println() +} + +// NoHooksFound prints a message indicating no hooks were found. +// +// Parameters: +// - cmd: Cobra command for output +func NoHooksFound(cmd *cobra.Command) { + cmd.Println(msgNoHooksFound) +} + +// Count prints the total hook count. 
+// +// Parameters: +// - cmd: Cobra command for output +// - count: Number of hooks +func Count(cmd *cobra.Command, count int) { + cmd.Println(fmt.Sprintf("%d hook(s)", count)) +} + +// TestingHeader prints the header for hook testing output. +// +// Parameters: +// - cmd: Cobra command for output +// - hookType: The hook type being tested +func TestingHeader(cmd *cobra.Command, hookType string) { + cmd.Println(fmt.Sprintf("Testing %s hooks...", hookType)) + cmd.Println() +} + +// TestInput prints the test input JSON block. +// +// Parameters: +// - cmd: Cobra command for output +// - inputJSON: Pretty-printed JSON input +func TestInput(cmd *cobra.Command, inputJSON string) { + cmd.Println(fmt.Sprintf("Input:\n%s", inputJSON)) + cmd.Println() +} + +// Cancelled prints a cancellation message from hook output. +// +// Parameters: +// - cmd: Cobra command for output +// - message: The cancellation reason +func Cancelled(cmd *cobra.Command, message string) { + cmd.Println(fmt.Sprintf("Cancelled: %s", message)) +} + +// ContextOutput prints context output from hook execution. +// +// Parameters: +// - cmd: Cobra command for output +// - context: The context string from hooks +func ContextOutput(cmd *cobra.Command, context string) { + cmd.Println(fmt.Sprintf("Context:\n%s", context)) + cmd.Println() +} + +// ErrorsHeader prints the errors section header. +// +// Parameters: +// - cmd: Cobra command for output +func ErrorsHeader(cmd *cobra.Command) { + cmd.Println(msgErrors) +} + +// ErrorLine prints a single error line. +// +// Parameters: +// - cmd: Cobra command for output +// - errMsg: The error message +func ErrorLine(cmd *cobra.Command, errMsg string) { + cmd.Println(fmt.Sprintf(" %s", errMsg)) +} + +// NoOutput prints a message indicating no output from hooks. 
+// +// Parameters: +// - cmd: Cobra command for output +func NoOutput(cmd *cobra.Command) { + cmd.Println(msgNoOutput) +} diff --git a/specs/commit-context-tracing.md b/specs/commit-context-tracing.md new file mode 100644 index 000000000..78116b77b --- /dev/null +++ b/specs/commit-context-tracing.md @@ -0,0 +1,377 @@ +# Spec: Commit Context Tracing (`ctx trace`) + +Link every git commit back to the decisions, tasks, learnings, and +sessions that motivated it. Today `git log` shows *what* changed and +`git blame` shows *who* — `ctx trace` shows *why*. + +--- + +## Problem + +Code changes lose their reasoning over time. A developer looking at +a six-month-old commit sees the diff but not the discussion, decision, +or lesson that drove it. The context exists — in `.context/` files, +in session histories, in task descriptions — but there is no link +from the commit to that context. + +Questions like "why did we implement it this way?" require archaeology: +reading old decisions, guessing which session produced the code, +asking teammates. The answers are often lost entirely. + +## Solution + +Embed **context pointers** in git commit trailers. A prepare-commit-msg +hook automatically detects which context is relevant — from three +sources: accumulated pending context, staged file changes, and current +working state — then injects trailers into the commit message. +`ctx trace` resolves those pointers back to the original reasoning. + +``` +Fix auth token expiry handling + +Refactored token refresh logic to handle edge case +where refresh token expires during request. + +ctx-context: decision:12, task:8, session:abc123 +``` + +--- + +## Core Concepts + +### ctx-context Trailer + +A standard git trailer added to commit messages. 
Contains one or more +comma-separated references: + +``` +ctx-context: decision:12, task:8, session:abc123 +ctx-context: learning:5 +ctx-context: "Manual note: legal compliance requirement" +``` + +**Reference types:** + +| Prefix | Points to | Example | +|--------|-----------|---------| +| `decision:` | Entry #n in DECISIONS.md | `decision:12` | +| `learning:` | Entry #n in LEARNINGS.md | `learning:5` | +| `task:` | Task #n in TASKS.md | `task:8` | +| `convention:` | Entry #n in CONVENTIONS.md | `convention:3` | +| `session:` | AI session by ID | `session:abc123` | +| `""` | Free-form context note | `"Performance fix for P1 incident"` | + +Multiple `ctx-context` trailers per commit are allowed. + +### Three-Source Detection + +The hook collects context from **three sources** — the user does not +specify it manually. + +**Source 1: Pending Context (accumulated during work)** + +As ctx commands run, they append references to an accumulator file: + +``` +.context/state/pending-context.jsonl +``` + +| Event | What gets recorded | +|-------|-------------------| +| `ctx add decision "..."` | `decision:N` appended | +| `ctx add learning "..."` | `learning:N` appended | +| `ctx add convention "..."` | `convention:N` appended | +| `ctx complete N` | `task:N` appended | +| Task marked in-progress | `task:N` appended | +| AI session starts | `session:` appended | + +Format: +```jsonl +{"ref":"decision:12","timestamp":"2026-03-14T10:00:00Z"} +{"ref":"task:8","timestamp":"2026-03-14T10:05:00Z"} +{"ref":"session:abc123","timestamp":"2026-03-14T10:00:00Z"} +``` + +This captures context that happened *before* the commit — decisions +made earlier in the session, tasks completed along the way, etc. 
+ +**Source 2: Staged File Analysis (what's being committed right now)** + +The hook inspects staged `.context/` files at commit time: + +- If DECISIONS.md is staged → diff for added `##` headers → extract + entry numbers +- If LEARNINGS.md is staged → diff for added `##` headers → extract + entry numbers +- If CONVENTIONS.md is staged → diff for added `##` headers → extract + entry numbers +- If TASKS.md is staged → diff for newly completed tasks (`- [x]` + lines added) + +This catches context changes that are part of *this* commit itself. + +**Source 3: Current Working State (active context)** + +- In-progress tasks in TASKS.md → `task:N` +- Active AI session via `CTX_SESSION_ID` environment variable → + `session:` + +This captures the broader working context even when `.context/` files +didn't change. + +**Merge & Deduplicate** + +All three sources feed into a single refs list. Duplicates are removed +(same ref from pending + staged = one trailer entry). If the merged +list is empty, the hook exits silently. + +### Recording Mechanism + +Existing ctx commands gain a single side effect — appending one line +to `pending-context.jsonl`. This is a one-line call added to each +command's run function: + +```go +// internal/trace/pending.go +func Record(ref string) error { + f, err := os.OpenFile(pendingPath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + return nil // silent fail — tracing is best-effort + } + defer f.Close() + entry := PendingEntry{Ref: ref, Timestamp: time.Now().UTC()} + return json.NewEncoder(f).Encode(entry) +} +``` + +Called from: +- `ctx add` → `trace.Record("decision:N")` after successful write +- `ctx complete` → `trace.Record("task:N")` after marking done +- Session start → `trace.Record("session:")` if env var is set + +Recording is **best-effort** — if it fails, the command still +succeeds. Context tracing never blocks normal ctx operations. + +### Optional Hook + +The hook is **not installed by default**. 
Users opt in: + +```bash +ctx hook prepare-commit-msg enable +``` + +This registers the prepare-commit-msg hook in the project's ctx hook +configuration. Users who don't want automatic context tagging simply +don't enable the hook. + +--- + +## CLI Commands + +### `ctx trace` — Query commit context + +```bash +# Show context for a specific commit +ctx trace abc123 +# Commit: abc123 "Fix auth token expiry" +# Date: 2026-03-14 +# +# Context: +# Decision #12: Use short-lived tokens with server-side refresh +# Status: Accepted | Date: 2026-03-10 +# Rationale: Short-lived tokens reduce blast radius of token theft... +# +# Task #8: Implement token rotation for compliance +# Status: completed +# +# Session: 2026-03-14-abc123 (47 messages, 12 tool calls) + +# Show context for last N commits +ctx trace --last 5 +# abc123 Fix auth token expiry → decision:12, task:8 +# def456 Add rate limiting → decision:15, learning:7 +# 789abc Refactor middleware → task:12 +# ... + +# Show context trail for a file (combines git log + trailer resolution) +ctx trace file src/auth.go +# abc123 Fix auth token expiry → decision:12, task:8 +# older1 Initial auth implementation → decision:3 +# older2 Add OAuth2 support → decision:7, task:2 + +# Show context trail for a file at a specific line range +ctx trace file src/auth.go:42-60 +# abc123 Fix auth token expiry → decision:12 + +# Raw output (for scripting) +ctx trace abc123 --json +``` + +### `ctx trace tag` — Manually tag a commit + +For commits made without the hook, or to add extra context: + +```bash +# Tag HEAD with context +ctx trace tag HEAD --note "Hotfix for production outage" + +# Tag a specific commit +ctx trace tag abc123 --note "Part of Q1 compliance initiative" +``` + +Manual tags are stored in `.context/trace/overrides.jsonl` since +git trailers cannot be added to existing commits without rewriting +history. 
+ +```json +{"commit":"abc123","refs":["\"Hotfix for production outage\""],"timestamp":"2026-03-14T10:00:00Z"} +``` + +`ctx trace` checks the history file, commit trailer, and overrides +file when resolving context. + +--- + +## Local Storage + +### Two-Layer Storage + +| File | Purpose | Lifecycle | +|------|---------|-----------| +| `state/pending-context.jsonl` | Accumulates refs during work | Truncated after each commit | +| `trace/history.jsonl` | Permanent commit→context map | Append-only, never truncated | +| `trace/overrides.jsonl` | Manual tags for existing commits | Append-only | + +``` +.context/ +├── state/ +│ └── pending-context.jsonl ← accumulates refs between commits +├── trace/ +│ ├── history.jsonl ← permanent record of all commits +│ └── overrides.jsonl ← manual tags for existing commits +``` + +### Pending Context Format + +```jsonl +{"ref":"decision:12","timestamp":"2026-03-14T10:00:00Z"} +{"ref":"task:8","timestamp":"2026-03-14T10:05:00Z"} +{"ref":"session:abc123","timestamp":"2026-03-14T10:00:00Z"} +``` + +Append-only during work. Truncated after each commit by the +prepare-commit-msg hook. + +### History Format (Permanent Record) + +```jsonl +{"commit":"abc123","refs":["decision:12","task:8","session:abc123"],"message":"Fix auth token expiry","timestamp":"2026-03-14T10:00:00Z"} +{"commit":"def456","refs":["decision:15","learning:7"],"message":"Add rate limiting","timestamp":"2026-03-14T11:30:00Z"} +{"commit":"789abc","refs":["task:12"],"message":"Refactor middleware","timestamp":"2026-03-14T14:00:00Z"} +``` + +Written by the prepare-commit-msg hook after injecting the trailer. +This is the **primary source** for `ctx trace` — it survives even +if commits are squashed, rebased, or cherry-picked and lose their +trailers. 
+ +### Override Format + +```jsonl +{"commit":"abc123","refs":["\"Hotfix for production outage\""],"timestamp":"2026-03-14T10:00:00Z"} +{"commit":"def456","refs":["decision:15"],"timestamp":"2026-03-14T11:00:00Z"} +``` + +Written by `ctx trace tag`. Append-only. + +--- + +## Reference Resolution + +`ctx trace` resolves context from **three sources**, merged and +deduplicated: + +``` +ctx trace abc123 + 1. trace/history.jsonl → refs for commit abc123 (primary) + 2. git trailer → ctx-context from commit message (portable) + 3. trace/overrides.jsonl → manual tags (supplemental) + 4. merge all, deduplicate + 5. resolve each ref +``` + +The history file is the **primary source** — always available locally. +The git trailer is the **portable copy** — travels with the commit +across forks and cherry-picks. Overrides are **supplemental** — added +after the fact by `ctx trace tag`. + +### Resolving Individual References + +When resolving a ref, `ctx trace` reads the current state of context +files: + +- **decision:12** → reads entry #12 from current DECISIONS.md +- **task:8** → reads task #8 from current TASKS.md (may be completed) +- **session:abc123** → looks up session in recall history + +If an entry has been archived (via `ctx compact`), `ctx trace` falls +back to the archive directory: + +``` +.context/archive/YYYY-MM-DD-DECISIONS.md +``` + +If the reference cannot be resolved (entry deleted, session purged), +`ctx trace` shows the raw reference with a `[not found]` marker: + +``` +Decision #12: [not found — may have been archived] +``` + + +## Examples + +### Developer asks "why was this implemented this way?" 
+ +```bash +$ git blame src/auth/token.go | head -5 +abc123 (dev1 2026-03-14) func refreshToken(ctx context.Context) { + +$ ctx trace abc123 +Commit: abc123 "Fix auth token expiry" +Date: 2026-03-14 + +Context: + Decision #12: Use short-lived tokens with server-side refresh + Status: Accepted | Date: 2026-03-10 + Rationale: Short-lived tokens reduce blast radius of token + theft. Server-side refresh avoids client-side complexity. + + Task #8: Implement token rotation for compliance + Status: completed + + Session: 2026-03-14-abc123 + 47 messages, 12 tool calls + Run `ctx recall show abc123` for full session +``` + +### Team reviews recent changes with context + +```bash +$ ctx trace --last 10 +abc123 Fix auth token expiry → decision:12, task:8 +def456 Add rate limiting middleware → decision:15, learning:7 +789abc Refactor error handling → task:12, convention:3 +aaa111 Update dependencies → (no context) +bbb222 Fix typo in README → (no context) +ccc333 Implement audit logging → decision:18, task:15, session:xyz +``` + +### Trace a file's history + +```bash +$ ctx trace file src/auth/token.go +abc123 2026-03-14 Fix auth token expiry → decision:12, task:8 +older1 2026-02-28 Initial auth implementation → decision:3 +older2 2026-01-15 Add OAuth2 support → decision:7, task:2 +``` diff --git a/specs/hub_implementation.md b/specs/hub_implementation.md new file mode 100644 index 000000000..c6f77fea1 --- /dev/null +++ b/specs/hub_implementation.md @@ -0,0 +1,134 @@ +# Shared Context Hub — Implementation & Federation + +Companion to [shared-context-hub.md](shared-context-hub.md). Contains +package layout, compliance, implementation phases, and future +federation design. 
+ +--- + +## Package Layout + +``` +internal/ +├── hub/ +│ ├── doc.go ← package documentation +│ ├── server.go ← gRPC server implementation +│ ├── client.go ← gRPC client (used by ctx connect) +│ ├── store.go ← JSONL append-only storage +│ ├── auth.go ← token generation, validation, interceptor +│ ├── types.go ← Entry, Subscription, ConnectionConfig +│ ├── encrypt.go ← connection config encryption (reuse notify pattern) +│ └── proto/ +│ └── hub.proto ← gRPC service definition +├── cli/ +│ ├── serve/ +│ │ └── shared.go ← ctx serve --shared command +│ └── connect/ +│ ├── register.go ← ctx connect register +│ ├── subscribe.go ← ctx connect subscribe +│ ├── sync.go ← ctx connect sync +│ ├── listen.go ← ctx connect listen +│ ├── publish.go ← ctx connect publish +│ └── status.go ← ctx connect status +``` + +--- + +## Compliance & Invariants + +### Design Invariants Preserved + +| Invariant | How preserved | +|-----------|---------------| +| Markdown-on-filesystem | Shared entries stored as .md in .context/shared/ | +| Zero runtime deps (core) | gRPC scoped to `internal/hub/` — not in local-only list | +| Deterministic assembly | Shared budget is additive, same files + budget = same output | +| Human authority | `--share` is explicit, shared knowledge is informational | +| Local-first | Core ctx works without hub; shared is opt-in | +| No telemetry | Hub is self-hosted, no external services | + +### Compliance Test Update + +The existing `TestNoNetworkImportsInCore` checks a curated list of +`localOnlyPackages` that must not import `net` or `net/http`. The +`internal/hub/` package is not added to that list (same approach as +`internal/notify/`). Core packages (`context`, `config`, `drift`, +`task`, `validation`, `crypto`, `assets`, `index`) remain network-free. 
+ +### Security + +- **TLS** — `grpcs://` for encrypted transport +- **Token auth** — per-client tokens, validated via gRPC interceptor +- **Encrypted config** — connection config stored with AES-256-GCM + (same pattern as webhook URL via `internal/crypto`) +- **No sensitive data** — entries are architectural knowledge, not + secrets. CONSTITUTION.md invariant on secrets still applies. + +--- + +## Implementation Phases + +### Phase 1: Foundation + +- Proto definition and code generation +- Hub server with JSONL storage +- Register, Publish, Sync RPCs +- `ctx serve --shared` and `ctx connect register/sync/publish` +- Token-based auth with encrypted local storage + +### Phase 2: Real-Time + +- Listen RPC (server-streaming with fan-out) +- `ctx connect listen` with auto-reconnect +- `ctx add --share` flag integration +- Background listener option + +### Phase 3: Agent Integration + +- `ctx agent --include-shared` with Tier 6 budget +- Shared file rendering in agent packet +- Scoring shared entries (recency + type relevance) + +### Phase 4: Operational + +- `ctx connect status` with detailed stats +- Hub-side Status RPC +- Connection health monitoring +- Graceful shutdown and reconnection + +--- + +## Future: Distributed Hub (Federation) + +The append-only, sequence-based design enables future hub-to-hub +replication: + +``` +[Hub-EU] ←──gRPC──→ [Hub-US] + ↑ ↑ + clients clients +``` + +Each hub maintains its own sequence space. Federation maps remote +sequences to local ones. The entry UUID ensures global deduplication. 
+ +### Federation Protocol + +- Hubs connect to each other as peers (bidirectional gRPC streams) +- Each hub assigns local sequence numbers to replicated entries +- Entry UUID prevents duplicate ingestion +- Conflict-free: append-only means no write conflicts between hubs +- Partition-tolerant: hubs queue entries during disconnection, + replay on reconnect using the same since-sequence mechanism + clients use + +### Hub Discovery + +- Manual configuration: `ctx serve --shared --peer grpcs://hub-us:9900` +- Future: DNS-based discovery or a lightweight registry + +### Consistency Model + +- **Eventual consistency** — all hubs converge to the same entry set +- No ordering guarantee across hubs (local sequence only) +- Entry UUID + timestamp allows consumers to sort globally if needed diff --git a/specs/shared-context-hub.md b/specs/shared-context-hub.md new file mode 100644 index 000000000..220acf139 --- /dev/null +++ b/specs/shared-context-hub.md @@ -0,0 +1,367 @@ +# Spec: Shared Context Hub (`ctx serve --shared` + `ctx connect`) + +Share knowledge between ctx instances across projects via a centralized +gRPC hub. Projects publish decisions, learnings, and conventions to a +shared knowledge base; other projects receive them in real-time and use +them alongside local context for better-informed agent work. + +--- + +## Problem + +Each ctx instance is an island. A team running five microservices has +five separate `.context/` directories. When project-alpha learns "gRPC +deadline must be set on client side," project-beta discovers the same +lesson independently — days or weeks later, often the hard way. + +There is no mechanism to share knowledge across projects. Copy-pasting +entries between `.context/` directories is manual, error-prone, and +doesn't scale. + +## Solution + +A **hub-and-spoke** architecture where a shared server aggregates +published entries from multiple ctx instances and streams them to +subscribers in real-time. 
+ +``` +[project-alpha ctx] ──gRPC──→ [ctx hub :9900] ←──gRPC── [project-beta ctx] +[project-gamma ctx] ──gRPC──┘ └──gRPC── [project-delta ctx] +``` + +**Key principles:** + +1. **Append-only** — published entries are never modified or deleted +2. **Curated sharing** — each project chooses what to publish (`--share`) +3. **Local authority** — shared knowledge is informational, not imposed +4. **Explicit action** — nothing enters or leaves local context without + the user's explicit intent + +--- + +## Core Concepts + +### Entry — the unit of sharing + +Every published piece of context is an Entry: + +```go +type Entry struct { + ID string // UUID, globally unique + Type string // entry.Decision, entry.Learning, entry.Convention, entry.Task + Content string // the actual text (markdown) + Origin string // project name that published it + Author string // optional, who wrote it + Timestamp time.Time // when it was published + Sequence uint64 // monotonic, assigned by hub +} +``` + +- Entries are **append-only** — once published, never modified or deleted +- Each entry gets a **sequence number** from the hub (monotonically + increasing global counter) +- Clients track their last-seen sequence to know where to resume + +### Subscription — what a client cares about + +```go +type Subscription struct { + Types []string // e.g., ["decision", "learning"] +} +``` + +Type-based filter. The server only streams entries matching the +client's subscription. 
+
+
+---
+
+## gRPC Service Definition
+
+### Proto Service
+
+```protobuf
+syntax = "proto3";
+package ctx.hub.v1;
+
+option go_package = "github.com/ActiveMemory/ctx/internal/hub/hubpb";
+
+service CtxHub {
+  // Auth — one-time registration
+  rpc Register(RegisterRequest) returns (RegisterResponse);
+
+  // Publish entries to the hub
+  rpc Publish(PublishRequest) returns (PublishResponse);
+
+  // Initial sync — pull all entries matching subscription since a sequence
+  rpc Sync(SyncRequest) returns (stream Entry);
+
+  // Incremental updates — long-lived server stream
+  rpc Listen(ListenRequest) returns (stream Entry);
+
+  // Query hub state
+  rpc Status(StatusRequest) returns (StatusResponse);
+}
+
+message RegisterRequest {
+  string admin_token = 1;   // admin token from server startup
+  string project_name = 2;  // this project's identifier
+}
+
+message RegisterResponse {
+  string client_id = 1;     // assigned client identifier
+  string client_token = 2;  // token for future RPCs
+}
+
+message PublishRequest {
+  repeated Entry entries = 1;
+}
+
+message PublishResponse {
+  repeated uint64 sequences = 1;  // assigned sequence numbers
+}
+
+message SyncRequest {
+  repeated string types = 1;   // entry types to sync
+  uint64 since_sequence = 2;   // 0 for full sync
+}
+
+message ListenRequest {
+  repeated string types = 1;
+  uint64 since_sequence = 2;
+}
+
+message StatusRequest {}
+
+message StatusResponse {
+  uint64 total_entries = 1;
+  uint32 connected_clients = 2;
+  map<string, uint64> entries_by_type = 3;
+  map<string, uint64> entries_by_project = 4;
+}
+
+message Entry {
+  string id = 1;
+  string type = 2;
+  string content = 3;
+  string origin = 4;
+  string author = 5;
+  int64 timestamp = 6;   // Unix epoch seconds
+  uint64 sequence = 7;
+}
+```
+
+### Message Flow
+
+```
+CLIENT                               HUB
+  │                                   │
+  │── Register(token, project) ────→  │  one-time setup
+  │←── RegisterResponse(client_id) ──│
+  │                                   │
+  │── Sync(types, since_seq=0) ────→  │  initial pull
+  │←── Entry stream ─────────────────│  all matching entries
+  │←── 
Entry stream ─────────────────│
+  │←── (stream closes) ─────────────│
+  │                                   │
+  │── Listen(types, since_seq=N) ──→  │  long-lived stream
+  │←── Entry (when available) ───────│  real-time updates
+  │←── Entry (when available) ───────│
+  │         ...                       │
+  │                                   │
+  │── Publish(entries) ────────────→  │  push local entries
+  │←── PublishResponse(sequences) ──│
+  │                                   │  hub fans out to
+  │                                   │  other listeners
+```
+
+### Authentication
+
+- **Register** — client presents admin token + project name. Hub returns
+  a client-specific token. One-time operation.
+- **All other RPCs** — client token as gRPC metadata:
+  `authorization: Bearer <client_token>`
+- Server validates via unary/stream interceptor before any handler runs.
+- TLS encryption via `--tls-cert` and `--tls-key` flags.
+
+---
+
+## CLI Commands
+
+### Server Side
+
+```bash
+# Start the shared hub
+ctx serve --shared --port 9900
+ctx serve --shared --port 9900 --tls-cert cert.pem --tls-key key.pem
+
+# First run generates an admin token, printed to stderr:
+#   Hub started on :9900
+#   Admin token: ctx_adm_7f3a... (save this, shown only once)
+```
+
+The hub stores entries in an append-only log. Storage options:
+
+- **v1**: Single JSONL file (`hub-data/entries.jsonl`) — simple, good
+  for small-to-medium deployments
+- **Future**: SQLite for indexed queries and better concurrent access
+
+### Client Side
+
+```bash
+# 1. Register with the hub (one-time)
+ctx connect register grpcs://hub.example.com:9900 --token ctx_adm_7f3a...
+# → stores encrypted config in .context/.connect.enc
+# → registers this project with the hub
+
+# 2. Set what you want to receive
+ctx connect subscribe decisions learnings
+# → updates local subscription config
+
+# 3. Initial sync (pull all matching entries from hub)
+ctx connect sync
+# → streams all matching entries
+# → writes to .context/shared/
+# → records last-seen sequence
+
+# 4. 
Listen for real-time updates (long-lived) +ctx connect listen +# → gRPC server-stream, writes new entries to .context/shared/ +# → reconnects automatically on disconnect +# → ctrl-c to stop + +# 5. Publish a local entry to the hub +ctx add decision "Use UTC timestamps everywhere" --share +# → adds to local DECISIONS.md AND pushes to hub + +ctx connect publish --entry decision:5 +# → pushes existing local entry #5 to hub + +ctx connect publish --new +# → pushes all entries created since last publish + +# 6. Check connection status +ctx connect status +# → server, connected, last sync, subscription, entry counts +``` + +--- + +## Local File Layout + +Shared entries land in a **separate directory**, never mixed with +local context: + +``` +.context/ +├── DECISIONS.md ← local (this project's decisions) +├── LEARNINGS.md ← local (this project's learnings) +├── CONVENTIONS.md ← local (this project's conventions) +├── shared/ ← from the hub (read-only) +│ ├── decisions.md ← shared decisions, append-only +│ ├── learnings.md ← shared learnings, append-only +│ ├── conventions.md ← shared conventions, append-only +│ └── .sync-state.json ← last sequence, subscription config +``` + +### Shared File Format + +Each shared file uses the same markdown format as local files, with +origin tags: + +```markdown +## [2026-03-14] Use UTC timestamps everywhere + +**Origin**: project-alpha + +All timestamps in APIs, databases, and logs must use UTC. +Timezone conversion happens only at the UI layer. + +--- + +## [2026-03-14] Never mock the database in integration tests + +**Origin**: project-beta + +Mocked tests passed but prod migration failed. Integration tests +must hit a real database instance. 
+ +--- +``` + +--- + +## Agent Integration + + +### Usage + +```bash +ctx agent # local only (default, unchanged) +ctx agent --include-shared # local + shared knowledge +ctx agent --include-shared --shared-budget 3000 # custom shared budget +``` + +### How Agents Use Shared Knowledge + +No special logic needed. Context files already influence agent behavior +by being present in the context window. Shared knowledge works the +same way — it's additional context the agent reads and weighs +alongside local context. + +When the agent loads shared knowledge, it sees: + +```markdown +## Shared Knowledge (from hub) +- [decision] project-alpha: "Use UTC timestamps everywhere" +- [learning] project-beta: "Never mock the database in integration tests" +- [convention] project-alpha: "All APIs return JSON envelope format" +``` + +The agent uses these naturally when: + +- **Making decisions** → shared decisions inform cross-project consistency +- **Writing code** → shared conventions guide patterns +- **Avoiding mistakes** → shared learnings prevent repeating others' bugs + +--- + +## Connection Lifecycle + +``` +1. ctx connect register grpcs://hub:9900 --token xxx + └── one-time: registers project, stores encrypted config + +2. ctx connect subscribe decisions learnings + └── declares what entry types to receive + +3. ctx connect sync + └── pulls all matching entries from hub → .context/shared/ + +4. ctx connect listen + └── long-lived stream for incremental updates + └── auto-reconnects on disconnect + └── writes new entries to .context/shared/ as they arrive + +5. ctx add decision "..." 
--share + └── normal local add + publish to hub in one step +``` + +--- + +## Hub Storage + +### v1: JSONL Append-Only Log + +``` +hub-data/ +├── entries.jsonl ← one JSON object per line +├── clients.json ← registered clients and tokens +└── meta.json ← sequence counter, hub metadata +``` + +Each line in `entries.jsonl`: + +```json +{"id":"uuid","type":"decision","content":"...","origin":"project-alpha","author":"","timestamp":1710422400,"sequence":1} +``` diff --git a/specs/task-allocation.md b/specs/task-allocation.md new file mode 100644 index 000000000..d0f252596 --- /dev/null +++ b/specs/task-allocation.md @@ -0,0 +1,202 @@ +# Spec: Task Allocation Across AI Agents (`ctx fleet`) + +Route tasks to the best-fit AI agent based on context window size, +token budget, and capabilities. ctx becomes the orchestrator that +knows what work exists and which agent should handle it. + +--- + +## Problem + +A developer using Claude Code, Cline, Cursor, and Copilot has four +AI agents — each with different context windows, usage limits, and +strengths. Today each agent works in isolation. There is no way to +route tasks intelligently, track which agent is working on what, or +avoid wasting expensive credits on simple work. + +## Solution + +**ctx fleet** — a task allocation system built on the shared context +hub. One ctx instance acts as the host, others connect as agents. +The host classifies tasks, allocates them, and publishes assignments +through the hub. + +``` + ┌─────────────────────┐ + │ ctx hub (:9900) │ + │ allocation engine │ + └──────┬──────────────┘ + │ gRPC + ┌──────────────┼──────────────┐ + │ │ │ + ┌───────┴──────┐ ┌─────┴──────┐ ┌─────┴──────┐ + │ ctx (Claude) │ │ ctx (Cline)│ │ ctx (Cursor)│ + │ 1M context │ │ 200k ctx │ │ 200k ctx │ + │ 5M tokens/d │ │ 1M tokens/d│ │ 1.5M tok/d │ + └──────────────┘ └────────────┘ └────────────┘ +``` + +**Key principles:** + +1. **Hub-based** — reuses the shared context hub +2. 
**Automatic classification** — ctx estimates task complexity + via token estimation +3. **Human authority** — plan/dispatch split, manual override +4. **Tiered credit tracking** — real API where available, + session counting as fallback + +--- + +## Agent Registry + +Each agent is registered in `.context/fleet.yaml` with its context +window, daily token budget, tracking mode, and capabilities. + +```yaml +agents: + - id: claude-pro + tool: claude-code + context_window: 1000000 + budget: + daily_tokens: 5000000 + tracking: api # api | oauth | session + capabilities: [architecture, multi-file, testing] + + - id: cline-free + tool: cline + context_window: 200000 + budget: + daily_tokens: 1000000 + tracking: session + capabilities: [single-file, quick-fix, testing] + + - id: cursor-team + tool: cursor + context_window: 200000 + budget: + daily_tokens: 1500000 + tracking: session + capabilities: [single-file, quick-fix, boilerplate] +``` + +--- + +## Task Classification + +ctx automatically estimates the token cost of each task: + +- Parse task description for file references (explicit paths or + implicit mentions) +- Count actual tokens of referenced files on disk +- Factor in subtask count and description complexity +- Map estimate to agent context windows + +| Token Estimate | Suitable Agents | +|----------------|-----------------| +| < 50k | Any agent | +| 50k–150k | 200k+ context window | +| 150k–500k | 1M context window | +| > 500k | Split task or 1M with budget management | + +--- + +## Allocation Algorithm + +Weighted best-fit: heaviest tasks assigned first. + +- Filter agents by context window and remaining budget +- Score by: capability match (0.4) + budget remaining (0.3) + + context window headroom (0.3) +- Assign to highest-scoring agent +- Human override always available via manual assignment + +--- + +## Communication + +Uses the shared context hub ([shared-context-hub.md](shared-context-hub.md)). 
+Two new entry types: + +- `assignment` — host dispatches task to an agent +- `assignment-update` — agent reports status back + +``` +HOST AGENT + │── Publish(assignment) ────────→ │ receives via ctx connect listen + │ │ works on task... + │←── Publish(assignment-update) ──│ reports completion +``` + +--- + +## Credit Tracking + +### Tiered Approach + +Each provider exposes different levels of usage visibility: + +| Provider | Access | Method | +|----------|--------|--------| +| Claude (API) | Full | Usage & Cost Admin API | +| Claude (Pro/Max) | Partial | Internal OAuth endpoint | +| Cline | Depends | Uses underlying provider's API | +| Cursor | None | Dashboard only | +| Copilot | None | Subscription-based | + +ctx uses **three tracking modes**, selected per agent in fleet.yaml: + +- **`api`** — queries provider usage API for real token consumption + (Anthropic, OpenRouter) +- **`oauth`** — reads Claude Pro/Max quota from the same endpoint + Claude Code uses internally (best-effort, may break) +- **`session`** — counts tokens from session history (fallback for + Cursor, Copilot, anything without an API) + +### Error Detection + +Across all tiers, ctx also watches session logs for rate limit +signals (HTTP 429, "quota exceeded" messages, throttling gaps). +Throttled agents are excluded from allocation until next reset. + +### Budget Reset + +Daily auto-reset or manual. 
Configured in `.ctxrc`:
+
+```yaml
+fleet:
+  credit_reset: daily
+```
+
+---
+
+## CLI Commands
+
+```bash
+# Setup
+ctx fleet init                         # create fleet.yaml
+ctx fleet agents                       # list agents with status
+ctx fleet agents add <id> / remove <id>
+
+# Classification & allocation
+ctx fleet classify                     # show task token estimates
+ctx fleet plan                         # generate allocation (dry-run)
+ctx fleet dispatch                     # publish assignments to hub
+ctx fleet assign <task> --agent <id>   # manual override
+
+# Monitoring
+ctx fleet status                       # show assignment states
+ctx fleet credits refresh              # refresh from APIs/sessions
+ctx fleet credits reset                # reset daily budgets
+
+# Agent side
+ctx fleet report --status <status>     # report completion/blocked/rejected
+```
+
+---
+
+## Integration
+
+- **Shared hub**: primary communication channel for assignments
+- **Commit tracing**: adds `agent:<id>` to trace refs
+- **Webhooks**: fleet.dispatch/complete/blocked/rejected events
+- **`ctx complete`**: auto-updates fleet assignment status