diff --git a/.claude/skills/codemap/SKILL.md b/.claude/skills/codemap/SKILL.md index 674fb47..bb1f6ab 100644 --- a/.claude/skills/codemap/SKILL.md +++ b/.claude/skills/codemap/SKILL.md @@ -1,6 +1,6 @@ --- name: codemap -description: Analyze codebase structure, dependencies, and changes. Use when user asks about project structure, where code is located, how files connect, what changed, or before starting any coding task. Provides instant architectural context. +description: Analyze codebase structure, dependencies, changes, and cross-agent handoffs. Use when user asks about project structure, where code is located, how files connect, what changed, how to resume work, or before starting any coding task. --- # Codemap @@ -14,6 +14,13 @@ codemap . # Project structure and top files codemap --deps # Dependency flow (imports/functions) codemap --diff # Changes vs main branch codemap --diff --ref # Changes vs specific branch +codemap handoff . # Build + save handoff artifact +codemap handoff --latest . # Read latest saved handoff +codemap handoff --json . # Machine-readable handoff payload +codemap handoff --since 2h . # Limit timeline lookback when building +codemap handoff --prefix . # Stable prefix snapshot only +codemap handoff --delta . # Recent delta snapshot only +codemap handoff --detail a.go . # Lazy-load full detail for one changed file ``` ## When to Use @@ -39,6 +46,12 @@ codemap --diff --ref # Changes vs specific branch - Assessing what might break - Use `--ref ` when comparing against something other than main +### ALWAYS run `codemap handoff` when: +- Handing work from one agent to another (Claude, Codex, MCP client) +- Resuming work after a break and you want a compact recap +- User asks "what should the next agent know?" 
+- You want a durable summary in `.codemap/handoff.latest.json` + ## Output Interpretation ### Tree View (`codemap .`) @@ -58,6 +71,22 @@ codemap --diff --ref # Changes vs specific branch - `(+N -M)` = lines added/removed - Warning icons show files imported by others (impact analysis) +### Handoff (`codemap handoff`) +- layered output: `prefix` (stable hubs/context) + `delta` (recent changed-file stubs + timeline) +- changed file transport uses stubs (`path`, `hash`, `status`, `size`) for lower context cost +- `risk_files` highlights high-impact changed files when dependency context is available +- includes deterministic hashes (`prefix_hash`, `delta_hash`, `combined_hash`) and cache metrics +- `--latest` reads saved artifact without rebuilding + +## Daemon and Hooks + +- With daemon state: handoff includes richer timeline and better risk context. +- Without daemon state: handoff still works using git-based changed files. +- Hook behavior: + - `session-stop` writes `.codemap/handoff.latest.json` + - `session-start` may show recent handoff summary (24h freshness window) + - session-start structure output is capped/adaptive for large repos + ## Examples **User asks:** "Where is the authentication handled?" @@ -71,3 +100,9 @@ codemap --diff --ref # Changes vs specific branch **User asks:** "I want to refactor the utils module" **Action:** Run `codemap --deps` first to see what depends on utils before making changes. + +**User asks:** "I'm switching to another agent, what should I pass along?" +**Action:** Run `codemap handoff .` and share the summary (or `--json` for tools). + +**User asks:** "I just came back, what was in progress?" +**Action:** Run `codemap handoff --latest .` and continue from that state. 
diff --git a/.gitignore b/.gitignore index 3f563db..e9dea3e 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,9 @@ hub-check # Runtime state .codemap/ +codemap-dev +dev_codemap +firebase-debug.log # Local settings .claude/settings.local.json diff --git a/README.md b/README.md index 37c454b..883ee38 100644 --- a/README.md +++ b/README.md @@ -33,6 +33,7 @@ codemap --exclude .xcassets,Fonts,.png . # Hide assets codemap --depth 2 . # Limit depth codemap --diff # What changed vs main codemap --deps . # Dependency flow +codemap handoff . # Save cross-agent handoff summary codemap github.com/user/repo # Remote GitHub repo ``` @@ -133,9 +134,52 @@ Uses a shallow clone to a temp directory (fast, no history, auto-cleanup). If yo **Hooks (Recommended)** — Automatic context at session start, before/after edits, and more. → See [docs/HOOKS.md](docs/HOOKS.md) -**MCP Server** — Deep integration with 7 tools for codebase analysis. +**MCP Server** — Deep integration with project analysis + handoff tools. → See [docs/MCP.md](docs/MCP.md) +## Multi-Agent Handoff + +codemap now supports a shared handoff artifact so you can switch between agents (Claude, Codex, MCP clients) without re-briefing. + +```bash +codemap handoff . # Build + save layered handoff artifacts +codemap handoff --latest . # Read latest saved artifact +codemap handoff --json . # Machine-readable handoff payload +codemap handoff --since 2h . # Limit timeline lookback window +codemap handoff --prefix . # Stable prefix layer only +codemap handoff --delta . # Recent delta layer only +codemap handoff --detail a.go . # Lazy-load full detail for one changed file +codemap handoff --no-save . 
# Build/read without writing artifacts +``` + +What it captures (layered for cache reuse): +- `prefix` (stable): hub summaries + repo file-count context +- `delta` (dynamic): changed file stubs (`path`, `hash`, `status`, `size`), risk files, recent events, next steps +- deterministic hashes: `prefix_hash`, `delta_hash`, `combined_hash` +- cache metrics: reuse ratio + unchanged bytes vs previous handoff + +Artifacts written: +- `.codemap/handoff.latest.json` (full artifact) +- `.codemap/handoff.prefix.json` (stable prefix snapshot) +- `.codemap/handoff.delta.json` (dynamic delta snapshot) +- `.codemap/handoff.metrics.log` (append-only metrics stream, one JSON line per save) + +Save defaults: +- CLI saves by default; use `--no-save` to make generation read-only. +- MCP does not save by default; set `save=true` to persist artifacts. + +Compatibility note: +- legacy top-level fields (`changed_files`, `risk_files`, etc.) are still included for compatibility and will be removed in a future schema version after migration. 
+ +Why this matters: +- default transport is compact stubs (low context cost) +- full per-file context is lazy-loaded only when needed (`--detail` / `file=...`) +- output is deterministic and budgeted to reduce context churn across agent turns + +Hook integration: +- `session-stop` writes `.codemap/handoff.latest.json` +- `session-start` shows a compact recent handoff summary (24h freshness window) + **CLAUDE.md** — Add to your project root to teach Claude when to run codemap: ```bash cp /path/to/codemap/CLAUDE.md your-project/ @@ -147,6 +191,7 @@ cp /path/to/codemap/CLAUDE.md your-project/ - [x] Tree depth limiting (`--depth`) - [x] File filtering (`--only`, `--exclude`) - [x] Claude Code hooks & MCP server +- [x] Cross-agent handoff artifact (`.codemap/handoff.latest.json`) - [x] Remote repo support (GitHub, GitLab) - [ ] Enhanced analysis (entry points, key types) diff --git a/cmd/hooks.go b/cmd/hooks.go index da6d04e..dfa7601 100644 --- a/cmd/hooks.go +++ b/cmd/hooks.go @@ -9,10 +9,12 @@ import ( "os/exec" "path/filepath" "regexp" + "sort" "strconv" "strings" "time" + "codemap/handoff" "codemap/limits" "codemap/scanner" "codemap/watch" @@ -149,20 +151,16 @@ func hookSessionStart(root string) error { cmd.Run() output := buf.String() - const maxBytes = limits.MaxContextOutputBytes - - if len(output) > maxBytes { - // Truncate and add warning - output = output[:maxBytes] - // Find last newline to avoid cutting mid-line - if idx := strings.LastIndex(output, "\n"); idx > maxBytes-1000 { - output = output[:idx] - } + if len(output) > limits.MaxStructureOutputBytes { repoSummary := "repo size unknown" if fileCountKnown { repoSummary = fmt.Sprintf("repo has %d files", fileCount) } - output += "\n\n... (truncated - " + repoSummary + ", use `codemap .` for full tree)\n" + output = limits.TruncateAtLineBoundary( + output, + limits.MaxStructureOutputBytes, + "\n\n... 
(truncated - "+repoSummary+", use `codemap .` for full tree)\n", + ) } fmt.Print(output) @@ -185,13 +183,23 @@ func hookSessionStart(root string) error { fmt.Printf("ℹ️ Hub analysis skipped for large repo (%d files)\n", fileCount) } - // Show diff vs main if on a feature branch - showDiffVsMain(root, fileCount, fileCountKnown) + currentBranch, branchKnown := gitCurrentBranch(root) + recentHandoff := getRecentHandoff(root) + recentHandoffMatchesBranch := handoffMatchesBranch(recentHandoff, currentBranch, branchKnown) + hasRecentHandoffChanges := recentHandoffMatchesBranch && handoffHasChangedFiles(recentHandoff) - // Show last session context if resuming work - if len(lastSessionEvents) > 0 { + // Show diff vs main only when we do not already have a recent structured handoff. + if !hasRecentHandoffChanges { + showDiffVsMain(root, fileCount, fileCountKnown) + } + + // Show last session context only when recent handoff is unavailable/incomplete. + if len(lastSessionEvents) > 0 && !hasRecentHandoffChanges { showLastSessionContext(root, lastSessionEvents) } + if recentHandoffMatchesBranch { + showRecentHandoffSummary(recentHandoff) + } return nil } @@ -313,17 +321,83 @@ func showLastSessionContext(root string, events []string) { fmt.Println() fmt.Println("🕐 Last session worked on:") - count := 0 - for file, op := range files { - if count >= 5 { + orderedFiles := make([]string, 0, len(files)) + for file := range files { + orderedFiles = append(orderedFiles, file) + } + sort.Strings(orderedFiles) + + for i, file := range orderedFiles { + op := files[file] + if i >= 5 { fmt.Printf(" ... 
and %d more files\n", len(files)-5) break } fmt.Printf(" • %s (%s)\n", file, strings.ToLower(op)) - count++ } } +func getRecentHandoff(root string) *handoff.Artifact { + artifact, err := handoff.ReadLatest(root) + if err != nil || artifact == nil { + return nil + } + if time.Since(artifact.GeneratedAt) > 24*time.Hour { + return nil + } + return artifact +} + +func handoffHasChangedFiles(artifact *handoff.Artifact) bool { + if artifact == nil { + return false + } + return len(artifact.Delta.Changed) > 0 || len(artifact.ChangedFiles) > 0 +} + +func handoffMatchesBranch(artifact *handoff.Artifact, currentBranch string, branchKnown bool) bool { + if artifact == nil || !branchKnown { + return false + } + return strings.TrimSpace(artifact.Branch) == strings.TrimSpace(currentBranch) +} + +func gitCurrentBranch(root string) (string, bool) { + cmd := exec.Command("git", "rev-parse", "--abbrev-ref", "HEAD") + cmd.Dir = root + out, err := cmd.Output() + if err != nil { + return "", false + } + branch := strings.TrimSpace(string(out)) + if branch == "" { + return "", false + } + return branch, true +} + +func showRecentHandoffSummary(artifact *handoff.Artifact) { + if artifact == nil { + return + } + summary := handoff.RenderCompact(artifact, 5) + if summary == "" { + return + } + + if len(summary) > limits.MaxHandoffCompactBytes { + summary = limits.TruncateAtLineBoundary( + summary, + limits.MaxHandoffCompactBytes, + "\n ... 
(handoff summary truncated)\n", + ) + } + + fmt.Println() + fmt.Println("🤝 Recent handoff:") + fmt.Print(summary) +} + // startDaemon launches the watch daemon in background func startDaemon(root string) { exe, err := os.Executable() @@ -587,10 +661,69 @@ func hookSessionStop(root string) error { } } + if err := writeSessionHandoff(root, state); err == nil { + fmt.Printf("🤝 Saved handoff to .codemap/handoff.latest.json\n") + } + fmt.Println() return nil } +func writeSessionHandoff(root string, state *watch.State) error { + baseRef := resolveHandoffBaseRef(root) + artifact, err := handoff.Build(root, handoff.BuildOptions{ + State: state, + BaseRef: baseRef, + }) + if err != nil { + return err + } + return handoff.WriteLatest(root, artifact) +} + +func resolveHandoffBaseRef(root string) string { + if remoteDefault, ok := gitSymbolicRef(root, "refs/remotes/origin/HEAD"); ok && remoteDefault != "" { + if gitRefExists(root, remoteDefault) { + return remoteDefault + } + } + + for _, ref := range []string{"main", "master", "trunk", "develop"} { + if gitRefExists(root, ref) { + return ref + } + } + + for _, ref := range []string{"origin/main", "origin/master", "origin/trunk", "origin/develop"} { + if gitRefExists(root, ref) { + return ref + } + } + + // Last-resort fallback that always exists in committed repos. 
+ return "HEAD" +} + +func gitRefExists(root, ref string) bool { + cmd := exec.Command("git", "rev-parse", "--verify", "--quiet", ref) + cmd.Dir = root + return cmd.Run() == nil +} + +func gitSymbolicRef(root, ref string) (string, bool) { + cmd := exec.Command("git", "symbolic-ref", "--quiet", "--short", ref) + cmd.Dir = root + out, err := cmd.Output() + if err != nil { + return "", false + } + value := strings.TrimSpace(string(out)) + if value == "" { + return "", false + } + return value, true +} + // stopDaemon stops the watch daemon func stopDaemon(root string) { if !watch.IsRunning(root) { diff --git a/cmd/hooks_base_ref_test.go b/cmd/hooks_base_ref_test.go new file mode 100644 index 0000000..1bd4b68 --- /dev/null +++ b/cmd/hooks_base_ref_test.go @@ -0,0 +1,63 @@ +package cmd + +import ( + "os" + "os/exec" + "path/filepath" + "testing" +) + +func runGitTestCmd(t *testing.T, dir string, args ...string) { + t.Helper() + cmd := exec.Command("git", args...) + cmd.Dir = dir + if out, err := cmd.CombinedOutput(); err != nil { + t.Fatalf("git %v failed: %v\n%s", args, err, string(out)) + } +} + +func makeRepoOnBranch(t *testing.T, branch string) string { + t.Helper() + root := t.TempDir() + + runGitTestCmd(t, root, "init") + + if err := os.WriteFile(filepath.Join(root, "main.go"), []byte("package main\n"), 0644); err != nil { + t.Fatal(err) + } + runGitTestCmd(t, root, "add", ".") + runGitTestCmd(t, root, "-c", "user.name=Test", "-c", "user.email=test@example.com", "commit", "-m", "init") + runGitTestCmd(t, root, "branch", "-M", branch) + + return root +} + +func TestResolveHandoffBaseRef(t *testing.T) { + if _, err := exec.LookPath("git"); err != nil { + t.Skip("git not available") + } + + t.Run("prefers main branch when present", func(t *testing.T) { + root := makeRepoOnBranch(t, "main") + got := resolveHandoffBaseRef(root) + if got != "main" { + t.Fatalf("expected base ref main, got %q", got) + } + }) + + t.Run("falls back to master when main is absent", func(t 
*testing.T) { + root := makeRepoOnBranch(t, "master") + got := resolveHandoffBaseRef(root) + if got != "master" { + t.Fatalf("expected base ref master, got %q", got) + } + }) + + t.Run("falls back to HEAD when no known default branch exists", func(t *testing.T) { + root := makeRepoOnBranch(t, "feature/no-default") + got := resolveHandoffBaseRef(root) + if got != "HEAD" { + t.Fatalf("expected base ref HEAD, got %q", got) + } + }) +} diff --git a/cmd/hooks_test.go b/cmd/hooks_test.go index 992f73d..0f4b68e 100644 --- a/cmd/hooks_test.go +++ b/cmd/hooks_test.go @@ -9,6 +9,8 @@ import ( "regexp" "strings" "testing" + + "codemap/handoff" ) // TestHubInfoIsHub tests the hub detection threshold (3+ importers) @@ -445,6 +447,118 @@ func TestHubInfoWithMultipleHubs(t *testing.T) { } } +func TestHandoffHasChangedFiles(t *testing.T) { + tests := []struct { + name string + artifact *handoff.Artifact + want bool + }{ + { + name: "nil artifact", + artifact: nil, + want: false, + }, + { + name: "legacy changed files", + artifact: &handoff.Artifact{ + ChangedFiles: []string{"main.go"}, + }, + want: true, + }, + { + name: "delta changed stubs", + artifact: &handoff.Artifact{ + Delta: handoff.DeltaSnapshot{ + Changed: []handoff.FileStub{{Path: "main.go"}}, + }, + }, + want: true, + }, + { + name: "no changed files", + artifact: &handoff.Artifact{ + Delta: handoff.DeltaSnapshot{ + Changed: []handoff.FileStub{}, + }, + ChangedFiles: []string{}, + }, + want: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := handoffHasChangedFiles(tt.artifact) + if got != tt.want { + t.Fatalf("handoffHasChangedFiles() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestHandoffMatchesBranch(t *testing.T) { + tests := []struct { + name string + artifact *handoff.Artifact + currentBranch string + branchKnown bool + want bool + }{ + { + name: "nil artifact", + artifact: nil, + currentBranch: "feature/a", + branchKnown: true, + want: false, + }, + { + name: 
"matching branch", + artifact: &handoff.Artifact{ + Branch: "feature/a", + }, + currentBranch: "feature/a", + branchKnown: true, + want: true, + }, + { + name: "different branch", + artifact: &handoff.Artifact{ + Branch: "feature/old", + }, + currentBranch: "feature/new", + branchKnown: true, + want: false, + }, + { + name: "unknown current branch", + artifact: &handoff.Artifact{ + Branch: "feature/a", + }, + currentBranch: "", + branchKnown: false, + want: false, + }, + { + name: "trimmed whitespace matches", + artifact: &handoff.Artifact{ + Branch: " feature/a ", + }, + currentBranch: "feature/a", + branchKnown: true, + want: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := handoffMatchesBranch(tt.artifact, tt.currentBranch, tt.branchKnown) + if got != tt.want { + t.Fatalf("handoffMatchesBranch() = %v, want %v", got, tt.want) + } + }) + } +} + // captureOutput captures stdout during function execution func captureOutput(f func()) string { old := os.Stdout diff --git a/docs/HOOKS.md b/docs/HOOKS.md index 4d402ac..11e48d6 100644 --- a/docs/HOOKS.md +++ b/docs/HOOKS.md @@ -12,7 +12,7 @@ Turn Claude into a codebase-aware assistant. 
These hooks give Claude automatic c | **Before editing** | Claude sees who imports the file AND what hubs it imports | | **After editing** | Claude sees the impact of what was just changed | | **Before memory clears** | Hub state is saved so Claude remembers what's important | -| **Session ends** | Timeline of all edits with line counts and hub warnings | +| **Session ends** | Timeline of all edits + saves layered handoff artifacts for next agent/session | --- @@ -166,6 +166,21 @@ Edit Timeline: 14:30:11 CREATE cmd/new_feature.go +45 Stats: 8 events, 3 files touched, +63 lines, 1 hub edits +🤝 Saved handoff to .codemap/handoff.latest.json +``` + +### Next Session Start (Handoff Resume) +If a recent handoff exists **for the current branch**, session start includes a compact resume block: +``` +🤝 Recent handoff: + Branch: feature-x + Base ref: main + Changed files: 6 + Top changes: + • cmd/hooks.go + • mcp/main.go + Risk files: + ⚠️ scanner/types.go (10 importers) ``` --- @@ -179,7 +194,22 @@ Stats: 8 events, 3 files touched, +63 lines, 1 hub edits | `codemap hook post-edit` | `PostToolUse` (Edit\|Write) | Impact of changes (same as pre-edit) | | `codemap hook prompt-submit` | `UserPromptSubmit` | Hub context for mentioned files + session progress | | `codemap hook pre-compact` | `PreCompact` | Saves hub state to .codemap/hubs.txt | -| `codemap hook session-stop` | `SessionEnd` | Edit timeline with line counts and stats | +| `codemap hook session-stop` | `SessionEnd` | Edit timeline + writes `.codemap/handoff.latest.json`, `.codemap/handoff.prefix.json`, `.codemap/handoff.delta.json` | + +--- + +## Handoff Command + +Use handoff directly when switching between agents: + +```bash +codemap handoff . # build + save handoff +codemap handoff --latest . # read latest saved handoff +codemap handoff --json . # JSON payload for tooling +codemap handoff --prefix . # stable prefix layer only +codemap handoff --delta . # dynamic delta layer only +codemap handoff --detail a.go . 
# lazy-load full detail for one changed file +``` --- diff --git a/docs/MCP.md b/docs/MCP.md index e629726..78357a6 100644 --- a/docs/MCP.md +++ b/docs/MCP.md @@ -56,6 +56,7 @@ Add to `~/Library/Application Support/Claude/claude_desktop_config.json`: | `get_diff` | Changed files with line counts and impact analysis | | `find_file` | Find files by name pattern | | `get_importers` | Find all files that import a specific file | +| `get_handoff` | Build/read layered handoff artifact (`prefix` + `delta`) with lazy file detail loading | ## Usage @@ -65,3 +66,26 @@ Once configured, Claude can use these tools automatically. Try asking: - "Show me the dependency flow" - "What files import utils.go?" - "What changed since the last commit?" +- "Build a handoff summary I can continue in another agent" + +## Handoff Tool Notes + +`get_handoff` supports: +- `latest=true` to read previously saved handoff artifact +- `since="2h"` and `ref="main"` to tune generation +- `json=true` for machine-readable output +- `save=true` to persist generated artifacts (`handoff.latest.json`, `handoff.prefix.json`, `handoff.delta.json`) +- `prefix=true` to return only the stable prefix snapshot +- `delta=true` to return only the recent delta snapshot +- `file="path/to/file"` to lazy-load full detail for one changed file stub + +By default, `get_handoff` does **not** write to disk unless `save=true` is set. + +Surface behavior note: +- MCP: read-only by default (`save=false`) +- CLI `codemap handoff`: save by default (`--no-save` to disable) + +Output and budget notes: +- text responses are byte-budgeted and line-truncated to protect context +- handoff payload includes deterministic hashes (`prefix_hash`, `delta_hash`, `combined_hash`) +- handoff payload includes cache metrics (`reuse_ratio`, `unchanged_bytes`, etc.) 
diff --git a/handoff/build.go b/handoff/build.go new file mode 100644 index 0000000..c9979d4 --- /dev/null +++ b/handoff/build.go @@ -0,0 +1,661 @@ +package handoff + +import ( + "bytes" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "fmt" + "io" + "os" + "os/exec" + "path/filepath" + "sort" + "strings" + "time" + + "codemap/limits" + "codemap/scanner" + "codemap/watch" +) + +type changedEntry struct { + Path string + Status string +} + +var changedStatusRank = map[string]int{ + "branch": 1, + "modified": 2, + "staged": 3, + "untracked": 4, + "event": 5, +} + +func normalizeOptions(opts BuildOptions, fileCount int) BuildOptions { + if opts.BaseRef == "" { + opts.BaseRef = DefaultBaseRef + } + if opts.Since <= 0 { + opts.Since = DefaultSince + } + + budget := limits.HandoffBudgetForRepo(fileCount) + if opts.MaxChanged <= 0 { + opts.MaxChanged = budget.MaxChanged + } + if opts.MaxRisk <= 0 { + opts.MaxRisk = budget.MaxRisk + } + if opts.MaxEvents <= 0 { + opts.MaxEvents = budget.MaxEvents + } + if opts.MaxHubs <= 0 { + opts.MaxHubs = max(budget.MaxRisk, 8) + } + return opts +} + +// Build creates a multi-agent handoff artifact from git + daemon state. 
+func Build(root string, opts BuildOptions) (*Artifact, error) { + absRoot, err := filepath.Abs(root) + if err != nil { + return nil, err + } + + state := opts.State + if state == nil { + state = watch.ReadState(absRoot) + } + + fileCount := resolveRepoFileCount(absRoot, state) + opts = normalizeOptions(opts, fileCount) + + branch, err := gitCurrentBranch(absRoot) + if err != nil { + return nil, fmt.Errorf("failed to read git branch: %w", err) + } + + entries, diffErr := collectChangedEntries(absRoot, opts.BaseRef) + if diffErr != nil { + return nil, diffErr + } + changedAll := entryPaths(entries) + + recentEvents := summarizeEvents(state, opts.Since, opts.MaxEvents) + if len(changedAll) == 0 && len(recentEvents) > 0 { + changedAll = changedFromEvents(recentEvents) + sort.Strings(changedAll) + entries = make([]changedEntry, 0, len(changedAll)) + for _, path := range changedAll { + entries = append(entries, changedEntry{Path: path, Status: "event"}) + } + } + + importers := dependencyImportersForHandoff(absRoot, state, fileCount) + riskFiles := summarizeRiskFiles(changedAll, importers, opts.MaxRisk) + selectedPaths := prioritizeChangedPaths(changedAll, riskFiles, opts.MaxChanged) + entries = selectEntries(entries, selectedPaths) + + changedStubs := buildFileStubs(absRoot, entries) + hubs := summarizeHubs(importers, opts.MaxHubs) + + nextSteps, openQuestions := deriveGuidance(selectedPaths, riskFiles, recentEvents, opts.BaseRef, state != nil, len(importers) > 0) + + prefix := PrefixSnapshot{ + FileCount: fileCount, + Hubs: nonNilHubs(hubs), + } + delta := DeltaSnapshot{ + Changed: nonNilStubs(changedStubs), + RiskFiles: nonNilRiskFiles(riskFiles), + RecentEvents: nonNilEvents(recentEvents), + NextSteps: nonNilStrings(nextSteps), + OpenQuestions: nonNilStrings(openQuestions), + } + + prefixHash, prefixBytes, err := hashCanonical(prefix) + if err != nil { + return nil, fmt.Errorf("failed to hash prefix snapshot: %w", err) + } + deltaHash, deltaBytes, err := 
hashCanonical(delta) + if err != nil { + return nil, fmt.Errorf("failed to hash delta snapshot: %w", err) + } + combinedHash := hashFromStrings(prefixHash, deltaHash) + + previous := opts.Previous + if previous == nil { + previous, _ = ReadLatest(absRoot) + } + metrics := buildCacheMetrics(previous, prefixHash, deltaHash, prefixBytes, deltaBytes) + generatedAt := time.Now() + if previous != nil && previous.PrefixHash == prefixHash && previous.DeltaHash == deltaHash && !previous.GeneratedAt.IsZero() { + // Preserve timestamp across identical artifacts to keep output deterministic. + generatedAt = previous.GeneratedAt + } + + return &Artifact{ + SchemaVersion: SchemaVersion, + GeneratedAt: generatedAt, + Root: absRoot, + Branch: branch, + BaseRef: opts.BaseRef, + Prefix: prefix, + Delta: delta, + PrefixHash: prefixHash, + DeltaHash: deltaHash, + CombinedHash: combinedHash, + Metrics: metrics, + + // Legacy top-level mirrors. + ChangedFiles: stubPaths(changedStubs), + RiskFiles: nonNilRiskFiles(riskFiles), + RecentEvents: nonNilEvents(recentEvents), + NextSteps: nonNilStrings(nextSteps), + OpenQuestions: nonNilStrings(openQuestions), + }, nil +} + +func collectChangedEntries(root, baseRef string) ([]changedEntry, error) { + changed := make(map[string]changedEntry) + + branchLines, branchErr := runGitLines(root, "diff", "--name-only", baseRef+"...HEAD") + for _, line := range branchLines { + addChangedEntry(changed, root, line, "branch") + } + + workingLines, _ := runGitLines(root, "diff", "--name-only") + for _, line := range workingLines { + addChangedEntry(changed, root, line, "modified") + } + + stagedLines, _ := runGitLines(root, "diff", "--name-only", "--cached") + for _, line := range stagedLines { + addChangedEntry(changed, root, line, "staged") + } + + untrackedLines, _ := runGitLines(root, "ls-files", "--others", "--exclude-standard") + for _, line := range untrackedLines { + addChangedEntry(changed, root, line, "untracked") + } + + if len(changed) == 0 && 
branchErr != nil { + return nil, fmt.Errorf("failed to compute changed files: %w", branchErr) + } + + result := make([]changedEntry, 0, len(changed)) + for _, entry := range changed { + result = append(result, entry) + } + sort.Slice(result, func(i, j int) bool { + return result[i].Path < result[j].Path + }) + return result, nil +} + +func addChangedEntry(changed map[string]changedEntry, root, path, status string) { + normalized := filepath.ToSlash(strings.TrimSpace(path)) + if normalized == "" || !includeChangedPath(root, normalized) { + return + } + + previous, ok := changed[normalized] + if !ok || changedStatusRank[status] > changedStatusRank[previous.Status] { + changed[normalized] = changedEntry{Path: normalized, Status: status} + } +} + +func includeChangedPath(root, path string) bool { + normalized := filepath.ToSlash(strings.TrimSpace(path)) + if normalized == "" { + return false + } + + // Ignore tool/build/vendor directories. + parts := strings.Split(normalized, "/") + for _, p := range parts { + switch p { + case ".git", ".codemap", "node_modules", "vendor", "dist", "build", "target", "__pycache__", ".next", ".nuxt": + return false + } + } + + ext := strings.ToLower(filepath.Ext(normalized)) + switch ext { + case ".exe", ".dll", ".bin", ".o", ".a", ".so", ".dylib", ".wasm", ".class", ".jar", ".zip", ".tar", ".gz", ".7z", + ".png", ".jpg", ".jpeg", ".gif", ".webp", ".ico", ".bmp", ".tiff", ".mp3", ".wav", ".ogg", ".mp4", ".mov", ".avi", + ".log", ".out", ".pdf", ".ttf", ".otf", ".woff", ".woff2": + return false + } + + // Keep extensionless or uncommon files unless they appear binary. 
+ return !isLikelyBinary(root, normalized) +} + +func isLikelyBinary(root, relPath string) bool { + abs := filepath.Join(root, filepath.FromSlash(relPath)) + info, err := os.Stat(abs) + if err != nil || info.IsDir() { + return false + } + + f, err := os.Open(abs) + if err != nil { + return false + } + defer f.Close() + + buf := make([]byte, 2048) + n, err := f.Read(buf) + if err != nil || n == 0 { + return false + } + return bytes.IndexByte(buf[:n], 0) >= 0 +} + +func buildFileStubs(root string, changed []changedEntry) []FileStub { + if len(changed) == 0 { + return []FileStub{} + } + + stubs := make([]FileStub, 0, len(changed)) + for _, entry := range changed { + stub := FileStub{ + Path: entry.Path, + Status: entry.Status, + } + + absPath := filepath.Join(root, filepath.FromSlash(entry.Path)) + info, err := os.Stat(absPath) + if err == nil && !info.IsDir() { + stub.Size = info.Size() + stub.Hash = fileSHA256(absPath) + } + stubs = append(stubs, stub) + } + return stubs +} + +func fileSHA256(path string) string { + f, err := os.Open(path) + if err != nil { + return "" + } + defer f.Close() + + h := sha256.New() + if _, err := io.Copy(h, f); err != nil { + return "" + } + return hex.EncodeToString(h.Sum(nil)) +} + +func summarizeHubs(importersByFile map[string][]string, maxHubs int) []HubSummary { + if len(importersByFile) == 0 { + return []HubSummary{} + } + + hubs := make([]HubSummary, 0, len(importersByFile)) + for path, importers := range importersByFile { + if len(importers) < 3 { + continue + } + hubs = append(hubs, HubSummary{ + Path: path, + Importers: len(importers), + }) + } + + sort.Slice(hubs, func(i, j int) bool { + if hubs[i].Importers != hubs[j].Importers { + return hubs[i].Importers > hubs[j].Importers + } + return hubs[i].Path < hubs[j].Path + }) + + if maxHubs > 0 && len(hubs) > maxHubs { + hubs = hubs[:maxHubs] + } + return hubs +} + +func summarizeRiskFiles(changed []string, importersByFile map[string][]string, maxRisk int) []RiskFile { + if 
len(importersByFile) == 0 { + return []RiskFile{} + } + + risk := make([]RiskFile, 0, len(changed)) + for _, file := range changed { + importers := len(importersByFile[file]) + if importers < 2 { + continue + } + + isHub := importers >= 3 + reason := fmt.Sprintf("imported by %d files", importers) + if isHub { + reason = fmt.Sprintf("hub file imported by %d files", importers) + } + risk = append(risk, RiskFile{ + Path: file, + Importers: importers, + IsHub: isHub, + Reason: reason, + }) + } + + sort.Slice(risk, func(i, j int) bool { + if risk[i].Importers != risk[j].Importers { + return risk[i].Importers > risk[j].Importers + } + return risk[i].Path < risk[j].Path + }) + + if len(risk) > maxRisk { + risk = risk[:maxRisk] + } + return risk +} + +func summarizeEvents(state *watch.State, since time.Duration, maxEvents int) []EventSummary { + if state == nil || len(state.RecentEvents) == 0 { + return []EventSummary{} + } + + cutoff := time.Now().Add(-since) + result := make([]EventSummary, 0, len(state.RecentEvents)) + for _, e := range state.RecentEvents { + if e.Time.Before(cutoff) { + continue + } + result = append(result, EventSummary{ + Time: e.Time, + Op: e.Op, + Path: e.Path, + Delta: e.Delta, + IsHub: e.IsHub, + }) + } + + sort.Slice(result, func(i, j int) bool { + if !result[i].Time.Equal(result[j].Time) { + return result[i].Time.Before(result[j].Time) + } + if result[i].Path != result[j].Path { + return result[i].Path < result[j].Path + } + return result[i].Op < result[j].Op + }) + + if len(result) > maxEvents { + result = result[len(result)-maxEvents:] + } + return result +} + +func changedFromEvents(events []EventSummary) []string { + if len(events) == 0 { + return []string{} + } + seen := make(map[string]struct{}) + for _, e := range events { + seen[e.Path] = struct{}{} + } + changed := make([]string, 0, len(seen)) + for path := range seen { + changed = append(changed, path) + } + sort.Strings(changed) + return changed +} + +func deriveGuidance(changed 
[]string, risk []RiskFile, events []EventSummary, baseRef string, hasState bool, hasDependencyContext bool) ([]string, []string) { + nextSteps := make([]string, 0, 2) + openQuestions := make([]string, 0, 3) + + if len(changed) == 0 { + openQuestions = append(openQuestions, fmt.Sprintf("No changed files detected vs %s. Confirm the base ref and branch state.", baseRef)) + } + + if len(risk) > 0 { + nextSteps = append(nextSteps, "Review downstream dependents for high-impact files before merge.") + } + if len(changed) > 0 { + nextSteps = append(nextSteps, "Run tests covering changed files before handoff.") + } + + if !hasState { + openQuestions = append(openQuestions, "Live watch state was unavailable; timeline may be incomplete.") + } + if !hasDependencyContext { + openQuestions = append(openQuestions, "Dependency graph context was unavailable; risk files may be incomplete.") + } + if len(events) == 0 && hasState { + openQuestions = append(openQuestions, "No recent timeline events matched the lookback window.") + } + + return nextSteps, openQuestions +} + +func prioritizeChangedPaths(changed []string, risk []RiskFile, maxChanged int) []string { + if len(changed) <= maxChanged { + return nonNilStrings(changed) + } + + available := make(map[string]struct{}, len(changed)) + for _, path := range changed { + available[path] = struct{}{} + } + + out := make([]string, 0, maxChanged) + seen := make(map[string]struct{}, maxChanged) + for _, r := range risk { + if _, ok := available[r.Path]; !ok { + continue + } + if _, ok := seen[r.Path]; ok { + continue + } + out = append(out, r.Path) + seen[r.Path] = struct{}{} + if len(out) >= maxChanged { + return out + } + } + + for _, path := range changed { + if _, ok := seen[path]; ok { + continue + } + out = append(out, path) + if len(out) >= maxChanged { + break + } + } + return out +} + +func selectEntries(entries []changedEntry, selectedPaths []string) []changedEntry { + if len(selectedPaths) == 0 { + return []changedEntry{} + } + 
byPath := make(map[string]changedEntry, len(entries)) + for _, entry := range entries { + byPath[entry.Path] = entry + } + + selected := make([]changedEntry, 0, len(selectedPaths)) + for _, path := range selectedPaths { + if entry, ok := byPath[path]; ok { + selected = append(selected, entry) + } else { + selected = append(selected, changedEntry{Path: path, Status: "event"}) + } + } + return selected +} + +func entryPaths(entries []changedEntry) []string { + if len(entries) == 0 { + return []string{} + } + paths := make([]string, 0, len(entries)) + for _, entry := range entries { + paths = append(paths, entry.Path) + } + return paths +} + +func stubPaths(stubs []FileStub) []string { + if len(stubs) == 0 { + return []string{} + } + paths := make([]string, 0, len(stubs)) + for _, stub := range stubs { + paths = append(paths, stub.Path) + } + return paths +} + +func hashCanonical(v any) (string, int, error) { + data, err := json.Marshal(v) + if err != nil { + return "", 0, err + } + sum := sha256.Sum256(data) + return hex.EncodeToString(sum[:]), len(data), nil +} + +func hashFromStrings(parts ...string) string { + h := sha256.New() + for _, part := range parts { + _, _ = h.Write([]byte(part)) + _, _ = h.Write([]byte{':'}) + } + return hex.EncodeToString(h.Sum(nil)) +} + +func buildCacheMetrics(previous *Artifact, prefixHash, deltaHash string, prefixBytes, deltaBytes int) CacheMetrics { + totalBytes := prefixBytes + deltaBytes + metrics := CacheMetrics{ + PrefixBytes: prefixBytes, + DeltaBytes: deltaBytes, + TotalBytes: totalBytes, + } + if previous == nil { + return metrics + } + + metrics.PreviousCombinedHash = previous.CombinedHash + if previous.PrefixHash == prefixHash && prefixHash != "" { + metrics.PrefixReused = true + metrics.UnchangedBytes += prefixBytes + } + if previous.DeltaHash == deltaHash && deltaHash != "" { + metrics.DeltaReused = true + metrics.UnchangedBytes += deltaBytes + } + if totalBytes > 0 { + metrics.ReuseRatio = 
float64(metrics.UnchangedBytes) / float64(totalBytes) + } + return metrics +} + +func runGitLines(root string, args ...string) ([]string, error) { + cmd := exec.Command("git", args...) + cmd.Dir = root + out, err := cmd.Output() + if err != nil { + return nil, err + } + + raw := strings.Split(strings.TrimSpace(string(out)), "\n") + if len(raw) == 1 && raw[0] == "" { + return nil, nil + } + + lines := make([]string, 0, len(raw)) + for _, line := range raw { + line = strings.TrimSpace(line) + if line != "" { + lines = append(lines, line) + } + } + return lines, nil +} + +func gitCurrentBranch(root string) (string, error) { + cmd := exec.Command("git", "rev-parse", "--abbrev-ref", "HEAD") + cmd.Dir = root + out, err := cmd.Output() + if err != nil { + return "", err + } + return strings.TrimSpace(string(out)), nil +} + +func resolveRepoFileCount(root string, state *watch.State) int { + if state != nil && state.FileCount > 0 { + return state.FileCount + } + + gitCache := scanner.NewGitIgnoreCache(root) + files, err := scanner.ScanFiles(root, gitCache, nil, nil) + if err != nil { + return 0 + } + return len(files) +} + +func dependencyImportersForHandoff(root string, state *watch.State, fileCount int) map[string][]string { + if state != nil && len(state.Importers) > 0 { + return state.Importers + } + + // Reuse daemon file count when available to avoid an extra scan. 
+ if fileCount > limits.LargeRepoFileCount { + return nil + } + + fg, err := scanner.BuildFileGraph(root) + if err != nil { + return nil + } + return fg.Importers +} + +func nonNilStrings(items []string) []string { + if items == nil { + return []string{} + } + return items +} + +func nonNilRiskFiles(items []RiskFile) []RiskFile { + if items == nil { + return []RiskFile{} + } + return items +} + +func nonNilEvents(items []EventSummary) []EventSummary { + if items == nil { + return []EventSummary{} + } + return items +} + +func nonNilStubs(items []FileStub) []FileStub { + if items == nil { + return []FileStub{} + } + return items +} + +func nonNilHubs(items []HubSummary) []HubSummary { + if items == nil { + return []HubSummary{} + } + return items +} diff --git a/handoff/detail.go b/handoff/detail.go new file mode 100644 index 0000000..90baf92 --- /dev/null +++ b/handoff/detail.go @@ -0,0 +1,101 @@ +package handoff + +import ( + "fmt" + "path/filepath" + "sort" + "strings" + + "codemap/limits" + "codemap/scanner" + "codemap/watch" +) + +// BuildFileDetail resolves detailed context for one changed file stub. 
+func BuildFileDetail(root string, artifact *Artifact, targetPath string, state *watch.State) (*FileDetail, error) { + if artifact == nil { + return nil, fmt.Errorf("handoff artifact is nil") + } + normalizeArtifact(artifact) + + target := filepath.ToSlash(strings.TrimSpace(targetPath)) + if target == "" { + return nil, fmt.Errorf("file path is required") + } + + var selected *FileStub + for i := range artifact.Delta.Changed { + if artifact.Delta.Changed[i].Path == target { + selected = &artifact.Delta.Changed[i] + break + } + } + if selected == nil { + return nil, fmt.Errorf("file %q was not found in current handoff delta", target) + } + + absRoot, err := filepath.Abs(root) + if err != nil { + return nil, err + } + if state == nil { + state = watch.ReadState(absRoot) + } + + importers, imports := dependencyContextForFile(absRoot, state, target) + importers = uniqueSorted(importers) + imports = uniqueSorted(imports) + + events := make([]EventSummary, 0, len(artifact.Delta.RecentEvents)) + for _, event := range artifact.Delta.RecentEvents { + if event.Path == target { + events = append(events, event) + } + } + + return &FileDetail{ + Path: selected.Path, + Hash: selected.Hash, + Size: selected.Size, + Status: selected.Status, + Importers: importers, + Imports: imports, + RecentEvents: events, + IsHub: len(importers) >= 3, + }, nil +} + +func dependencyContextForFile(root string, state *watch.State, path string) ([]string, []string) { + if state != nil && (len(state.Importers) > 0 || len(state.Imports) > 0) { + return append([]string{}, state.Importers[path]...), append([]string{}, state.Imports[path]...) + } + + if state != nil && state.FileCount > limits.LargeRepoFileCount { + return nil, nil + } + + fg, err := scanner.BuildFileGraph(root) + if err != nil { + return nil, nil + } + return append([]string{}, fg.Importers[path]...), append([]string{}, fg.Imports[path]...) 
+} + +func uniqueSorted(items []string) []string { + if len(items) == 0 { + return []string{} + } + seen := make(map[string]struct{}, len(items)) + for _, item := range items { + if item == "" { + continue + } + seen[item] = struct{}{} + } + out := make([]string, 0, len(seen)) + for item := range seen { + out = append(out, item) + } + sort.Strings(out) + return out +} diff --git a/handoff/handoff_test.go b/handoff/handoff_test.go new file mode 100644 index 0000000..c6825a4 --- /dev/null +++ b/handoff/handoff_test.go @@ -0,0 +1,333 @@ +package handoff + +import ( + "encoding/json" + "os" + "os/exec" + "path/filepath" + "strings" + "testing" + "time" + + "codemap/watch" +) + +func runCmd(t *testing.T, dir, name string, args ...string) { + t.Helper() + cmd := exec.Command(name, args...) + cmd.Dir = dir + if out, err := cmd.CombinedOutput(); err != nil { + t.Fatalf("%s %v failed: %v\n%s", name, args, err, string(out)) + } +} + +func contains(items []string, value string) bool { + for _, item := range items { + if item == value { + return true + } + } + return false +} + +func TestBuildWriteRead(t *testing.T) { + root := t.TempDir() + + runCmd(t, root, "git", "init") + + if err := os.WriteFile(filepath.Join(root, "a.go"), []byte("package main\n\nfunc A() {}\n"), 0644); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(root, "b.go"), []byte("package main\n\nfunc B() {}\n"), 0644); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(root, "go.mod"), []byte("module example\n\ngo 1.24\n"), 0644); err != nil { + t.Fatal(err) + } + + runCmd(t, root, "git", "add", ".") + runCmd(t, root, "git", "-c", "user.name=Test", "-c", "user.email=test@example.com", "commit", "-m", "init") + + // Local modification to show up in handoff changed files. 
+ if err := os.WriteFile(filepath.Join(root, "a.go"), []byte("package main\n\nfunc A() int { return 1 }\n"), 0644); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(root, "go.mod"), []byte("module example\n\ngo 1.25\n"), 0644); err != nil { + t.Fatal(err) + } + // Noise file should not be included in changed files handoff output. + if err := os.WriteFile(filepath.Join(root, "firebase-debug.log"), []byte("debug noise\n"), 0644); err != nil { + t.Fatal(err) + } + // Binary noise file (no extension) should also be excluded. + if err := os.WriteFile(filepath.Join(root, "codemap-dev"), []byte{0xCA, 0xFE, 0xBA, 0xBE, 0x00}, 0755); err != nil { + t.Fatal(err) + } + + state := &watch.State{ + Importers: map[string][]string{ + "a.go": {"x.go", "y.go", "z.go"}, + }, + RecentEvents: []watch.Event{ + { + Time: time.Now().Add(-20 * time.Minute), + Op: "WRITE", + Path: "a.go", + Delta: 3, + IsHub: true, + }, + }, + } + + artifact, err := Build(root, BuildOptions{ + BaseRef: "HEAD", + Since: time.Hour, + State: state, + }) + if err != nil { + t.Fatalf("Build failed: %v", err) + } + + if artifact.SchemaVersion != SchemaVersion { + t.Fatalf("expected schema version %d, got %d", SchemaVersion, artifact.SchemaVersion) + } + if !contains(artifact.ChangedFiles, "a.go") { + t.Fatalf("expected changed file a.go in %+v", artifact.ChangedFiles) + } + if !contains(artifact.ChangedFiles, "go.mod") { + t.Fatalf("expected config file go.mod in changed files: %+v", artifact.ChangedFiles) + } + if contains(artifact.ChangedFiles, "firebase-debug.log") { + t.Fatalf("did not expect log noise file in changed files: %+v", artifact.ChangedFiles) + } + if contains(artifact.ChangedFiles, "codemap-dev") { + t.Fatalf("did not expect binary noise file in changed files: %+v", artifact.ChangedFiles) + } + if len(artifact.RiskFiles) == 0 { + t.Fatalf("expected risk files in artifact") + } + if artifact.RiskFiles[0].Path != "a.go" { + t.Fatalf("expected first risk file to be a.go, got 
%s", artifact.RiskFiles[0].Path) + } + if artifact.PrefixHash == "" || artifact.DeltaHash == "" || artifact.CombinedHash == "" { + t.Fatalf("expected non-empty hashes, got prefix=%q delta=%q combined=%q", artifact.PrefixHash, artifact.DeltaHash, artifact.CombinedHash) + } + if len(artifact.Prefix.Hubs) == 0 { + t.Fatalf("expected prefix hubs to be populated") + } + if len(artifact.Delta.Changed) == 0 { + t.Fatalf("expected delta changed stubs") + } + + if err := WriteLatest(root, artifact); err != nil { + t.Fatalf("WriteLatest failed: %v", err) + } + if _, err := os.Stat(PrefixPath(root)); err != nil { + t.Fatalf("expected prefix snapshot file: %v", err) + } + if _, err := os.Stat(DeltaPath(root)); err != nil { + t.Fatalf("expected delta snapshot file: %v", err) + } + if _, err := os.Stat(MetricsPath(root)); err != nil { + t.Fatalf("expected metrics log file: %v", err) + } + + // Validate split snapshots are parseable JSON. + var prefix PrefixSnapshot + if data, err := os.ReadFile(PrefixPath(root)); err != nil { + t.Fatalf("read prefix snapshot failed: %v", err) + } else if err := json.Unmarshal(data, &prefix); err != nil { + t.Fatalf("parse prefix snapshot failed: %v", err) + } + if len(prefix.Hubs) == 0 { + t.Fatalf("expected hubs in persisted prefix snapshot") + } + + readBack, err := ReadLatest(root) + if err != nil { + t.Fatalf("ReadLatest failed: %v", err) + } + if readBack == nil { + t.Fatalf("expected artifact from ReadLatest") + } + if !contains(readBack.ChangedFiles, "a.go") { + t.Fatalf("expected read-back changed file a.go in %+v", readBack.ChangedFiles) + } + if len(readBack.Delta.Changed) == 0 { + t.Fatalf("expected read-back delta stubs") + } + if readBack.Metrics.TotalBytes == 0 { + t.Fatalf("expected cache metrics to be populated") + } +} + +func TestBuildReturnsNonNilSlicesWithoutState(t *testing.T) { + root := t.TempDir() + runCmd(t, root, "git", "init") + if err := os.WriteFile(filepath.Join(root, "main.go"), []byte("package main\n"), 0644); err 
!= nil { + t.Fatal(err) + } + runCmd(t, root, "git", "add", ".") + runCmd(t, root, "git", "-c", "user.name=Test", "-c", "user.email=test@example.com", "commit", "-m", "init") + + artifact, err := Build(root, BuildOptions{ + BaseRef: "HEAD", + Since: time.Hour, + State: nil, + }) + if err != nil { + t.Fatalf("Build failed: %v", err) + } + + if artifact.ChangedFiles == nil { + t.Fatal("ChangedFiles should be non-nil") + } + if artifact.RiskFiles == nil { + t.Fatal("RiskFiles should be non-nil") + } + if artifact.RecentEvents == nil { + t.Fatal("RecentEvents should be non-nil") + } + if artifact.NextSteps == nil { + t.Fatal("NextSteps should be non-nil") + } + if artifact.OpenQuestions == nil { + t.Fatal("OpenQuestions should be non-nil") + } + if artifact.Prefix.FileCount == 0 { + t.Fatal("Prefix.FileCount should be populated when daemon state is unavailable") + } +} + +func TestReadLatestMissing(t *testing.T) { + root := t.TempDir() + got, err := ReadLatest(root) + if err != nil { + t.Fatalf("expected nil error for missing file, got %v", err) + } + if got != nil { + t.Fatalf("expected nil artifact when file missing") + } +} + +func TestRenderMarkdown(t *testing.T) { + a := &Artifact{ + Branch: "feature/test", + BaseRef: "main", + GeneratedAt: time.Now(), + PrefixHash: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + DeltaHash: "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + Metrics: CacheMetrics{ReuseRatio: 0.5, UnchangedBytes: 50, TotalBytes: 100}, + Delta: DeltaSnapshot{ + Changed: []FileStub{{Path: "a.go", Status: "modified"}}, + RiskFiles: []RiskFile{ + {Path: "a.go", Importers: 3, IsHub: true}, + }, + RecentEvents: []EventSummary{ + {Time: time.Now(), Op: "WRITE", Path: "a.go", Delta: 2}, + }, + NextSteps: []string{"Run tests"}, + }, + } + + md := RenderMarkdown(a) + if !strings.Contains(md, "Handoff") || !strings.Contains(md, "a.go") || !strings.Contains(md, "Prefix (Stable Context)") { + t.Fatalf("markdown render 
missing expected content: %s", md) + } + if strings.Contains(md, "Prefix hash:") || strings.Contains(md, "Cache reuse:") { + t.Fatalf("markdown output should hide cache telemetry details: %s", md) + } +} + +func TestRenderCompactDeterministic(t *testing.T) { + base := &Artifact{ + Branch: "feature/test", + BaseRef: "main", + GeneratedAt: time.Now(), + Delta: DeltaSnapshot{ + Changed: []FileStub{{Path: "a.go", Status: "modified"}}, + }, + } + other := *base + other.GeneratedAt = base.GeneratedAt.Add(45 * time.Minute) + + out1 := RenderCompact(base, 5) + out2 := RenderCompact(&other, 5) + if out1 != out2 { + t.Fatalf("compact output should be deterministic across generated_at changes:\n%s\n---\n%s", out1, out2) + } +} + +func TestBuildFileDetail(t *testing.T) { + root := t.TempDir() + runCmd(t, root, "git", "init") + if err := os.WriteFile(filepath.Join(root, "a.go"), []byte("package main\n"), 0644); err != nil { + t.Fatal(err) + } + runCmd(t, root, "git", "add", ".") + runCmd(t, root, "git", "-c", "user.name=Test", "-c", "user.email=test@example.com", "commit", "-m", "init") + if err := os.WriteFile(filepath.Join(root, "a.go"), []byte("package main\n\n// changed\n"), 0644); err != nil { + t.Fatal(err) + } + + state := &watch.State{ + Importers: map[string][]string{ + "a.go": {"x.go", "y.go", "z.go"}, + }, + Imports: map[string][]string{ + "a.go": {"dep.go"}, + }, + RecentEvents: []watch.Event{ + {Time: time.Now(), Op: "WRITE", Path: "a.go", Delta: 2, IsHub: true}, + }, + } + + artifact, err := Build(root, BuildOptions{BaseRef: "HEAD", State: state}) + if err != nil { + t.Fatalf("build failed: %v", err) + } + detail, err := BuildFileDetail(root, artifact, "a.go", state) + if err != nil { + t.Fatalf("BuildFileDetail failed: %v", err) + } + if detail.Path != "a.go" { + t.Fatalf("expected path a.go, got %s", detail.Path) + } + if !detail.IsHub { + t.Fatalf("expected a.go to be marked as hub detail") + } + if len(detail.Importers) != 3 { + t.Fatalf("expected 3 importers, 
got %d", len(detail.Importers)) + } +} + +func TestMetricsLogCapped(t *testing.T) { + root := t.TempDir() + if err := os.MkdirAll(filepath.Dir(MetricsPath(root)), 0755); err != nil { + t.Fatalf("mkdir .codemap failed: %v", err) + } + + artifact := &Artifact{ + GeneratedAt: time.Now(), + Branch: "feature/test", + BaseRef: "main", + PrefixHash: "p", + DeltaHash: "d", + CombinedHash: "c", + } + + for i := 0; i < maxMetricsLines+50; i++ { + if err := appendMetrics(root, artifact); err != nil { + t.Fatalf("appendMetrics failed: %v", err) + } + } + + data, err := os.ReadFile(MetricsPath(root)) + if err != nil { + t.Fatalf("read metrics failed: %v", err) + } + lines := strings.Split(strings.TrimSpace(string(data)), "\n") + if len(lines) != maxMetricsLines { + t.Fatalf("expected %d metrics lines after cap, got %d", maxMetricsLines, len(lines)) + } +} diff --git a/handoff/render.go b/handoff/render.go new file mode 100644 index 0000000..1e6df99 --- /dev/null +++ b/handoff/render.go @@ -0,0 +1,237 @@ +package handoff + +import ( + "fmt" + "strings" +) + +// RenderMarkdown returns a markdown handoff summary suitable for chat context. +// Output is deterministic for the same artifact content. +func RenderMarkdown(a *Artifact) string { + if a == nil { + return "" + } + normalizeArtifact(a) + + var b strings.Builder + b.WriteString(fmt.Sprintf("## Handoff (%s)\n", a.Branch)) + b.WriteString(fmt.Sprintf("Base ref: `%s`\n", a.BaseRef)) + + b.WriteString("\n### Prefix (Stable Context)\n") + if a.Prefix.FileCount > 0 { + b.WriteString(fmt.Sprintf("- File count: %d\n", a.Prefix.FileCount)) + } + if len(a.Prefix.Hubs) == 0 { + b.WriteString("- Hub files: none\n") + } else { + b.WriteString(fmt.Sprintf("- Hub files: %d\n", len(a.Prefix.Hubs))) + for i, hub := range a.Prefix.Hubs { + if i >= 15 { + b.WriteString(fmt.Sprintf("- ... 
and %d more\n", len(a.Prefix.Hubs)-15)) + break + } + b.WriteString(fmt.Sprintf("- `%s` (%d importers)\n", hub.Path, hub.Importers)) + } + } + + b.WriteString("\n### Delta (Recent Work)\n") + if len(a.Delta.Changed) == 0 { + b.WriteString("- Changed files: none detected\n") + } else { + b.WriteString(fmt.Sprintf("- Changed files: %d\n", len(a.Delta.Changed))) + for i, stub := range a.Delta.Changed { + if i >= 20 { + b.WriteString(fmt.Sprintf("- ... and %d more\n", len(a.Delta.Changed)-20)) + break + } + status := stub.Status + if status == "" { + status = "changed" + } + b.WriteString(fmt.Sprintf("- `%s` (%s)\n", stub.Path, status)) + } + } + + b.WriteString("\n### Risk Files\n") + if len(a.Delta.RiskFiles) == 0 { + b.WriteString("- None flagged\n") + } else { + for _, r := range a.Delta.RiskFiles { + hub := "" + if r.IsHub { + hub = " [HUB]" + } + b.WriteString(fmt.Sprintf("- `%s` (%d importers)%s\n", r.Path, r.Importers, hub)) + } + } + + b.WriteString("\n### Recent Timeline\n") + if len(a.Delta.RecentEvents) == 0 { + b.WriteString("- No recent events captured\n") + } else { + for _, e := range a.Delta.RecentEvents { + delta := "" + if e.Delta > 0 { + delta = fmt.Sprintf(" (+%d)", e.Delta) + } else if e.Delta < 0 { + delta = fmt.Sprintf(" (%d)", e.Delta) + } + hub := "" + if e.IsHub { + hub = " [HUB]" + } + b.WriteString(fmt.Sprintf("- %s `%s` `%s`%s%s\n", e.Time.Format("15:04:05"), e.Op, e.Path, delta, hub)) + } + } + + if len(a.Delta.NextSteps) > 0 { + b.WriteString("\n### Next Steps\n") + for _, step := range a.Delta.NextSteps { + b.WriteString(fmt.Sprintf("- %s\n", step)) + } + } + if len(a.Delta.OpenQuestions) > 0 { + b.WriteString("\n### Open Questions\n") + for _, q := range a.Delta.OpenQuestions { + b.WriteString(fmt.Sprintf("- %s\n", q)) + } + } + + return b.String() +} + +// RenderPrefixMarkdown renders only the stable prefix layer. 
+func RenderPrefixMarkdown(p PrefixSnapshot) string { + var b strings.Builder + b.WriteString("## Handoff Prefix\n") + if p.FileCount > 0 { + b.WriteString(fmt.Sprintf("- File count: %d\n", p.FileCount)) + } + if len(p.Hubs) == 0 { + b.WriteString("- Hub files: none\n") + return b.String() + } + b.WriteString(fmt.Sprintf("- Hub files: %d\n", len(p.Hubs))) + for _, hub := range p.Hubs { + b.WriteString(fmt.Sprintf("- `%s` (%d importers)\n", hub.Path, hub.Importers)) + } + return b.String() +} + +// RenderDeltaMarkdown renders only the delta layer. +func RenderDeltaMarkdown(d DeltaSnapshot) string { + var b strings.Builder + b.WriteString("## Handoff Delta\n") + if len(d.Changed) == 0 { + b.WriteString("- Changed files: none\n") + } else { + b.WriteString(fmt.Sprintf("- Changed files: %d\n", len(d.Changed))) + for _, stub := range d.Changed { + status := stub.Status + if status == "" { + status = "changed" + } + b.WriteString(fmt.Sprintf("- `%s` (%s)\n", stub.Path, status)) + } + } + + if len(d.RiskFiles) > 0 { + b.WriteString("\n### Risk Files\n") + for _, r := range d.RiskFiles { + b.WriteString(fmt.Sprintf("- `%s` (%d importers)\n", r.Path, r.Importers)) + } + } + return b.String() +} + +// RenderFileDetailMarkdown renders lazy-loaded detail for one file stub. 
+func RenderFileDetailMarkdown(d *FileDetail) string { + if d == nil { + return "" + } + + var b strings.Builder + b.WriteString(fmt.Sprintf("## Handoff File Detail: `%s`\n", d.Path)) + if d.Status != "" { + b.WriteString(fmt.Sprintf("- Status: %s\n", d.Status)) + } + if d.Hash != "" { + b.WriteString(fmt.Sprintf("- Hash: `%s`\n", d.Hash)) + } + if d.Size > 0 { + b.WriteString(fmt.Sprintf("- Size: %d bytes\n", d.Size)) + } + if d.IsHub { + b.WriteString("- Hub: yes\n") + } + + b.WriteString("\n### Importers\n") + if len(d.Importers) == 0 { + b.WriteString("- none\n") + } else { + for _, importer := range d.Importers { + b.WriteString(fmt.Sprintf("- `%s`\n", importer)) + } + } + + b.WriteString("\n### Imports\n") + if len(d.Imports) == 0 { + b.WriteString("- none\n") + } else { + for _, imp := range d.Imports { + b.WriteString(fmt.Sprintf("- `%s`\n", imp)) + } + } + + if len(d.RecentEvents) > 0 { + b.WriteString("\n### Recent Events\n") + for _, e := range d.RecentEvents { + b.WriteString(fmt.Sprintf("- %s `%s` (%d)\n", e.Time.Format("15:04:05"), e.Op, e.Delta)) + } + } + return b.String() +} + +// RenderCompact produces a short plain-text summary for session-start hooks. +func RenderCompact(a *Artifact, maxItems int) string { + if a == nil { + return "" + } + normalizeArtifact(a) + if maxItems <= 0 { + maxItems = 5 + } + + var b strings.Builder + b.WriteString(fmt.Sprintf(" Branch: %s\n", a.Branch)) + b.WriteString(fmt.Sprintf(" Base ref: %s\n", a.BaseRef)) + b.WriteString(fmt.Sprintf(" Changed files: %d\n", len(a.Delta.Changed))) + + if len(a.Delta.Changed) > 0 { + b.WriteString(" Top changes:\n") + for i, stub := range a.Delta.Changed { + if i >= maxItems { + b.WriteString(fmt.Sprintf(" ... 
and %d more\n", len(a.Delta.Changed)-maxItems)) + break + } + status := stub.Status + if status == "" { + status = "changed" + } + b.WriteString(fmt.Sprintf(" • %s (%s)\n", stub.Path, status)) + } + } + + if len(a.Delta.RiskFiles) > 0 { + b.WriteString(" Risk files:\n") + for i, r := range a.Delta.RiskFiles { + if i >= maxItems { + b.WriteString(fmt.Sprintf(" ... and %d more\n", len(a.Delta.RiskFiles)-maxItems)) + break + } + b.WriteString(fmt.Sprintf(" ⚠️ %s (%d importers)\n", r.Path, r.Importers)) + } + } + + return b.String() +} diff --git a/handoff/storage.go b/handoff/storage.go new file mode 100644 index 0000000..8ca39a9 --- /dev/null +++ b/handoff/storage.go @@ -0,0 +1,252 @@ +package handoff + +import ( + "bytes" + "encoding/json" + "os" + "path/filepath" +) + +const ( + latestFilename = "handoff.latest.json" + prefixFilename = "handoff.prefix.json" + deltaFilename = "handoff.delta.json" + metricsFilename = "handoff.metrics.log" + maxMetricsLines = 500 +) + +// LatestPath returns the absolute location of the latest handoff artifact. +func LatestPath(root string) string { + absRoot, err := filepath.Abs(root) + if err != nil { + return filepath.Join(root, ".codemap", latestFilename) + } + return filepath.Join(absRoot, ".codemap", latestFilename) +} + +// PrefixPath returns the absolute location of the prefix snapshot. +func PrefixPath(root string) string { + absRoot, err := filepath.Abs(root) + if err != nil { + return filepath.Join(root, ".codemap", prefixFilename) + } + return filepath.Join(absRoot, ".codemap", prefixFilename) +} + +// DeltaPath returns the absolute location of the delta snapshot. +func DeltaPath(root string) string { + absRoot, err := filepath.Abs(root) + if err != nil { + return filepath.Join(root, ".codemap", deltaFilename) + } + return filepath.Join(absRoot, ".codemap", deltaFilename) +} + +// MetricsPath returns the absolute location of the handoff metrics log. 
+func MetricsPath(root string) string { + absRoot, err := filepath.Abs(root) + if err != nil { + return filepath.Join(root, ".codemap", metricsFilename) + } + return filepath.Join(absRoot, ".codemap", metricsFilename) +} + +// ReadLatest reads the latest handoff artifact if it exists. +// Returns (nil, nil) when no artifact is present. +func ReadLatest(root string) (*Artifact, error) { + path := LatestPath(root) + data, err := os.ReadFile(path) + if err != nil { + if os.IsNotExist(err) { + return nil, nil + } + return nil, err + } + + var artifact Artifact + if err := json.Unmarshal(data, &artifact); err != nil { + return nil, err + } + normalizeArtifact(&artifact) + + return &artifact, nil +} + +// WriteLatest writes an artifact atomically to .codemap/handoff.latest.json. +func WriteLatest(root string, artifact *Artifact) error { + normalizeArtifact(artifact) + + path := LatestPath(root) + if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil { + return err + } + + if err := writeJSONAtomic(path, artifact); err != nil { + return err + } + if err := writeJSONAtomic(PrefixPath(root), artifact.Prefix); err != nil { + return err + } + if err := writeJSONAtomic(DeltaPath(root), artifact.Delta); err != nil { + return err + } + return appendMetrics(root, artifact) +} + +func writeJSONAtomic(path string, value any) error { + data, err := json.MarshalIndent(value, "", " ") + if err != nil { + return err + } + tmpPath := path + ".tmp" + if err := os.WriteFile(tmpPath, data, 0644); err != nil { + return err + } + return os.Rename(tmpPath, path) +} + +func appendMetrics(root string, artifact *Artifact) error { + entry := struct { + GeneratedAt string `json:"generated_at"` + Branch string `json:"branch"` + BaseRef string `json:"base_ref"` + PrefixHash string `json:"prefix_hash"` + DeltaHash string `json:"delta_hash"` + CombinedHash string `json:"combined_hash"` + Metrics CacheMetrics `json:"metrics"` + }{ + GeneratedAt: 
artifact.GeneratedAt.Format("2006-01-02T15:04:05Z07:00"), + Branch: artifact.Branch, + BaseRef: artifact.BaseRef, + PrefixHash: artifact.PrefixHash, + DeltaHash: artifact.DeltaHash, + CombinedHash: artifact.CombinedHash, + Metrics: artifact.Metrics, + } + + data, err := json.Marshal(entry) + if err != nil { + return err + } + + f, err := os.OpenFile(MetricsPath(root), os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + return err + } + defer f.Close() + + if _, err := f.Write(append(data, '\n')); err != nil { + return err + } + return capMetricsLog(root, maxMetricsLines) +} + +func normalizeArtifact(artifact *Artifact) { + ensureSchemaVersion(artifact) + promoteLegacyFieldsIntoDelta(artifact) + ensureNonNilSnapshotFields(artifact) + mirrorDeltaToLegacy(artifact) + backfillHashes(artifact) +} + +func ensureSchemaVersion(artifact *Artifact) { + if artifact.SchemaVersion == 0 { + artifact.SchemaVersion = SchemaVersion + } +} + +func promoteLegacyFieldsIntoDelta(artifact *Artifact) { + if artifact.Delta.Changed == nil && len(artifact.ChangedFiles) > 0 { + artifact.Delta.Changed = make([]FileStub, 0, len(artifact.ChangedFiles)) + for _, path := range artifact.ChangedFiles { + artifact.Delta.Changed = append(artifact.Delta.Changed, FileStub{Path: path}) + } + } + if artifact.Delta.RiskFiles == nil { + artifact.Delta.RiskFiles = append([]RiskFile{}, artifact.RiskFiles...) + } + if artifact.Delta.RecentEvents == nil { + artifact.Delta.RecentEvents = append([]EventSummary{}, artifact.RecentEvents...) + } + if artifact.Delta.NextSteps == nil { + artifact.Delta.NextSteps = append([]string{}, artifact.NextSteps...) + } + if artifact.Delta.OpenQuestions == nil { + artifact.Delta.OpenQuestions = append([]string{}, artifact.OpenQuestions...) 
+ } +} + +func ensureNonNilSnapshotFields(artifact *Artifact) { + artifact.Prefix.Hubs = nonNilHubs(artifact.Prefix.Hubs) + artifact.Delta.Changed = nonNilStubs(artifact.Delta.Changed) + artifact.Delta.RiskFiles = nonNilRiskFiles(artifact.Delta.RiskFiles) + artifact.Delta.RecentEvents = nonNilEvents(artifact.Delta.RecentEvents) + artifact.Delta.NextSteps = nonNilStrings(artifact.Delta.NextSteps) + artifact.Delta.OpenQuestions = nonNilStrings(artifact.Delta.OpenQuestions) +} + +func mirrorDeltaToLegacy(artifact *Artifact) { + if artifact.ChangedFiles == nil { + artifact.ChangedFiles = stubPaths(artifact.Delta.Changed) + } + if artifact.RiskFiles == nil { + artifact.RiskFiles = append([]RiskFile{}, artifact.Delta.RiskFiles...) + } + if artifact.RecentEvents == nil { + artifact.RecentEvents = append([]EventSummary{}, artifact.Delta.RecentEvents...) + } + if artifact.NextSteps == nil { + artifact.NextSteps = append([]string{}, artifact.Delta.NextSteps...) + } + if artifact.OpenQuestions == nil { + artifact.OpenQuestions = append([]string{}, artifact.Delta.OpenQuestions...) 
+ } +} + +func backfillHashes(artifact *Artifact) { + if artifact.PrefixHash == "" { + if hash, _, err := hashCanonical(artifact.Prefix); err == nil { + artifact.PrefixHash = hash + } + } + if artifact.DeltaHash == "" { + if hash, _, err := hashCanonical(artifact.Delta); err == nil { + artifact.DeltaHash = hash + } + } + if artifact.CombinedHash == "" { + artifact.CombinedHash = hashFromStrings(artifact.PrefixHash, artifact.DeltaHash) + } +} + +func capMetricsLog(root string, maxLines int) error { + if maxLines <= 0 { + return nil + } + + path := MetricsPath(root) + data, err := os.ReadFile(path) + if err != nil { + if os.IsNotExist(err) { + return nil + } + return err + } + data = bytes.TrimSpace(data) + if len(data) == 0 { + return nil + } + + lines := bytes.Split(data, []byte("\n")) + if len(lines) <= maxLines { + return nil + } + + trimmed := bytes.Join(lines[len(lines)-maxLines:], []byte("\n")) + trimmed = append(trimmed, '\n') + tmpPath := path + ".tmp" + if err := os.WriteFile(tmpPath, trimmed, 0644); err != nil { + return err + } + return os.Rename(tmpPath, path) +} diff --git a/handoff/types.go b/handoff/types.go new file mode 100644 index 0000000..585ed78 --- /dev/null +++ b/handoff/types.go @@ -0,0 +1,118 @@ +package handoff + +import ( + "time" + + "codemap/watch" +) + +const ( + SchemaVersion = 1 + DefaultBaseRef = "main" + DefaultSince = 6 * time.Hour +) + +// HubSummary captures stable hub metadata for prefix context. +type HubSummary struct { + Path string `json:"path"` + Importers int `json:"importers"` +} + +// FileStub is a lightweight file descriptor for lazy detail loading. +type FileStub struct { + Path string `json:"path"` + Hash string `json:"hash,omitempty"` + Size int64 `json:"size,omitempty"` + Status string `json:"status,omitempty"` +} + +// RiskFile captures high-impact changed files in a handoff. 
+type RiskFile struct { + Path string `json:"path"` + Importers int `json:"importers"` + IsHub bool `json:"is_hub"` + Reason string `json:"reason"` +} + +// EventSummary is a compact event entry for handoff output. +type EventSummary struct { + Time time.Time `json:"time"` + Op string `json:"op"` + Path string `json:"path"` + Delta int `json:"delta,omitempty"` + IsHub bool `json:"is_hub,omitempty"` +} + +// PrefixSnapshot contains slow-changing structural context. +type PrefixSnapshot struct { + FileCount int `json:"file_count,omitempty"` + Hubs []HubSummary `json:"hubs"` +} + +// DeltaSnapshot contains fast-changing work-in-progress context. +type DeltaSnapshot struct { + Changed []FileStub `json:"changed"` + RiskFiles []RiskFile `json:"risk_files"` + RecentEvents []EventSummary `json:"recent_events"` + NextSteps []string `json:"next_steps"` + OpenQuestions []string `json:"open_questions"` +} + +// CacheMetrics tracks how much handoff context was reused from the previous artifact. +type CacheMetrics struct { + PrefixBytes int `json:"prefix_bytes"` + DeltaBytes int `json:"delta_bytes"` + TotalBytes int `json:"total_bytes"` + UnchangedBytes int `json:"unchanged_bytes"` + ReuseRatio float64 `json:"reuse_ratio"` + PrefixReused bool `json:"prefix_reused"` + DeltaReused bool `json:"delta_reused"` + PreviousCombinedHash string `json:"previous_combined_hash,omitempty"` +} + +// Artifact is the persisted handoff payload shared between agents. +type Artifact struct { + SchemaVersion int `json:"schema_version"` + GeneratedAt time.Time `json:"generated_at"` + Root string `json:"root"` + Branch string `json:"branch"` + BaseRef string `json:"base_ref"` + Prefix PrefixSnapshot `json:"prefix"` + Delta DeltaSnapshot `json:"delta"` + PrefixHash string `json:"prefix_hash"` + DeltaHash string `json:"delta_hash"` + CombinedHash string `json:"combined_hash"` + Metrics CacheMetrics `json:"metrics"` + + // Legacy top-level fields preserved for backward compatibility. 
+ // Deprecated: these mirrors will be removed in schema v2 after clients migrate to Prefix/Delta. + ChangedFiles []string `json:"changed_files"` + RiskFiles []RiskFile `json:"risk_files"` + RecentEvents []EventSummary `json:"recent_events"` + NextSteps []string `json:"next_steps"` + OpenQuestions []string `json:"open_questions"` +} + +// FileDetail is loaded lazily from a file stub when deeper context is requested. +type FileDetail struct { + Path string `json:"path"` + Hash string `json:"hash,omitempty"` + Size int64 `json:"size,omitempty"` + Status string `json:"status,omitempty"` + Importers []string `json:"importers"` + Imports []string `json:"imports"` + RecentEvents []EventSummary `json:"recent_events"` + IsHub bool `json:"is_hub"` +} + +// BuildOptions controls handoff generation behavior. +type BuildOptions struct { + BaseRef string + Since time.Duration + State *watch.State + MaxChanged int + MaxRisk int + MaxEvents int + MaxHubs int + Previous *Artifact +} diff --git a/limits/budget.go b/limits/budget.go new file mode 100644 index 0000000..7fa671a --- /dev/null +++ b/limits/budget.go @@ -0,0 +1,75 @@ +package limits + +import "strings" + +const ( + // Shared text response budgets. + MaxStructureOutputBytes = MaxContextOutputBytes + MaxHandoffCompactBytes = 3000 + MaxHandoffMarkdownBytes = 20000 + MaxHandoffDetailBytes = 12000 +) + +// HandoffBudget controls list sizes and rendering budget for handoff payloads. +type HandoffBudget struct { + MaxChanged int + MaxRisk int + MaxEvents int + MaxMarkdownBytes int + MaxCompactBytes int + MaxDetailBytes int +} + +// HandoffBudgetForRepo returns a budget profile that scales down on larger repos. 
+func HandoffBudgetForRepo(fileCount int) HandoffBudget { + switch { + case fileCount > LargeRepoFileCount: + return HandoffBudget{ + MaxChanged: 25, + MaxRisk: 8, + MaxEvents: 10, + MaxMarkdownBytes: MaxHandoffMarkdownBytes, + MaxCompactBytes: MaxHandoffCompactBytes, + MaxDetailBytes: MaxHandoffDetailBytes, + } + case fileCount > MediumRepoFileCount: + return HandoffBudget{ + MaxChanged: 40, + MaxRisk: 10, + MaxEvents: 15, + MaxMarkdownBytes: MaxHandoffMarkdownBytes, + MaxCompactBytes: MaxHandoffCompactBytes, + MaxDetailBytes: MaxHandoffDetailBytes, + } + default: + return HandoffBudget{ + MaxChanged: 60, + MaxRisk: 15, + MaxEvents: 25, + MaxMarkdownBytes: MaxHandoffMarkdownBytes, + MaxCompactBytes: MaxHandoffCompactBytes, + MaxDetailBytes: MaxHandoffDetailBytes, + } + } +} + +// TruncateAtLineBoundary trims output to maxBytes, preferring a clean newline cut. +func TruncateAtLineBoundary(output string, maxBytes int, truncatedMessage string) string { + if maxBytes <= 0 || len(output) <= maxBytes { + return output + } + + trimmed := output[:maxBytes] + lineCutThreshold := maxBytes - 1000 + if lineCutThreshold < 0 { + lineCutThreshold = 0 + } + if idx := strings.LastIndex(trimmed, "\n"); idx > lineCutThreshold { + trimmed = trimmed[:idx] + } + + if truncatedMessage == "" { + truncatedMessage = "\n\n... 
(truncated)\n" + } + return trimmed + truncatedMessage +} diff --git a/main.go b/main.go index aef887e..a0679ed 100644 --- a/main.go +++ b/main.go @@ -13,6 +13,8 @@ import ( "time" "codemap/cmd" + "codemap/handoff" + "codemap/limits" "codemap/render" "codemap/scanner" "codemap/watch" @@ -52,6 +54,12 @@ func main() { return } + // Handle "handoff" subcommand before global flag parsing + if len(os.Args) >= 2 && os.Args[1] == "handoff" { + runHandoffSubcommand(os.Args[2:]) + return + } + skylineMode := flag.Bool("skyline", false, "Enable skyline visualization mode") animateMode := flag.Bool("animate", false, "Enable animation (use with --skyline)") depsMode := flag.Bool("deps", false, "Enable dependency graph mode (function/import analysis)") @@ -112,6 +120,7 @@ func main() { fmt.Println(" codemap hook prompt-submit # Parse user prompt (stdin)") fmt.Println(" codemap hook pre-compact # Save state before compact") fmt.Println(" codemap hook session-stop # Session summary") + fmt.Println(" codemap handoff [path] # Build handoff artifact for agent switching") os.Exit(0) } @@ -446,6 +455,137 @@ func runWatchSubcommand(subCmd, root string) { } } +func runHandoffSubcommand(args []string) { + fs := flag.NewFlagSet("handoff", flag.ExitOnError) + since := fs.String("since", "6h", "Look back window for recent events (Go duration, e.g. 
2h, 30m)")
+	baseRef := fs.String("ref", handoff.DefaultBaseRef, "Git base ref for diff (default: main)")
+	jsonMode := fs.Bool("json", false, "Output raw handoff JSON")
+	latest := fs.Bool("latest", false, "Read the latest saved handoff instead of generating a new one")
+	prefixOnly := fs.Bool("prefix", false, "Render only the stable prefix layer")
+	deltaOnly := fs.Bool("delta", false, "Render only the recent delta layer")
+	detailPath := fs.String("detail", "", "Load full detail for a changed file path from handoff delta")
+	noSave := fs.Bool("no-save", false, "Do not persist generated handoff artifact")
+	if err := fs.Parse(args); err != nil {
+		fmt.Fprintf(os.Stderr, "Error: %v\n", err)
+		os.Exit(1)
+	}
+	if *prefixOnly && *deltaOnly {
+		fmt.Fprintln(os.Stderr, "Error: --prefix and --delta are mutually exclusive")
+		os.Exit(1)
+	}
+
+	root := "."
+	if fs.NArg() > 0 {
+		root = fs.Arg(0)
+	}
+
+	absRoot, err := filepath.Abs(root)
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "Error: %v\n", err)
+		os.Exit(1)
+	}
+
+	var artifact *handoff.Artifact
+	if *latest {
+		artifact, err = handoff.ReadLatest(absRoot)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "Error reading handoff: %v\n", err)
+			os.Exit(1)
+		}
+		if artifact == nil {
+			fmt.Printf("No handoff artifact found at %s\n", handoff.LatestPath(absRoot))
+			return
+		}
+	} else {
+		sinceDuration, err := time.ParseDuration(*since)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "Invalid --since duration: %v\n", err)
+			os.Exit(1)
+		}
+		if sinceDuration <= 0 {
+			fmt.Fprintln(os.Stderr, "Invalid --since duration: must be > 0")
+			os.Exit(1)
+		}
+		artifact, err = handoff.Build(absRoot, handoff.BuildOptions{
+			BaseRef: *baseRef,
+			Since:   sinceDuration,
+		})
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "Error building handoff: %v\n", err)
+			os.Exit(1)
+		}
+		if !*noSave {
+			if err := handoff.WriteLatest(absRoot, artifact); err != nil {
+				fmt.Fprintf(os.Stderr, "Error saving handoff: %v\n", err)
+				os.Exit(1)
+			}
+		}
+	}
+
+	if 
*detailPath != "" { + detail, err := handoff.BuildFileDetail(absRoot, artifact, *detailPath, nil) + if err != nil { + fmt.Fprintf(os.Stderr, "Error loading handoff detail: %v\n", err) + os.Exit(1) + } + if *jsonMode { + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + if err := enc.Encode(detail); err != nil { + fmt.Fprintf(os.Stderr, "Error encoding JSON: %v\n", err) + os.Exit(1) + } + return + } + out := handoff.RenderFileDetailMarkdown(detail) + out = limits.TruncateAtLineBoundary(out, limits.MaxHandoffDetailBytes, "\n\n... (handoff detail truncated)\n") + fmt.Print(out) + return + } + + if *jsonMode { + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + switch { + case *prefixOnly: + if err := enc.Encode(artifact.Prefix); err != nil { + fmt.Fprintf(os.Stderr, "Error encoding JSON: %v\n", err) + os.Exit(1) + } + case *deltaOnly: + if err := enc.Encode(artifact.Delta); err != nil { + fmt.Fprintf(os.Stderr, "Error encoding JSON: %v\n", err) + os.Exit(1) + } + default: + if err := enc.Encode(artifact); err != nil { + fmt.Fprintf(os.Stderr, "Error encoding JSON: %v\n", err) + os.Exit(1) + } + } + return + } + + var out string + switch { + case *prefixOnly: + out = handoff.RenderPrefixMarkdown(artifact.Prefix) + case *deltaOnly: + out = handoff.RenderDeltaMarkdown(artifact.Delta) + default: + out = handoff.RenderMarkdown(artifact) + } + out = limits.TruncateAtLineBoundary(out, limits.MaxHandoffMarkdownBytes, "\n\n... 
(handoff output truncated)\n") + fmt.Print(out) + if !*latest && !*noSave { + fmt.Println() + fmt.Printf("Saved: %s\n", handoff.LatestPath(absRoot)) + fmt.Printf("Prefix: %s\n", handoff.PrefixPath(absRoot)) + fmt.Printf("Delta: %s\n", handoff.DeltaPath(absRoot)) + fmt.Printf("Metrics: %s\n", handoff.MetricsPath(absRoot)) + } +} + func runDaemon(root string) { daemon, err := watch.NewDaemon(root, false) if err != nil { diff --git a/mcp/main.go b/mcp/main.go index a55aad9..66de8d9 100644 --- a/mcp/main.go +++ b/mcp/main.go @@ -4,6 +4,7 @@ package main import ( "bytes" "context" + "encoding/json" "fmt" "log" "os" @@ -14,6 +15,7 @@ import ( "sync" "time" + "codemap/handoff" "codemap/limits" "codemap/render" "codemap/scanner" @@ -63,6 +65,18 @@ type WatchActivityInput struct { Minutes int `json:"minutes,omitempty" jsonschema:"Look back this many minutes (default: 30)"` } +type HandoffInput struct { + Path string `json:"path" jsonschema:"Path to the project directory"` + Since string `json:"since,omitempty" jsonschema:"Look back window for recent events (Go duration, e.g. 
2h, 30m)"` + Ref string `json:"ref,omitempty" jsonschema:"Git base ref for diff (default: main)"` + Latest bool `json:"latest,omitempty" jsonschema:"Read latest saved handoff artifact instead of generating a new one"` + Save bool `json:"save,omitempty" jsonschema:"When true, persist generated artifact to .codemap/handoff.latest.json"` + JSON bool `json:"json,omitempty" jsonschema:"Return raw JSON output"` + Prefix bool `json:"prefix,omitempty" jsonschema:"Return only the stable prefix snapshot"` + Delta bool `json:"delta,omitempty" jsonschema:"Return only the recent delta snapshot"` + File string `json:"file,omitempty" jsonschema:"Load detailed context for one changed file path from handoff delta"` +} + func main() { server := mcp.NewServer(&mcp.Implementation{ Name: "codemap", @@ -145,6 +159,12 @@ func main() { Description: "Get complete dependency context for a specific file: what it imports, what imports it, whether it's a hub, and all connected files. Use this before editing a file to understand its role in the codebase.", }, handleGetFileContext) + // Tool: get_handoff - Build/read cross-agent handoff artifact + mcp.AddTool(server, &mcp.Tool{ + Name: "get_handoff", + Description: "Build or read a layered handoff artifact for agent switching. Prefix = stable project context, delta = recent work. Supports lazy per-file detail via file=. Set save=true to persist generated artifacts.", + }, handleGetHandoff) + // Run server on stdio if err := server.Run(context.Background(), &mcp.StdioTransport{}); err != nil { log.Printf("Server error: %v", err) @@ -196,17 +216,13 @@ func handleGetStructure(ctx context.Context, req *mcp.CallToolRequest, input Pat render.Tree(&buf, project) output := stripANSI(buf.String()) - // IMPORTANT: MCP tool output contributes to Claude's context window. - // Large repos can produce megabytes of tree output, causing instant context overflow. - // Cap at 60KB (~15k tokens) to stay under 10% of typical 200k context limit. 
- const maxBytes = limits.MaxContextOutputBytes - if len(output) > maxBytes { - output = output[:maxBytes] - // Find last newline to avoid cutting mid-line - if idx := strings.LastIndex(output, "\n"); idx > maxBytes-1000 { - output = output[:idx] - } - output += fmt.Sprintf("\n\n... (truncated - repo has %d files, use `codemap --depth N` for full tree)\n", fileCount) + // IMPORTANT: MCP tool output contributes directly to context window usage. + if len(output) > limits.MaxStructureOutputBytes { + output = limits.TruncateAtLineBoundary( + output, + limits.MaxStructureOutputBytes, + fmt.Sprintf("\n\n... (truncated - repo has %d files, use `codemap --depth N` for full tree)\n", fileCount), + ) } // Add hub file summary. Prefer daemon cache; avoid expensive graph builds on @@ -370,6 +386,7 @@ Available tools: get_diff - Changed files vs branch find_file - Search by filename get_importers - Find what imports a file + get_handoff - Build/read cross-agent handoff summary Live watch tools: start_watch - Start watching a project for changes @@ -496,6 +513,100 @@ func handleGetImporters(ctx context.Context, req *mcp.CallToolRequest, input Imp return textResult(fmt.Sprintf("%d files import '%s':%s\n%s", len(importers), input.File, hubNote, strings.Join(importers, "\n"))), nil, nil } +func handleGetHandoff(ctx context.Context, req *mcp.CallToolRequest, input HandoffInput) (*mcp.CallToolResult, any, error) { + if input.Prefix && input.Delta { + return errorResult("prefix and delta options are mutually exclusive"), nil, nil + } + + absRoot, err := filepath.Abs(input.Path) + if err != nil { + return errorResult("Invalid path: " + err.Error()), nil, nil + } + + var artifact *handoff.Artifact + if input.Latest { + artifact, err = handoff.ReadLatest(absRoot) + if err != nil { + return errorResult("Failed to read handoff: " + err.Error()), nil, nil + } + if artifact == nil { + return textResult("No saved handoff found at " + handoff.LatestPath(absRoot)), nil, nil + } + } else { + 
baseRef := input.Ref + if baseRef == "" { + baseRef = handoff.DefaultBaseRef + } + since := handoff.DefaultSince + if input.Since != "" { + since, err = time.ParseDuration(input.Since) + if err != nil { + return errorResult("Invalid since duration: " + err.Error()), nil, nil + } + if since <= 0 { + return errorResult("Invalid since duration: must be > 0"), nil, nil + } + } + + artifact, err = handoff.Build(absRoot, handoff.BuildOptions{ + BaseRef: baseRef, + Since: since, + }) + if err != nil { + return errorResult("Failed to build handoff: " + err.Error()), nil, nil + } + if input.Save { + if err := handoff.WriteLatest(absRoot, artifact); err != nil { + return errorResult("Failed to save handoff: " + err.Error()), nil, nil + } + } + } + + if input.File != "" { + detail, err := handoff.BuildFileDetail(absRoot, artifact, input.File, nil) + if err != nil { + return errorResult("Failed to load handoff detail: " + err.Error()), nil, nil + } + if input.JSON { + data, err := json.MarshalIndent(detail, "", " ") + if err != nil { + return errorResult("Failed to serialize handoff detail: " + err.Error()), nil, nil + } + return textResult(string(data)), nil, nil + } + out := handoff.RenderFileDetailMarkdown(detail) + out = limits.TruncateAtLineBoundary(out, limits.MaxHandoffDetailBytes, "\n\n... 
(handoff detail truncated)\n") + return textResult(out), nil, nil + } + + if input.JSON { + var payload any = artifact + switch { + case input.Prefix: + payload = artifact.Prefix + case input.Delta: + payload = artifact.Delta + } + data, err := json.MarshalIndent(payload, "", " ") + if err != nil { + return errorResult("Failed to serialize handoff: " + err.Error()), nil, nil + } + return textResult(string(data)), nil, nil + } + + var out string + switch { + case input.Prefix: + out = handoff.RenderPrefixMarkdown(artifact.Prefix) + case input.Delta: + out = handoff.RenderDeltaMarkdown(artifact.Delta) + default: + out = handoff.RenderMarkdown(artifact) + } + out = limits.TruncateAtLineBoundary(out, limits.MaxHandoffMarkdownBytes, "\n\n... (handoff output truncated)\n") + return textResult(out), nil, nil +} + // ANSI escape code pattern var ansiRegex = regexp.MustCompile(`\x1b\[[0-9;]*m`)