diff --git a/.claude/hooks/enforce-bun.sh b/.claude/hooks/enforce-bun.sh
new file mode 100755
index 0000000000..a465a69bfd
--- /dev/null
+++ b/.claude/hooks/enforce-bun.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+# PreToolUse hook: Block npm, npx, yarn, and pnpm commands — this project uses bun exclusively.
+
+INPUT=$(cat)
+COMMAND=$(echo "$INPUT" | jq -r '.tool_input.command // empty')
+
+if [ -z "$COMMAND" ]; then
+  exit 0
+fi
+
+# Check for npm commands (including the common "i"/"add" install aliases)
+if echo "$COMMAND" | grep -qE '(^|[;&|][[:space:]]*)npm[[:space:]]+(i|install|ci|add|run|test|exec|start|publish|version|update|uninstall|link|pack|audit|init|dedupe|ls|outdated|prune|rebuild|cache)\b'; then
+  echo "This project uses bun, not npm. Use the equivalent bun command instead." >&2
+  exit 2
+fi
+
+if echo "$COMMAND" | grep -qE '(^|[;&|][[:space:]]*)npx([[:space:]]|$)'; then
+  echo "This project uses bun, not npx. Use 'bunx' instead of 'npx'." >&2
+  exit 2
+fi
+
+# Check for yarn commands
+if echo "$COMMAND" | grep -qE '(^|[;&|][[:space:]]*)yarn([[:space:]]|$)'; then
+  echo "This project uses bun, not yarn. Use the equivalent bun command instead." >&2
+  exit 2
+fi
+
+# Check for pnpm commands
+if echo "$COMMAND" | grep -qE '(^|[;&|][[:space:]]*)pnpm([[:space:]]|$)'; then
+  echo "This project uses bun, not pnpm. Use the equivalent bun command instead." >&2
+  exit 2
+fi
+
+exit 0
diff --git a/.claude/settings.json b/.claude/settings.json
new file mode 100644
index 0000000000..be2941c10f
--- /dev/null
+++ b/.claude/settings.json
@@ -0,0 +1,15 @@
+{
+  "hooks": {
+    "PreToolUse": [
+      {
+        "matcher": "Bash",
+        "hooks": [
+          {
+            "type": "command",
+            "command": ".claude/hooks/enforce-bun.sh"
+          }
+        ]
+      }
+    ]
+  }
+}
diff --git a/.gitignore b/.gitignore
index 8478eb6fc6..0a09bab02a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -16,3 +16,6 @@ release/
 apps/web/.playwright
 apps/web/playwright-report
 apps/web/src/components/__screenshots__
+*.local.*
+**/coverage/
+.playwright-mcp/
diff --git a/.plans/17-claude-code.md b/.plans/17-claude-code.md
new file mode 100644
index 0000000000..822e978f50
--- /dev/null
+++ b/.plans/17-claude-code.md
@@ -0,0 +1,441 @@
+# Plan: Claude Code Integration (Orchestration Architecture)
+
+## Why this plan was rewritten
+
+The previous plan targeted a pre-orchestration architecture (`ProviderManager`, provider-native WS event methods, and direct provider UI wiring). The current app now routes everything through:
+
+1. `orchestration.dispatchCommand` (client intent)
+2. `OrchestrationEngine` (decide + persist + publish domain events)
+3. `ProviderCommandReactor` (domain intent -> `ProviderService`)
+4. `ProviderService` (adapter routing + canonical runtime stream)
+5. `ProviderRuntimeIngestion` (provider runtime -> internal orchestration commands)
+6. `orchestration.domainEvent` (single push channel consumed by web)
+
+Claude integration must plug into this path instead of reintroducing legacy provider-specific flows.
+
+---
+
+## Current constraints to design around (post-Stage 1)
+
+1. Provider runtime ingestion expects canonical `ProviderRuntimeEvent` shapes, not provider-native payloads.
+2. Start input now uses typed `providerOptions` and generic `resumeCursor`; top-level provider-specific fields were removed.
+3.
`resumeCursor` is intentionally opaque outside adapters and must never be synthesized from `providerThreadId`. +4. `ProviderService` still requires adapter `startSession()` to return a `ProviderSession` with `threadId`. +5. Checkpoint revert currently calls `providerService.rollbackConversation()`, so Claude adapter needs a rollback strategy compatible with current reactor behavior. +6. Web currently marks Claude as unavailable (`"Claude Code (soon)"`) and model picker is Codex-only. + +--- + +## Architecture target + +Add Claude as a first-class provider adapter that emits canonical runtime events and works with existing orchestration reactors without adding new WS channels or bypass paths. + +Key decisions: + +1. Keep orchestration provider-agnostic; adapt Claude inside adapter/layer boundaries. +2. Use the existing canonical runtime stream (`ProviderRuntimeEvent`) as the only ingestion contract. +3. Keep provider session routing in `ProviderService` and `ProviderSessionDirectory`. +4. Add explicit provider selection to turn-start intent so first turn can start Claude session intentionally. + +--- + +## Phase 1: Contracts and command shape updates + +### 1.1 Provider-aware model contract + +Update `packages/contracts/src/model.ts` so model resolution can be provider-aware instead of Codex-only. + +Expected outcomes: + +1. Introduce provider-scoped model lists (Codex + Claude). +2. Add helpers that resolve model by provider. +3. Preserve backwards compatibility for existing Codex defaults. + +### 1.2 Turn-start provider intent + +Update `packages/contracts/src/orchestration.ts`: + +1. Add optional `provider: ProviderKind` to `ThreadTurnStartCommand`. +2. Carry provider through `ThreadTurnStartRequestedPayload`. +3. Keep existing command valid when provider is omitted. + +This removes the implicit “Codex unless session already exists” behavior as the only path. 
+ +### 1.3 Provider session start input for Claude runtime knobs (completed) + +Update `packages/contracts/src/provider.ts`: + +1. Move provider-specific start fields into typed `providerOptions`: + - `providerOptions.codex` + - `providerOptions.claudeCode` +2. Keep `resumeCursor` as the single cross-provider resume input in `ProviderSessionStartInput`. +3. Deprecate/remove `resumeThreadId` from the generic start contract. +4. Treat `resumeCursor` as adapter-owned opaque state. + +### 1.4 Contract tests (completed) + +Update/add tests in `packages/contracts/src/*.test.ts` for: + +1. New command payload shape. +2. Provider-aware model resolution behavior. +3. Breaking-change expectations for removed top-level provider fields. + +--- + +## Phase 2: Claude adapter implementation + +### 2.1 Add adapter service + layer + +Create: + +1. `apps/server/src/provider/Services/ClaudeCodeAdapter.ts` +2. `apps/server/src/provider/Layers/ClaudeCodeAdapter.ts` + +Adapter must implement `ProviderAdapterShape`. + +### 2.1.a SDK dependency and baseline config + +Add server dependency: + +1. `@anthropic-ai/claude-agent-sdk` + +Baseline adapter options to support from day one: + +1. `cwd` +2. `model` +3. `pathToClaudeCodeExecutable` (from `providerOptions.claudeCode.binaryPath`) +4. `permissionMode` (from `providerOptions.claudeCode.permissionMode`) +5. `maxThinkingTokens` (from `providerOptions.claudeCode.maxThinkingTokens`) +6. `resume` +7. `resumeSessionAt` +8. `includePartialMessages` +9. `canUseTool` +10. `hooks` +11. `env` and `additionalDirectories` (if needed for sandbox/workspace parity) + +### 2.2 Claude runtime bridge + +Implement a Claude runtime bridge (either directly in adapter layer or via dedicated manager file) that wraps Agent SDK query lifecycle. + +Required capabilities: + +1. Long-lived session context per adapter session. +2. Multi-turn input queue. +3. Interrupt support. +4. Approval request/response bridge. +5. 
Resume support via opaque `resumeCursor` (parsed inside Claude adapter only). + +#### 2.2.a Agent SDK details to preserve + +The adapter should explicitly rely on these SDK capabilities: + +1. `query()` returns an async iterable message stream and control methods (`interrupt`, `setModel`, `setPermissionMode`, `setMaxThinkingTokens`, account/status helpers). +2. Multi-turn input is supported via async-iterable prompt input. +3. Tool approval decisions are provided via `canUseTool`. +4. Resume support uses `resume` and optional `resumeSessionAt`, both derived by parsing adapter-owned `resumeCursor`. +5. Hooks can be used for lifecycle signals (`Stop`, `PostToolUse`, etc.) when we need adapter-originated checkpoint/runtime events. + +#### 2.2.b Effect-native session lifecycle skeleton + +```ts +import { query } from "@anthropic-ai/claude-agent-sdk"; +import { Effect } from "effect"; + +const acquireSession = (input: ProviderSessionStartInput) => + Effect.acquireRelease( + Effect.tryPromise({ + try: async () => { + const claudeOptions = input.providerOptions?.claudeCode; + const resumeState = readClaudeResumeState(input.resumeCursor); + const abortController = new AbortController(); + const result = query({ + prompt: makePromptAsyncIterable(), + options: { + cwd: input.cwd, + model: input.model, + permissionMode: claudeOptions?.permissionMode, + maxThinkingTokens: claudeOptions?.maxThinkingTokens, + pathToClaudeCodeExecutable: claudeOptions?.binaryPath, + resume: resumeState?.threadId, + resumeSessionAt: resumeState?.sessionAt, + signal: abortController.signal, + includePartialMessages: true, + canUseTool: makeCanUseTool(), + hooks: makeClaudeHooks(), + }, + }); + return { abortController, result }; + }, + catch: (cause) => + new ProviderAdapterProcessError({ + provider: "claudeCode", + sessionId: "pending", + detail: "Failed to start Claude runtime session.", + cause, + }), + }), + ({ abortController }) => Effect.sync(() => abortController.abort()), + ); +``` + +#### 
2.2.c AsyncIterable -> Effect Stream integration + +Preferred when available in the pinned Effect version: + +```ts +const sdkMessageStream = Stream.fromAsyncIterable( + session.result, + (cause) => + new ProviderAdapterProcessError({ + provider: "claudeCode", + sessionId, + detail: "Claude runtime stream failed.", + cause, + }), +); +``` + +Portable fallback (already aligned with current server patterns): + +```ts +const sdkMessageStream = Stream.async((emit) => { + let cancelled = false; + void (async () => { + try { + for await (const message of session.result) { + if (cancelled) break; + emit.single(message); + } + emit.end(); + } catch (cause) { + emit.fail( + new ProviderAdapterProcessError({ + provider: "claudeCode", + sessionId, + detail: "Claude runtime stream failed.", + cause, + }), + ); + } + })(); + return Effect.sync(() => { + cancelled = true; + }); +}); +``` + +### 2.3 Canonical event mapping + +Claude adapter must translate Agent SDK output into canonical `ProviderRuntimeEvent`. + +Initial mapping target: + +1. assistant text deltas -> `content.delta` +2. final assistant text -> `item.completed` and/or `turn.completed` +3. approval requests -> `request.opened` +4. approval results -> `request.resolved` +5. system lifecycle -> `session.*`, `thread.*`, `turn.*` +6. errors -> `runtime.error` +7. plan/proposed-plan content when derivable + +Implementation note: + +1. Keep raw Claude message on `raw` for debugging. +2. Prefer canonical item/request kinds over provider-native enums. +3. If Claude emits extra event kinds we do not model yet, map them to `tool.summary`, `runtime.warning`, or `unknown`-compatible payloads instead of dropping silently. + +### 2.4 Resume cursor strategy + +Define Claude-owned opaque resume state, e.g.: + +```ts +interface ClaudeResumeCursor { + readonly version: 1; + readonly threadId?: string; + readonly sessionAt?: string; +} +``` + +Rules: + +1. Serialize only adapter-owned state into `resumeCursor`. +2. 
Parse/validate only inside Claude adapter. +3. Store updated cursor when Claude runtime yields enough data to resume safely. +4. Never overload orchestration thread id as Claude thread id. + +### 2.5 Interrupt and stop semantics + +Map orchestration stop/interrupt expectations onto SDK controls: + +1. `interruptTurn()` -> active query interrupt. +2. `stopSession()` -> close session resources and prevent future sends. +3. `rollbackThread()` -> see Phase 4. + +--- + +## Phase 3: Provider service and composition + +### 3.1 Register Claude adapter + +Update provider registry layer to include Claude: + +1. add `claudeCode` -> `ClaudeCodeAdapter` +2. ensure `ProviderService.listProviderStatuses()` reports Claude availability + +### 3.2 Persist provider binding + +Current `ProviderSessionDirectory` already stores provider/thread binding and opaque `resumeCursor`. + +Required validation: + +1. Claude bindings survive restart. +2. resume cursor remains opaque and round-trips untouched. +3. stopAll + restart can recover Claude sessions when possible. + +### 3.3 Provider start routing + +Update `ProviderCommandReactor` / orchestration flow: + +1. If a thread turn start requests `provider: "claudeCode"`, start Claude if no active session exists. +2. If a thread already has Claude session binding, reuse it. +3. If provider switches between Codex and Claude, explicitly stop/rebind before next send. + +--- + +## Phase 4: Checkpoint and revert strategy + +Claude does not necessarily expose the same conversation rewind primitive as Codex app-server. Current architecture expects `providerService.rollbackConversation()`. + +Pick one explicit strategy: + +### Option A: provider-native rewind + +If SDK/runtime supports safe rewind: + +1. implement in Claude adapter +2. keep `CheckpointReactor` unchanged + +### Option B: session restart + state truncation shim + +If no native rewind exists: + +1. 
Claude adapter returns successful rollback by: + - stopping current Claude session + - clearing/rewriting stored Claude resume cursor to last safe resumable point + - forcing next turn to recreate session from persisted orchestration state +2. Document that rollback is “conversation reset to checkpoint boundary”, not provider-native turn deletion. + +Whichever option is chosen: + +1. behavior must be deterministic +2. checkpoint revert tests must pass under orchestration expectations +3. user-visible activity log should explain failures clearly when provider rollback is impossible + +--- + +## Phase 5: Web integration + +### 5.1 Provider picker and model picker + +Update web state/UI: + +1. allow choosing Claude as thread provider before first turn +2. show Claude model list from provider-aware model helpers +3. preserve existing Codex default behavior when provider omitted + +Likely touch points: + +1. `apps/web/src/store.ts` +2. `apps/web/src/components/ChatView.tsx` +3. `apps/web/src/types.ts` +4. `packages/shared/src/model.ts` + +### 5.2 Settings for Claude executable/options + +Add app settings if needed for: + +1. Claude binary path +2. default permission mode +3. default max thinking tokens + +Do not hardcode provider-specific config into generic session state if it belongs in app settings or typed `providerOptions`. + +### 5.3 Session rendering + +No new WS channel should be needed. Claude should appear through existing: + +1. thread messages +2. activities/worklog +3. approvals +4. session state +5. checkpoints/diffs + +--- + +## Phase 6: Testing strategy + +### 6.1 Contract tests + +Cover: + +1. provider-aware model schemas +2. provider field on turn-start command +3. provider-specific start options schema + +### 6.2 Adapter layer tests + +Add `ClaudeCodeAdapter.test.ts` covering: + +1. session start +2. event mapping +3. approval bridge +4. resume cursor parse/serialize +5. interrupt behavior +6. 
rollback behavior or explicit unsupported error path + +Use SDK-facing layer tests/mocks only at the boundary. Do not mock orchestration business logic in higher-level tests. + +### 6.3 Provider service integration tests + +Extend provider integration coverage so Claude is exercised through `ProviderService`: + +1. start Claude session +2. send turn +3. receive canonical runtime events +4. restart/recover using persisted binding + +### 6.4 Orchestration integration tests + +Add/extend integration tests around: + +1. first-turn provider selection +2. Claude approval requests routed through orchestration +3. Claude runtime ingestion -> messages/activities/session updates +4. checkpoint revert behavior under Claude +5. stopAll/restart recovery + +These should validate real orchestration flows, not just adapter behavior. + +--- + +## Phase 7: Rollout order + +Recommended implementation order: + +1. contracts/provider-aware models +2. provider field on turn-start +3. Claude adapter skeleton + start/send/stream +4. canonical event mapping +5. provider registry/service wiring +6. orchestration recovery + checkpoint strategy +7. web provider/model picker +8. full integration tests + +--- + +## Non-goals + +1. Reintroducing provider-specific WS methods/channels. +2. Storing provider-native thread ids as orchestration ids. +3. Bypassing orchestration engine for Claude-specific UI flows. +4. Encoding Claude resume semantics outside adapter-owned `resumeCursor`. diff --git a/.plans/18-cursor-agent-provider.md b/.plans/18-cursor-agent-provider.md new file mode 100644 index 0000000000..452592e68d --- /dev/null +++ b/.plans/18-cursor-agent-provider.md @@ -0,0 +1,327 @@ +# Plan: Cursor ACP (`agent acp`) Provider Integration + +## Goal + +Add Cursor as a first-class provider in T3 Code using ACP (`agent acp`) over JSON-RPC 2.0 stdio, with robust session lifecycle handling and canonical `ProviderRuntimeEvent` projection. 
+ +--- + +## 1) Exploration Findings (from live ACP probes) + +### 1.1 Core invocation and transport + +1. Binary is `agent` on PATH (`2026.02.27-e7d2ef6` observed). +2. ACP server command is `agent acp`. +3. Transport is newline-delimited JSON-RPC 2.0 over stdio. +4. Messages: + - client -> server: requests and responses to server-initiated requests + - server -> client: responses, notifications (`session/update`), and server requests (`session/request_permission`) + +### 1.2 Handshake and session calls observed + +1. `initialize` returns: + - `protocolVersion` + - `agentCapabilities` (`loadSession`, `mcpCapabilities`, `promptCapabilities`) + - `authMethods` (includes `cursor_login`) +2. `authenticate { methodId: "cursor_login" }` returns `{}` when logged in. +3. `session/new` returns: + - `sessionId` + - `modes` (`agent`, `plan`, `ask`) +4. `session/load` works and requires `sessionId`, `cwd`, `mcpServers`. +5. `session/prompt` returns terminal response `{ stopReason: "end_turn" | "cancelled" }`. + +Important sequence note: +1. ACP currently allows `session/new` even without explicit `initialize`/`authenticate` when local auth already exists. +2. For adapter consistency and forward compatibility, we should still send `initialize` and `authenticate` during startup. + +### 1.3 `session/update` event families observed + +Observed `params.update.sessionUpdate` values: + +1. `available_commands_update` +2. `agent_thought_chunk` +3. `agent_message_chunk` +4. `tool_call` +5. `tool_call_update` + +Observed payload behavior: + +1. `agent_*_chunk` provides `content: { type: "text", text: string }`. +2. `tool_call` may be emitted multiple times for same `toolCallId`: + - initial generic form (`title: "Terminal"`, `rawInput: {}`) + - enriched form (`title: "\`pwd\`"`, `rawInput: { command: "pwd" }`) +3. `tool_call_update` statuses observed: + - `in_progress` + - `completed` +4. 
`tool_call_update` on completion may include `rawOutput`: + - terminal: `{ exitCode, stdout, stderr }` + - search/find: `{ totalFiles, truncated }` + +### 1.4 Permission flow observed + +1. ACP server sends `session/request_permission` (JSON-RPC request with `id`). +2. Request shape includes: + - `params.sessionId` + - `params.toolCall` + - `params.options` (`allow-once`, `allow-always`, `reject-once`) +3. Client must respond on same `id` with: + - `{ outcome: { outcome: "selected", optionId: "<chosen optionId, e.g. allow-once>" } }` +4. Reject path still results in tool lifecycle completion events (`tool_call_update status: completed`), typically without `rawOutput`. + +### 1.5 Error and capability quirks + +1. `session/cancel` currently returns: + - JSON-RPC error `-32601` Method not found +2. Error shape examples: + - unknown auth method: `-32602` + - `session/load` missing/invalid params: `-32602` + - `session/prompt` unknown session: `-32603` with details +3. Parallel prompts on same session are effectively single-flight: + - second prompt can cause first to complete with `stopReason: "cancelled"`. +4. `session/new` accepts a `model` field (no explicit echo in response). + +Probe artifacts: +1. `.tmp/acp-probe/*/transcript.ndjson` +2. `.tmp/acp-probe/*/summary.json` +3. `scripts/cursor-acp-probe.mjs` + +--- + +## 2) Integration Constraints for T3 + +1. T3 adapter contract still requires: + - `startSession`, `sendTurn`, `interruptTurn`, `respondToRequest`, `readThread`, `rollbackThread`, `stopSession`, `listSessions`, `hasSession`, `stopAll`, `streamEvents`. +2. Orchestration consumes canonical `ProviderRuntimeEvent` only. +3. `ProviderCommandReactor` provider precedence fix remains required (respect explicit provider on turn start). +4. ACP now supports external permission decisions, so Cursor can participate in T3 approval UX via adapter-managed request/response plumbing. + +--- + +## 3) Proposed Architecture + +### 3.1 New server components + +1.
`apps/server/src/provider/Services/CursorAdapter.ts` (service contract/tag + ACP event schemas). +2. `apps/server/src/provider/Layers/CursorAdapter.ts` (single implementation unit; owns ACP process lifecycle, JSON-RPC routing, runtime projection). +3. No manager indirection; keep logic in layer implementation. + +### 3.2 Session model + +1. One long-lived ACP child process per T3 Cursor provider session. +2. Track: + - `providerSessionId` (T3 synthetic ID) + - `acpSessionId` (from `session/new` or restored via `session/load`) + - `cwd`, `model`, in-flight turn state + - pending permission requests by JSON-RPC request id +3. Resume support: + - persist `acpSessionId` in provider resume metadata and call `session/load` on reattach. + +### 3.3 Command strategy + +1. `startSession`: + - spawn `agent acp` + - `initialize` + - `authenticate(cursor_login)` (best-effort, typed failure handling) + - `session/new` or `session/load` +2. `sendTurn`: + - send `session/prompt { sessionId, prompt: [...] }` + - consume streaming `session/update` notifications until terminal prompt response +3. `interruptTurn`: + - no native `session/cancel` today; implement fallback: + - terminate ACP process + restart + `session/load` for subsequent turns + - mark in-flight turn as interrupted/failed in canonical events +4. `respondToRequest`: + - map T3 approval decision -> ACP `optionId` + - reply to exact JSON-RPC request id from `session/request_permission` + +### 3.4 Effect-first implementation style (required) + +1. Keep logic inside `CursorAdapterLive`. +2. Use Effect primitives: + - `Queue` + `Stream.fromQueue` for event fan-out + - `Ref` / `Ref.Synchronized` for session/process/request state + - scoped fibers for stdout/stderr read loops +3. Typed JSON decode at boundary: + - request/response envelopes + - `session/update` union schema + - permission-request schema +4. Keep adapter errors in typed error algebra with explicit mapping at process/protocol boundaries. 
+ +--- + +## 4) Canonical Event Mapping Plan (ACP -> ProviderRuntimeEvent) + +1. `session/update: agent_message_chunk` + - emit `message.delta` for assistant stream +2. prompt terminal response (`session/prompt` result `stopReason: end_turn`) + - emit `message.completed` + `turn.completed` +3. `session/update: agent_thought_chunk` + - initial mapping: emit thinking activity (or ignore if we keep current canonical surface minimal) +4. `session/update: tool_call` + - first-seen `toolCallId` emits `tool.started` + - subsequent `tool_call` for same ID treated as metadata update (no duplicate started event) +5. `session/update: tool_call_update` + - `in_progress`: optional progress activity + - `completed`: emit `tool.completed` with summarized `rawOutput` when present +6. `session/request_permission` + - emit `approval.requested` with mapped options + - when client decision sent, emit `approval.resolved` +7. protocol/process error + - emit `runtime.error` + - fail active turn/session as appropriate + +Synthetic IDs: +1. `turnId`: T3-generated UUID per `sendTurn`. +2. `itemId`: + - assistant stream: `${turnId}:assistant` + - tools: `${turnId}:${toolCallId}` + +--- + +## 5) Approval, Resume, and Rollback Behavior + +### 5.1 Approvals + +1. Cursor ACP permission requests are externally controllable; implement full `respondToRequest` path in v1. +2. Decision mapping: + - allow once -> `allow-once` + - allow always -> `allow-always` + - reject -> `reject-once` + +### 5.2 Resume + +1. `session/load` is available and should be first-class for adapter restart/reconnect. +2. Must send required params: `sessionId`, `cwd`, `mcpServers`. + +### 5.3 Rollback / thread read + +1. ACP currently has no observed rollback API. +2. Plan for v1: + - `readThread`: adapter-maintained snapshot projection + - `rollbackThread`: explicit unsupported error +3. Product guard: + - disable checkpoint revert for Cursor threads in UI until rollback exists. 
+ +--- + +## 6) Required Contract and Runtime Changes + +### 6.1 Contracts + +1. Add `cursor` to `ProviderKind`. +2. Add Cursor provider start options (`providerOptions.cursor`), ACP-oriented: + - optional `binaryPath` + - optional auth/mode knobs if needed later +3. Extend model options for Cursor list and traits mapping. +4. Add schemas for ACP-native event union in Cursor adapter service file. + +### 6.2 Server orchestration and registry + +1. Register `CursorAdapter` in provider registry and server layer wiring. +2. Update provider-kind persistence decoding for `cursor`. +3. Fix `ProviderCommandReactor` precedence to honor explicit provider in turn-start command. + +### 6.3 Web + +1. Cursor in provider picker and model picker (already partially done). +2. Trait controls map to concrete Cursor model identifiers. +3. Surface unsupported rollback behavior in UX. + +--- + +## 7) Implementation Phases + +### Phase A: ACP process and protocol skeleton + +1. Implement ACP process lifecycle in `CursorAdapterLive`. +2. Implement JSON-RPC request/response multiplexer. +3. Implement `initialize`/`authenticate`/`session/new|load` flow. +4. Wire `streamEvents` from ACP notifications. + +### Phase B: Runtime projection and approvals + +1. Map `session/update` variants to canonical runtime events. +2. Implement permission-request bridging to `respondToRequest`. +3. Implement dedupe for repeated `tool_call` on same `toolCallId`. + +### Phase C: Turn control and interruption + +1. Implement single in-flight prompt protection per session. +2. Implement interruption fallback (process restart + reload) because `session/cancel` unavailable. +3. Ensure clean state recovery on ACP process crash. + +### Phase D: Orchestration + UX polish + +1. Provider routing precedence fix. +2. Cursor-specific UX notes for unsupported rollback. +3. End-to-end smoke and event log validation. 
+ +--- + +## 8) Test Plan + +Follow project rule: backend external-service integrations tested via layered fakes, not by mocking core business logic. + +### 8.1 Unit tests (`CursorAdapter`) + +1. JSON-RPC envelope parsing: + - response matching by id + - server request handling (`session/request_permission`) + - notification decode (`session/update`) +2. Event projection: + - `agent_message_chunk` / `agent_thought_chunk` + - `tool_call` + `tool_call_update` dedupe/lifecycle + - permission request -> approval events +3. Error mapping: + - unknown session + - method-not-found (`session/cancel`) + - invalid params + +### 8.2 Provider service/routing tests + +1. Registry resolves `cursor`. +2. Session directory persistence reads/writes `cursor`. +3. ProviderService fan-out ordering with Cursor ACP events. + +### 8.3 Orchestration tests + +1. `thread.turn.start` with `provider: cursor` routes to Cursor adapter. +2. approval response command maps to ACP permission response. +3. checkpoint revert on Cursor thread returns controlled unsupported failure. + +### 8.4 Optional live smoke + +1. Env-gated ACP smoke: + - start session + - run prompt + - observe deltas + completion + - exercise permission request path with one tool call + +--- + +## 9) Operational Notes + +1. Keep one in-flight turn per ACP session. +2. Keep per-session ACP process logs/NDJSON artifacts for debugging. +3. Treat `session/cancel` as unsupported until Cursor ships it; avoid relying on it. +4. Preserve resume metadata (`acpSessionId`) for crash recovery. + +--- + +## 10) Open Questions + +1. Should we call `authenticate` always, or only after auth-required errors? +2. Should model selection be passed at `session/new` only, or can/should we support model switching mid-session if ACP adds API? +3. For interruption UX, do we expose “hard interrupt” semantics (process restart) explicitly? + +--- + +## 11) Delivery Checklist + +1. Plan/documentation switched from headless `agent -p` to ACP `agent acp`. +2. 
Contracts updated (`ProviderKind`, Cursor options, model/trait mapping). +3. Cursor ACP adapter layer implemented and registered. +4. Provider precedence fixed in orchestration router. +5. Approval response path wired through ACP permission requests. +6. Tests added for protocol decode, projection, approval flow, and routing. +7. Lint + tests green. diff --git a/.plans/server-driven-review-flow.md b/.plans/server-driven-review-flow.md new file mode 100644 index 0000000000..ed2871f9f6 --- /dev/null +++ b/.plans/server-driven-review-flow.md @@ -0,0 +1,701 @@ +# Implementation Plan: Server-Driven PR Review Flow + +## Executive Summary + +Replace the current frontend-orchestrated PR review flow (8+ sequential WS round-trips with user-visible spinners) with a single server-side operation. The frontend sends one request (`reviewRequest.startReview`) and receives back a `threadId` immediately. The server handles all setup (project resolution, cloning, worktree creation, thread creation, review linking, agent kickoff) and pushes progress via existing orchestration events. The chat view naturally renders agent progress as it arrives. + +**Key deliverables:** +- New `reviewRequest.startReview` WS method (server) +- New server-side `ReviewFlowOrchestrator` module that composes existing services +- Simplified frontend: dialog becomes a thin "starting review..." 
indicator +- Agent auto-kickoff: the review agent starts automatically, no manual trigger needed + +**Success criteria:** +- Review start is a single WS call from the frontend +- User sees the chat view within ~200ms of clicking "Start Review" +- Agent begins working while worktree setup completes in the background +- All error states are surfaced via existing orchestration events +- `bun fmt`, `bun lint`, and `bun typecheck` pass + +--- + +## Repository Context + +### Technology Stack +- **Server:** Node.js + Effect-TS, WebSocket-based RPC +- **Web:** React + Vite, TanStack Router/Query, Zustand stores +- **Contracts:** Effect/Schema shared type definitions +- **Persistence:** SQLite via effect/sql + +### Current Flow (Frontend-Orchestrated) + +The `StandaloneReviewPrDialog.tsx` component runs this sequential pipeline: + +1. Validate PR URL, extract repo URL +2. Match repo URL to existing project via `githubUrlByProjectId` map +3. If no match: clone repo via `git.cloneRepo`, create project via `orchestration.dispatchCommand(project.create)` +4. Fetch PR details via `git.fetchPrDetails` + prepare branch via `git.preparePullRequestThread` (parallel) +5. Create worktree via `git.createWorktree` +6. Set branch upstream via `git.setBranchUpstream` +7. Create draft thread in `composerDraftStore` (client-only, pre-server) +8. Set review prompt in composer draft +9. Call `onThreadCreated` callback (triggers `reviewRequest.linkThread` from Sidebar) +10. Navigate to `/$threadId` +11. 
User manually triggers the agent by pressing send + +**Problems:** +- 6-8 sequential WS round-trips before the user sees the chat +- Race condition between `linkThread` and navigation +- Thread starts as a "draft" in client state, not yet on server +- Agent doesn't start automatically -- user must manually press send +- All error handling is in the frontend, duplicated from server capabilities +- `buildReviewPrompt` logic lives in `apps/web/src/lib/prReviewUtils.ts` (frontend-only) + +### Relevant Files + +**Server:** +- `apps/server/src/wsServer.ts` -- WS request routing (lines ~790-1400 handle all methods) +- `apps/server/src/git/Layers/GitManager.ts` -- `preparePullRequestThread`, worktree creation +- `apps/server/src/git/Services/GitManager.ts` -- Service interface +- `apps/server/src/git/Services/GitCore.ts` -- Low-level git operations +- `apps/server/src/git/Services/GitHubCli.ts` -- `gh` CLI wrapper (fetchPrDetails, etc.) +- `apps/server/src/orchestration/Services/OrchestrationEngine.ts` -- Command dispatch +- `apps/server/src/orchestration/Layers/OrchestrationEngine.ts` -- Engine implementation +- `apps/server/src/persistence/Services/ReviewRequestRepository.ts` -- Review request DB +- `apps/server/src/persistence/Layers/ReviewRequestRepository.ts` -- Repository implementation +- `apps/server/src/config.ts` -- `ServerConfig` (has `cwd`, `stateDir`) + +**Contracts:** +- `packages/contracts/src/ws.ts` -- `WS_METHODS`, `WebSocketRequestBody` union, push channels +- `packages/contracts/src/reviewRequest.ts` -- Review request schemas +- `packages/contracts/src/orchestration.ts` -- Commands, events, thread/project schemas +- `packages/contracts/src/git.ts` -- Git operation input/result schemas +- `packages/contracts/src/ipc.ts` -- `NativeApi` interface + +**Web:** +- `apps/web/src/components/StandaloneReviewPrDialog.tsx` -- Current frontend orchestration (375 lines) +- `apps/web/src/components/NotificationBell.tsx` -- `onStartReview` callback +- 
`apps/web/src/components/Sidebar.tsx` -- Mounts dialog, handles `onStartReview` +- `apps/web/src/lib/prReviewUtils.ts` -- `buildReviewPrompt`, `normalizePrReference` +- `apps/web/src/composerDraftStore.ts` -- Draft thread state management +- `apps/web/src/wsNativeApi.ts` -- WS transport wrapper for NativeApi +- `apps/web/src/nativeApi.ts` -- NativeApi singleton +- `apps/web/src/hooks/useHandleNewThread.ts` -- Thread creation helper + +--- + +## Architectural Decisions + +### AD-1: Single Server Method, Return Early + +The new `reviewRequest.startReview` method returns `{ threadId, projectId }` as soon as the thread is created on the server (after project resolution and thread.create dispatch). The heavier work (worktree setup, agent kickoff) continues asynchronously. The frontend navigates to the thread immediately and sees activity arrive via orchestration event push. + +**Rationale:** Performance first. The user should see the chat view within 200ms. Worktree creation can take seconds; the agent can start while the UI is already showing. + +### AD-2: Project Resolution on Server + +The server resolves which project matches the PR's repo URL by querying the orchestration read model. If no project matches, the server clones the repo (using `ServerConfig.cwd` or a configured `projectsWorkingDirectory` passed in the request) and creates the project via `orchestrationEngine.dispatch`. + +**Rationale:** The server already has all the services needed. Moving this to the server eliminates the `githubUrlByProjectId` map construction on the frontend and the multiple round-trips for clone + project.create. + +### AD-3: Move `buildReviewPrompt` to `packages/shared` + +The review prompt builder currently lives in `apps/web/src/lib/prReviewUtils.ts`. It takes `GitFetchPrDetailsResult` and produces a string. This is pure logic with no DOM dependencies. Move it to `packages/shared` so both server and web can use it. 
+ +**Rationale:** The server needs to build the prompt for auto-kickoff. Shared utilities belong in `packages/shared` per CLAUDE.md package roles. This avoids duplicating the prompt logic. + +### AD-4: Auto-Kickoff via `thread.turn.start` + +After creating the thread, the server dispatches a `thread.turn.start` command with the review prompt as the user message. This starts the agent immediately. The frontend doesn't need to put anything in the composer. + +**Rationale:** Eliminates the manual "press send" step. The user clicks "Start Review" and the agent begins working. The chat view shows the agent's progress naturally via orchestration events. + +### AD-5: No Clone from Server Without `projectsWorkingDirectory` + +If no matching project exists and no `projectsWorkingDirectory` is provided in the request, the server returns an error. The frontend should surface this and offer a "Configure in Settings" link (same as current behavior). + +**Rationale:** The server should not guess where to clone repos. This is a user configuration choice. + +### AD-6: Keep StandaloneReviewPrDialog as Thin Shell + +The dialog is not fully removed. It becomes a thin component that: +1. Validates the PR URL format (instant, no server call) +2. Calls `reviewRequest.startReview` +3. Shows a single loading state +4. Navigates to the thread on success +5. Shows errors on failure + +**Rationale:** The dialog still provides a place for URL input when the user clicks the "Review PR" button without a notification context. But it no longer orchestrates multi-step flows. + +--- + +## Implementation Strategy + +### Phase 1: Shared Utilities (Low Risk) + +**Goal:** Move `buildReviewPrompt` and `normalizePrReference` to `packages/shared` so both server and web can import them. 
+ +#### Step 1.1: Create `packages/shared/src/prReview.ts` + +- **Files changed:** + - NEW: `packages/shared/src/prReview.ts` + - EDIT: `packages/shared/package.json` (add `./prReview` subpath export) + +- **Content:** Move `buildReviewPrompt` and `normalizePrReference` from `apps/web/src/lib/prReviewUtils.ts`. The `GitFetchPrDetailsResult` type is imported from `@t3tools/contracts`. The `GITHUB_PR_URL_REGEX` and `isLikelyPrReference` can stay in the web package as they're only used for UI input validation. + +- **Validation:** `bun typecheck` passes. + +#### Step 1.2: Update Web Imports + +- **Files changed:** + - EDIT: `apps/web/src/lib/prReviewUtils.ts` -- re-export from shared, or update imports + - EDIT: `apps/web/src/components/StandaloneReviewPrDialog.tsx` -- update import path + +- **Validation:** `bun fmt && bun lint && bun typecheck` pass. Existing behavior unchanged. + +--- + +### Phase 2: Contract Layer (Low Risk) + +**Goal:** Define the new `reviewRequest.startReview` WS method schema. + +#### Step 2.1: Add `ReviewRequestStartReviewInput` and `ReviewRequestStartReviewResult` Schemas + +- **File:** `packages/contracts/src/reviewRequest.ts` + +```typescript +export const ReviewRequestStartReviewInput = Schema.Struct({ + prUrl: Schema.String, + /** Optional review request ID to link (from notification bell). */ + requestId: Schema.optional(TrimmedNonEmptyString), + /** Directory to clone into if no matching project exists. 
*/ + projectsWorkingDirectory: Schema.optional(Schema.String), +}); + +export const ReviewRequestStartReviewResult = Schema.Struct({ + threadId: ThreadId, + projectId: ProjectId, +}); +``` + +#### Step 2.2: Register in WS Method Catalog + +- **File:** `packages/contracts/src/ws.ts` + +Add to `WS_METHODS`: +```typescript +reviewRequestStartReview: "reviewRequest.startReview", +``` + +Add to `WebSocketRequestBody` union: +```typescript +tagRequestBody(WS_METHODS.reviewRequestStartReview, ReviewRequestStartReviewInput), +``` + +Add import of `ReviewRequestStartReviewInput` from `./reviewRequest`. + +#### Step 2.3: Update NativeApi Interface + +- **File:** `packages/contracts/src/ipc.ts` + +Add to `reviewRequest` section: +```typescript +startReview: (input: ReviewRequestStartReviewInput) => Promise; +``` + +Import the new types. + +- **Validation:** `bun typecheck` passes (web will fail until Step 4 adds the client implementation -- that's expected and handled in Phase 4). + +--- + +### Phase 3: Server Implementation (Medium Risk) + +**Goal:** Implement the server-side review flow orchestrator and wire it into the WS router. + +#### Step 3.1: Create `apps/server/src/reviewFlow.ts` + +This is a new module that composes existing services to orchestrate the entire review start flow. It is an Effect function, not a Service, because it is a one-shot operation rather than a long-lived service. + +**Dependencies (all existing services):** +- `OrchestrationEngineService` -- dispatch commands +- `ProjectionSnapshotQuery` -- read current projects +- `GitManager` -- `preparePullRequestThread` +- `GitCore` -- `cloneRepo`, `createWorktree`, `setBranchUpstream` +- `GitHubCli` -- `fetchPrDetails` +- `ReviewRequestRepository` -- `updateStatus` +- `ServerConfig` -- `cwd` +- `FileSystem`, `Path` -- path resolution + +**Algorithm:** + +``` +startReview(input: { prUrl, requestId?, projectsWorkingDirectory? }): + 1. Normalize prUrl (strip fragments/query) + 2. 
Extract repoUrl from prUrl + 3. Query orchestration read model for existing projects + 4. Match repoUrl to project.workspaceRoot via gitRemoteOriginToGitHubUrl + (need to check each project's git remote origin URL -- use GitCore.readConfigValue) + OR simpler: check if any project's workspaceRoot has a matching origin URL. + + OPTIMIZATION: The server can build the same githubUrlByProjectId map + by reading remote.origin.url for each project's workspaceRoot. + Cache this with a short TTL. + + 5a. If project found: use its projectId and cwd + 5b. If no project found: + - If no projectsWorkingDirectory: fail with descriptive error + - Clone repo via GitCore.cloneRepo + - Dispatch project.create command + + 6. Generate threadId (server-side UUID) + 7. Fetch PR details via GitHubCli.fetchPrDetails (needed for prompt + worktree) + 8. Prepare PR thread via GitManager.preparePullRequestThread (mode: "worktree") + This handles: fetch branch, create worktree, configure upstream + + 9. Dispatch thread.create command: + - threadId, projectId, title: "Review PR #{number}: {title}" + - model: project's defaultModel + - runtimeMode: "full-access" + - branch: worktree branch + - worktreePath: worktree path + + 10. Link review request (if requestId provided): + reviewRequestRepo.updateStatus({ id: requestId, status: "in_review", threadId }) + + 11. Build review prompt via buildReviewPrompt(prDetails) + + 12. Dispatch thread.turn.start command: + - threadId + - message: { role: "user", text: reviewPrompt } + - runtimeMode: "full-access" + + 13. 
Return { threadId, projectId } +``` + +**Steps 1-9 are synchronous (must complete before return).** +**Steps 10-12 can be fire-and-forget after returning threadId (use Effect.fork).** + +Actually, reconsidering: Steps 6-9 must complete synchronously because: +- The frontend needs `threadId` to navigate +- The thread must exist on the server for the chat view to load +- The turn start must happen for the agent to begin + +Steps 7-8 (PR details + worktree) are the slow parts. We can parallelize them with step 9 (thread creation) using a two-phase approach: + +**Revised algorithm (optimized):** + +``` +Phase A (fast, return to client): + 1-5. Resolve project (use cache for origin URL lookup) + 6. Generate threadId + 7. Fetch PR details (fast: single gh api call, ~200ms) + 8. Dispatch thread.create command + 9. Return { threadId, projectId } to client + +Phase B (background, after return): + 10. preparePullRequestThread (slow: fetch + worktree create) + 11. Dispatch thread.meta.update to set branch + worktreePath + 12. Link review request + 13. Build review prompt + 14. Dispatch thread.turn.start (kicks off agent) +``` + +This way the client gets `threadId` after ~300ms (project lookup + PR details + thread.create) and navigates immediately. The chat view shows a thread with no messages yet. Within 1-3 seconds, the worktree is ready and the agent starts, with activity events streaming in. + +**However**, there's a subtlety: `thread.turn.start` needs the thread to have a `worktreePath` so the provider session starts in the correct directory. The worktree must be ready before the turn starts. So we can't fully decouple Phase B. + +**Final revised approach:** Return `threadId` to the client as early as possible, but do all the setup synchronously in the server method. The WS request handler has no timeout issues (Effect handles long-running operations). The client navigates immediately and sees the thread appear when `thread.created` event arrives. 
+ +Actually -- the WS request/response model means the client awaits the response. If the server takes 3 seconds, the client waits 3 seconds. This is still better than 8 round-trips but not ideal. + +**Best approach: Two-phase with early response.** + +``` +1. Validate input, resolve project, generate threadId +2. Dispatch thread.create (synchronous, fast) +3. Return { threadId, projectId } to client +4. Fork background fiber: + a. Fetch PR details + b. Prepare worktree (fetch branch + create worktree) + c. Dispatch thread.meta.update (set branch, worktreePath) + d. Link review request + e. Build review prompt + f. Dispatch thread.turn.start + g. If any step fails, dispatch thread.activity.append with error +``` + +The client navigates to `/$threadId` immediately. The thread exists (from step 2) but has no messages/worktree yet. When step 4f completes, the agent starts and events flow to the chat view. + +If step 4b fails (e.g., clone/worktree error), we dispatch an error activity that shows in the chat timeline, and the thread stays in an error state that the user can see. + +**Error activity for failures:** +```typescript +orchestrationEngine.dispatch({ + type: "thread.activity.append", + commandId: newCommandId(), + threadId, + activity: { + id: newEventId(), + tone: "error", + kind: "review-setup-failed", + summary: "Failed to set up review workspace: ${error.message}", + payload: {}, + turnId: null, + createdAt: new Date().toISOString(), + }, + createdAt: new Date().toISOString(), +}); +``` + +#### Step 3.2: Wire into `wsServer.ts` Route Handler + +- **File:** `apps/server/src/wsServer.ts` + +Add a new case in the `routeRequest` switch: + +```typescript +case WS_METHODS.reviewRequestStartReview: { + const body = stripRequestTag(request.body); + return yield* startReview(body); +} +``` + +Import the `startReview` function from `./reviewFlow.ts`. + +The `startReview` function needs access to all the services already available in `createServer`'s closure. 
Two options: + +**Option A:** Pass services explicitly to `startReview`. +**Option B:** Make `startReview` an Effect that reads services from the environment. + +Option B is cleaner and follows existing patterns. The `routeRequest` function already runs in a context with all `ServerRuntimeServices`. We just need to ensure `startReview` declares its dependencies correctly. + +```typescript +// reviewFlow.ts +export const startReview = Effect.fnUntraced(function* (input: { + prUrl: string; + requestId?: string; + projectsWorkingDirectory?: string; +}) { + const orchestrationEngine = yield* OrchestrationEngineService; + const projectionQuery = yield* ProjectionSnapshotQuery; + const gitManager = yield* GitManager; + const gitCore = yield* GitCore; + const gitHubCli = yield* GitHubCli; + const reviewRequestRepo = yield* ReviewRequestRepository; + const serverConfig = yield* ServerConfig; + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + // ... implementation +}); +``` + +Wait -- `routeRequest` in wsServer.ts is an `Effect.fnUntraced` that doesn't appear to provide services via the environment. It accesses services through closure variables (`orchestrationEngine`, `gitManager`, etc.) that were resolved at server startup. So Option A is more consistent with the existing pattern. + +Looking at the code more carefully: `routeRequest` is defined inside `createServer` which already has all services in scope. The `startReview` function can similarly be created inside `createServer` using those closure variables, or it can be a standalone Effect that takes the services as parameters. + +**Decision:** Create `startReview` as a standalone Effect function in `reviewFlow.ts` that takes services as dependencies via Effect context. In `wsServer.ts`, provide the services when calling it. This keeps the review flow logic in its own module (clean separation) while still working within the existing architecture. 
+ +Actually, the simplest approach consistent with the codebase: define `startReview` as a regular function that takes the needed services as parameters (like `makeGitManager` does). The wsServer.ts handler calls it with the services already in scope. + +**Final decision:** Create a factory function in `reviewFlow.ts`: + +```typescript +export function makeStartReview(deps: { + orchestrationEngine: OrchestrationEngineShape; + projectionQuery: ProjectionSnapshotQueryShape; + gitManager: GitManagerShape; + gitCore: GitCoreShape; + gitHubCli: GitHubCliShape; + reviewRequestRepo: ReviewRequestRepositoryShape; + serverConfig: ServerConfigShape; + fileSystem: FileSystem.FileSystem; + path: Path.Path; +}) { + return Effect.fnUntraced(function* (input: ReviewRequestStartReviewInput) { + // ... implementation using deps.* + }); +} +``` + +In `wsServer.ts`: +```typescript +const startReview = makeStartReview({ + orchestrationEngine, projectionQuery, gitManager, gitCore, gitHubCli, + reviewRequestRepo, serverConfig: { cwd, ... }, fileSystem, path, +}); +``` + +This is consistent with how `makeGitManager` works in the codebase. + +#### Step 3.3: Project Resolution Logic + +The server needs to map a GitHub repo URL to an existing project. The current frontend builds `githubUrlByProjectId` by calling `git.status` for each project and extracting `originUrl`, then using `gitRemoteOriginToGitHubUrl`. + +For the server, we can: +1. Query the read model for all projects +2. For each non-deleted project, read `remote.origin.url` via `gitCore.readConfigValue` +3. Normalize via `gitRemoteOriginToGitHubUrl` from `@t3tools/shared/git` +4. Match against the repo URL extracted from the PR URL + +**Optimization:** Cache this mapping with a short TTL (30s). Multiple review starts within 30s reuse the cached mapping. + +#### Step 3.4: Background Fiber for Post-Return Work + +After returning `{ threadId, projectId }`, the server forks a fiber for: +1. Fetch PR details +2. Prepare worktree +3. 
Update thread meta (branch, worktreePath) +4. Link review request +5. Build and dispatch review prompt as turn.start + +Use `Effect.forkDaemon` or `Effect.forkIn(subscriptionsScope)` to ensure the fiber outlives the request handler. + +Error handling: wrap the entire background fiber in a catch-all that dispatches an error activity to the thread. + +- **Validation:** `bun typecheck` passes. Manual testing with mock server. + +--- + +### Phase 4: Frontend Simplification (Medium Risk) + +**Goal:** Replace the multi-step dialog with a single server call. + +#### Step 4.1: Add `startReview` to `wsNativeApi.ts` + +- **File:** `apps/web/src/wsNativeApi.ts` + +Add to `reviewRequest` section: +```typescript +startReview: (input) => transport.request(WS_METHODS.reviewRequestStartReview, input), +``` + +#### Step 4.2: Rewrite `StandaloneReviewPrDialog.tsx` + +The dialog becomes much simpler: + +```typescript +// Simplified phases +type Phase = "input" | "starting"; + +function StandaloneReviewPrDialog({ initialPrUrl, onClose, projectsWorkingDirectory }) { + const [prUrl, setPrUrl] = useState(initialPrUrl ?? ""); + const [phase, setPhase] = useState("input"); + const [error, setError] = useState(null); + const navigate = useNavigate(); + + const handleStart = async () => { + setPhase("starting"); + setError(null); + try { + const api = ensureNativeApi(); + const result = await api.reviewRequest.startReview({ + prUrl: prUrl.trim(), + ...(pendingReviewRequest?.requestId ? { requestId: pendingReviewRequest.requestId } : {}), + ...(projectsWorkingDirectory ? { projectsWorkingDirectory } : {}), + }); + await navigate({ to: "/$threadId", params: { threadId: result.threadId } }); + onClose(); + } catch (err) { + setError(err instanceof Error ? err.message : "Failed to start review"); + setPhase("input"); + } + }; + + // Auto-trigger when opened with initialPrUrl + // ... 
(same pattern as current) +} +``` + +**Removed concerns:** +- `githubUrlByProjectId` prop -- server handles project resolution +- `projects` prop -- server handles project matching +- `cloneRepoMutation` -- server handles cloning +- `fetchPrMutation` -- server handles PR details +- `setProjectDraftThreadId` -- thread is created server-side, no draft +- `useComposerDraftStore` -- no draft prompt needed, agent starts automatically +- Phase tracking for "cloning", "fetching-pr", "creating-worktree" -- single "starting" phase +- `onThreadCreated` callback with `linkThread` -- server handles linking + +**Kept:** +- PR URL validation (instant, no server call) +- Error display +- "needs working directory" warning with Settings link + +#### Step 4.3: Update Sidebar Props + +- **File:** `apps/web/src/components/Sidebar.tsx` + +The `StandaloneReviewPrDialog` no longer needs: +- `githubUrlByProjectId` +- `projects` +- `onThreadCreated` callback + +Remove these props from the dialog instantiation. The `projectsWorkingDirectory` prop stays (from appSettings). + +#### Step 4.4: Update NotificationBell Flow + +No changes needed to `NotificationBell.tsx`. It still calls `onStartReview(prUrl, requestId)` which opens the dialog. The dialog now just makes a single server call. 
+ +However, consider an optimization: for notification-triggered reviews (where we have `requestId`), skip the dialog entirely and call `startReview` directly from the Sidebar's `onStartReview` handler: + +```typescript +onStartReview={async (prUrl, requestId) => { + try { + const api = ensureNativeApi(); + const result = await api.reviewRequest.startReview({ + prUrl, + requestId, + projectsWorkingDirectory: appSettings.projectsWorkingDirectory, + }); + await navigate({ to: "/$threadId", params: { threadId: result.threadId } }); + } catch { + // Fall back to dialog on error + setPendingReviewRequest({ prUrl, requestId }); + setStandaloneReviewOpen(true); + } +}} +``` + +This makes the notification bell -> review flow instant (no dialog at all on success). + +- **Validation:** `bun fmt && bun lint && bun typecheck` pass. Manual testing. + +--- + +### Phase 5: Testing (Low Risk) + +#### Step 5.1: Unit Test for `startReview` Server Logic + +- **File:** NEW `apps/server/src/reviewFlow.test.ts` + +Test cases: +1. Happy path: existing project, PR details fetched, thread + turn created +2. No matching project, clone succeeds, project + thread created +3. No matching project, no projectsWorkingDirectory -- returns error +4. PR URL validation failure +5. Worktree creation failure -- thread exists but error activity dispatched +6. PR fetch failure -- thread exists but error activity dispatched + +Use the existing test patterns from `apps/server/src/orchestration/Layers/OrchestrationEngine.test.ts` for mocking services. + +#### Step 5.2: Move `buildReviewPrompt` Tests + +If `buildReviewPrompt` had tests in the web package, move them alongside the shared module. Currently there don't appear to be dedicated tests, so add basic tests in `packages/shared/src/prReview.test.ts`. 
+ +#### Step 5.3: Integration Smoke Test + +Manual testing checklist: +- [ ] Click "Start Review" from notification bell with existing project +- [ ] Click "Start Review" from notification bell with new repo (clone needed) +- [ ] Click "Review PR" button in sidebar (manual URL entry) +- [ ] Verify agent starts automatically +- [ ] Verify error when no projectsWorkingDirectory configured +- [ ] Verify error surfaces in chat when worktree fails +- [ ] Verify review request status updates to "in_review" +- [ ] Verify thread shows correct branch and worktree path + +--- + +## Detailed File Change Matrix + +| File | Action | Description | +|------|--------|-------------| +| `packages/shared/src/prReview.ts` | NEW | `buildReviewPrompt`, `normalizePrReference` | +| `packages/shared/package.json` | EDIT | Add `./prReview` subpath export | +| `packages/contracts/src/reviewRequest.ts` | EDIT | Add `ReviewRequestStartReviewInput`, `ReviewRequestStartReviewResult` | +| `packages/contracts/src/ws.ts` | EDIT | Add `reviewRequestStartReview` to `WS_METHODS` + body union | +| `packages/contracts/src/ipc.ts` | EDIT | Add `startReview` to `NativeApi.reviewRequest` | +| `apps/server/src/reviewFlow.ts` | NEW | `makeStartReview` factory + review flow orchestration | +| `apps/server/src/reviewFlow.test.ts` | NEW | Unit tests | +| `apps/server/src/wsServer.ts` | EDIT | Add route case, instantiate `startReview` | +| `apps/web/src/wsNativeApi.ts` | EDIT | Add `startReview` transport binding | +| `apps/web/src/components/StandaloneReviewPrDialog.tsx` | REWRITE | Simplify to single-call flow | +| `apps/web/src/components/Sidebar.tsx` | EDIT | Remove dialog props, add direct startReview for notifications | +| `apps/web/src/lib/prReviewUtils.ts` | EDIT | Re-export from shared or keep UI-only utils | +| `packages/shared/src/prReview.test.ts` | NEW | Tests for shared review utils | + +--- + +## Risk Assessment + +### Technical Risks + +1. 
**Background fiber error propagation** + - Risk: If the background fiber (worktree + turn start) fails silently, the user sees an empty thread + - Mitigation: Wrap entire background fiber in catch-all, dispatch `thread.activity.append` with error tone + - The chat view already renders error activities + +2. **Thread exists but agent never starts** + - Risk: The thread is created and returned, but the background fiber crashes before `thread.turn.start` + - Mitigation: The chat view shows an empty thread. User can type a message and manually trigger the agent. + - Enhancement: Add a "review setup in progress" system message during background setup + +3. **Project resolution race condition** + - Risk: Two concurrent `startReview` calls for the same repo could create duplicate projects + - Mitigation: Use `ON CONFLICT` on project workspaceRoot or add a mutex. The orchestration engine already serializes command dispatch, so duplicate `project.create` commands are rejected by invariants. + +4. **Stale project-to-repo mapping** + - Risk: A project's remote origin URL changes but the cache still has the old mapping + - Mitigation: Short TTL (30s) on the mapping cache. This is not a frequent scenario. + +5. **Large repo clone timeout** + - Risk: Cloning a large repo takes > 60s, WS request times out + - Mitigation: With the two-phase approach, the clone happens in Phase A (before return). If clone is needed, the response will be slow. Consider: if clone is needed, create a "pending" thread immediately, fork the clone, and return. The clone completion triggers thread.meta.update. + - For v1: accept that clone-needed reviews are slower. The dialog shows "starting..." during the clone. This is still better than the current flow (which also blocks on clone). 
+ +### Dependency Risks + +- **Effect-TS version:** No new Effect features needed; uses existing patterns +- **`@t3tools/shared` subpath export:** Well-established pattern in the codebase +- **`gh` CLI:** Already used extensively; no new gh commands needed + +--- + +## Acceptance Criteria + +### Functional + +1. Single `reviewRequest.startReview` WS method handles the entire flow +2. Thread is created on the server (not as a client-side draft) +3. Agent starts automatically with the review prompt +4. Review request is linked to the thread +5. Existing project is reused when repo URL matches +6. New project is created + repo cloned when no match exists +7. Error states are surfaced in the chat timeline +8. Manual URL entry (dialog) still works + +### Quality + +1. `bun fmt` passes +2. `bun lint` passes +3. `bun typecheck` passes +4. Unit tests for server-side review flow +5. Unit tests for shared `buildReviewPrompt` + +### Performance + +1. Client-side latency for notification-triggered review (existing project): < 500ms to navigate +2. Client-side latency for manual URL review (existing project): < 500ms after submitting URL +3. Agent begins producing output within 3s of review start (existing project, no clone) + +### Reliability + +1. If worktree creation fails, thread exists with error activity visible +2. If agent kickoff fails, thread exists and user can manually send a message +3. Concurrent review starts for different PRs don't interfere +4. WebSocket reconnect doesn't lose the thread (it's persisted server-side) + +--- + +## Implementation Order + +The phases can be executed in strict order (1 -> 2 -> 3 -> 4 -> 5). Each phase is independently verifiable: + +1. **Phase 1 (Shared Utilities)** -- Pure refactor, zero behavior change. Safe to merge independently. +2. **Phase 2 (Contracts)** -- Additive schema changes. Does not break existing code (new method, not changing existing ones). +3. **Phase 3 (Server)** -- New server endpoint. 
Can be deployed without frontend changes (endpoint exists but isn't called yet). +4. **Phase 4 (Frontend)** -- Switches to the new endpoint. Can be feature-flagged if needed. +5. **Phase 5 (Testing)** -- Validates the complete flow. + +Estimated total effort: 2-3 days for a developer familiar with the codebase. diff --git a/CLAUDE.md b/CLAUDE.md index c317064255..47dc3e3d86 120000 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1 +1 @@ -AGENTS.md +AGENTS.md \ No newline at end of file diff --git a/README.md b/README.md index 03e81b5fb9..d1e0ff0cb3 100644 --- a/README.md +++ b/README.md @@ -15,6 +15,25 @@ You can also just install the desktop app. It's cooler. Install the [desktop app from the Releases page](https://github.com/pingdotgg/t3code/releases) +## Jira Integration (optional) + +T3 Code can link chat sessions to Jira tickets — create, link, unlink, post AI-generated progress comments, and transition ticket status directly from the UI. + +To enable it, set the following environment variables: + +```bash +# Your Jira Cloud instance URL +JIRA_BASE_URL=https://yourcompany.atlassian.net + +# Your Jira account email +JIRA_USER_EMAIL=you@company.com + +# A Jira API token (https://id.atlassian.com/manage-profile/security/api-tokens) +JIRA_API_TOKEN=your-api-token +``` + +When all three are set, a Jira button appears in the chat toolbar. When unset, the Jira UI is completely hidden. + ## Some notes We are very very early in this project. Expect bugs. 
diff --git a/apps/server/integration/OrchestrationEngineHarness.integration.ts b/apps/server/integration/OrchestrationEngineHarness.integration.ts index c5eb125aba..6952418ab7 100644 --- a/apps/server/integration/OrchestrationEngineHarness.integration.ts +++ b/apps/server/integration/OrchestrationEngineHarness.integration.ts @@ -159,7 +159,7 @@ export interface OrchestrationIntegrationHarness { readonly rootDir: string; readonly workspaceDir: string; readonly dbPath: string; - readonly adapterHarness: TestProviderAdapterHarness | null; + readonly adapterHarness: TestProviderAdapterHarness; readonly engine: OrchestrationEngineShape; readonly snapshotQuery: ProjectionSnapshotQuery["Service"]; readonly providerService: ProviderService["Service"]; @@ -205,7 +205,7 @@ export interface OrchestrationIntegrationHarness { } interface MakeOrchestrationIntegrationHarnessOptions { - readonly provider?: "codex"; + readonly provider?: "codex" | "claudeCode"; readonly realCodex?: boolean; } @@ -269,12 +269,10 @@ export const makeOrchestrationIntegrationHarness = ( ? 
makeProviderServiceLive().pipe( Layer.provide(providerSessionDirectoryLayer), Layer.provide(realCodexRegistry), - Layer.provide(AnalyticsService.layerTest), ) : makeProviderServiceLive().pipe( Layer.provide(providerSessionDirectoryLayer), Layer.provide(fakeRegistry!), - Layer.provide(AnalyticsService.layerTest), ); const runtimeServicesLayer = Layer.mergeAll( @@ -312,6 +310,7 @@ export const makeOrchestrationIntegrationHarness = ( const layer = orchestrationReactorLayer.pipe( Layer.provide(persistenceLayer), Layer.provideMerge(ServerConfig.layerTest(workspaceDir, stateDir)), + Layer.provideMerge(AnalyticsService.layerTest), Layer.provideMerge(NodeServices.layer), ); @@ -454,14 +453,19 @@ export const makeOrchestrationIntegrationHarness = ( disposed = true; const shutdown = Effect.gen(function* () { + const stopAllExit = yield* Effect.exit( + Effect.promise(() => runtime.runPromise(providerService.stopAll())), + ); const closeScopeExit = yield* Effect.exit(Scope.close(scope, Exit.void)); const disposeRuntimeExit = yield* Effect.exit(Effect.promise(() => runtime.dispose())); - const failureCause = Exit.isFailure(closeScopeExit) - ? closeScopeExit.cause - : Exit.isFailure(disposeRuntimeExit) - ? disposeRuntimeExit.cause - : null; + const failureCause = Exit.isFailure(stopAllExit) + ? stopAllExit.cause + : Exit.isFailure(closeScopeExit) + ? closeScopeExit.cause + : Exit.isFailure(disposeRuntimeExit) + ? 
disposeRuntimeExit.cause + : null; if (failureCause) { return yield* Effect.failCause(failureCause); @@ -481,7 +485,7 @@ export const makeOrchestrationIntegrationHarness = ( rootDir, workspaceDir, dbPath, - adapterHarness, + adapterHarness: adapterHarness as TestProviderAdapterHarness, engine, snapshotQuery, providerService, diff --git a/apps/server/integration/TestProviderAdapter.integration.ts b/apps/server/integration/TestProviderAdapter.integration.ts index 017c59e2c8..d970bf7849 100644 --- a/apps/server/integration/TestProviderAdapter.integration.ts +++ b/apps/server/integration/TestProviderAdapter.integration.ts @@ -35,7 +35,7 @@ export interface TestTurnResponse { export type FixtureProviderRuntimeEvent = { readonly type: string; readonly eventId: EventId; - readonly provider: "codex"; + readonly provider: "codex" | "claudeCode" | "cursor"; readonly createdAt: string; readonly threadId: string; readonly turnId?: string | undefined; @@ -177,7 +177,7 @@ function normalizeFixtureEvent(rawEvent: Record): ProviderRunti export interface TestProviderAdapterHarness { readonly adapter: ProviderAdapterShape; - readonly provider: "codex"; + readonly provider: "codex" | "claudeCode"; readonly queueTurnResponse: ( threadId: ThreadId, response: TestTurnResponse, @@ -197,7 +197,7 @@ export interface TestProviderAdapterHarness { } interface MakeTestProviderAdapterHarnessOptions { - readonly provider?: "codex"; + readonly provider?: "codex" | "claudeCode"; } function nowIso(): string { @@ -205,7 +205,7 @@ function nowIso(): string { } function sessionNotFound( - provider: "codex", + provider: "codex" | "claudeCode", threadId: ThreadId, ): ProviderAdapterSessionNotFoundError { return new ProviderAdapterSessionNotFoundError({ @@ -215,7 +215,7 @@ function sessionNotFound( } function missingSessionEffect( - provider: "codex", + provider: "codex" | "claudeCode", threadId: ThreadId, ): Effect.Effect { return Effect.fail(sessionNotFound(provider, threadId)); diff --git 
a/apps/server/integration/orchestrationEngine.integration.test.ts b/apps/server/integration/orchestrationEngine.integration.test.ts index 42dcfe34f8..f799e80eb1 100644 --- a/apps/server/integration/orchestrationEngine.integration.test.ts +++ b/apps/server/integration/orchestrationEngine.integration.test.ts @@ -36,7 +36,7 @@ const PROJECT_ID = asProjectId("project-1"); const THREAD_ID = ThreadId.makeUnsafe("thread-1"); const FIXTURE_TURN_ID = "fixture-turn"; const APPROVAL_REQUEST_ID = asApprovalRequestId("req-approval-1"); -type IntegrationProvider = "codex"; +type IntegrationProvider = "codex" | "claudeCode"; function nowIso() { return new Date().toISOString(); @@ -397,7 +397,7 @@ it.live("runs multi-turn file edits and persists checkpoint diffs", () => (entry) => entry.checkpoints.length === 1 && entry.session?.threadId === "thread-1", ); - yield* harness.adapterHarness!.queueTurnResponse(THREAD_ID, { + yield* harness.adapterHarness.queueTurnResponse(THREAD_ID, { events: [ { type: "turn.started", @@ -578,7 +578,7 @@ it.live("tracks approval requests and resolves pending approvals on user respons assert.equal(resolvedRow.decision, "accept"); const approvalResponses = yield* waitForSync( - () => harness.adapterHarness!.getApprovalResponses(THREAD_ID), + () => harness.adapterHarness.getApprovalResponses(THREAD_ID), (responses) => responses.length === 1, "provider approval response", ); @@ -731,7 +731,7 @@ it.live("reverts to an earlier checkpoint and trims checkpoint projections + git (entry) => entry.session?.threadId === "thread-1" && entry.checkpoints.length === 1, ); - yield* harness.adapterHarness!.queueTurnResponse(THREAD_ID, { + yield* harness.adapterHarness.queueTurnResponse(THREAD_ID, { events: [ { type: "turn.started", @@ -836,7 +836,7 @@ it.live("reverts to an earlier checkpoint and trims checkpoint projections + git gitRefExists(harness.workspaceDir, checkpointRefForThreadTurn(THREAD_ID, 2)), false, ); - 
assert.deepEqual(harness.adapterHarness!.getRollbackCalls(THREAD_ID), [1]); + assert.deepEqual(harness.adapterHarness.getRollbackCalls(THREAD_ID), [1]); const checkpointRows = yield* harness.checkpointRepository.listByThreadId({ threadId: THREAD_ID, @@ -882,3 +882,420 @@ it.live( }), ), ); + +it.live("starts a claudeCode session on first turn when provider is requested", () => + withHarness( + (harness) => + Effect.gen(function* () { + yield* seedProjectAndThread(harness); + + yield* harness.adapterHarness.queueTurnResponseForNextSession({ + events: [ + { + type: "turn.started", + ...runtimeBase("evt-claude-start-1", "2026-02-24T10:10:00.000Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + }, + { + type: "message.delta", + ...runtimeBase("evt-claude-start-2", "2026-02-24T10:10:00.050Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + delta: "Claude first turn.\n", + }, + { + type: "turn.completed", + ...runtimeBase("evt-claude-start-3", "2026-02-24T10:10:00.100Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + status: "completed", + }, + ], + }); + + yield* startTurn({ + harness, + commandId: "cmd-turn-start-claude-initial", + messageId: "msg-user-claude-initial", + text: "Use Claude", + provider: "claudeCode", + }); + + const thread = yield* harness.waitForThread( + THREAD_ID, + (entry) => + entry.session?.providerName === "claudeCode" && + entry.session.status === "ready" && + entry.messages.some( + (message) => message.role === "assistant" && message.text === "Claude first turn.\n", + ), + ); + assert.equal(thread.session?.providerName, "claudeCode"); + }), + "claudeCode", + ), +); + +it.live("recovers claudeCode sessions after provider stopAll using persisted resume state", () => + withHarness( + (harness) => + Effect.gen(function* () { + yield* seedProjectAndThread(harness); + + yield* harness.adapterHarness.queueTurnResponseForNextSession({ + events: [ + { + type: "turn.started", + 
...runtimeBase("evt-claude-recover-1", "2026-02-24T10:11:00.000Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + }, + { + type: "message.delta", + ...runtimeBase("evt-claude-recover-2", "2026-02-24T10:11:00.050Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + delta: "Turn before restart.\n", + }, + { + type: "turn.completed", + ...runtimeBase("evt-claude-recover-3", "2026-02-24T10:11:00.100Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + status: "completed", + }, + ], + }); + + yield* startTurn({ + harness, + commandId: "cmd-turn-start-claude-recover-1", + messageId: "msg-user-claude-recover-1", + text: "Before restart", + provider: "claudeCode", + }); + + yield* harness.waitForThread( + THREAD_ID, + (entry) => + entry.latestTurn?.turnId === "turn-1" && entry.session?.threadId === "thread-1", + ); + + yield* harness.providerService.stopAll(); + yield* waitForSync( + () => harness.adapterHarness.listActiveSessionIds(), + (sessionIds) => sessionIds.length === 0, + "provider stopAll", + ); + + yield* harness.adapterHarness.queueTurnResponseForNextSession({ + events: [ + { + type: "turn.started", + ...runtimeBase("evt-claude-recover-4", "2026-02-24T10:11:01.000Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + }, + { + type: "message.delta", + ...runtimeBase("evt-claude-recover-5", "2026-02-24T10:11:01.050Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + delta: "Turn after restart.\n", + }, + { + type: "turn.completed", + ...runtimeBase("evt-claude-recover-6", "2026-02-24T10:11:01.100Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + status: "completed", + }, + ], + }); + + yield* startTurn({ + harness, + commandId: "cmd-turn-start-claude-recover-2", + messageId: "msg-user-claude-recover-2", + text: "After restart", + }); + yield* waitForSync( + () => harness.adapterHarness.getStartCount(), + (count) => count === 2, + "claude provider recovery 
start", + ); + + const recoveredThread = yield* harness.waitForThread( + THREAD_ID, + (entry) => + entry.session?.providerName === "claudeCode" && + entry.messages.some( + (message) => message.role === "user" && message.text === "After restart", + ) && + !entry.activities.some((activity) => activity.kind === "provider.turn.start.failed"), + ); + assert.equal(recoveredThread.session?.providerName, "claudeCode"); + assert.equal(recoveredThread.session?.threadId, "thread-1"); + }), + "claudeCode", + ), +); + +it.live("forwards claudeCode approval responses to the provider session", () => + withHarness( + (harness) => + Effect.gen(function* () { + yield* seedProjectAndThread(harness); + + yield* harness.adapterHarness.queueTurnResponseForNextSession({ + events: [ + { + type: "turn.started", + ...runtimeBase("evt-claude-approval-1", "2026-02-24T10:12:00.000Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + }, + { + type: "approval.requested", + ...runtimeBase("evt-claude-approval-2", "2026-02-24T10:12:00.050Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + requestId: APPROVAL_REQUEST_ID, + requestKind: "command", + detail: "Approve Claude tool call", + }, + { + type: "turn.completed", + ...runtimeBase("evt-claude-approval-3", "2026-02-24T10:12:00.100Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + status: "completed", + }, + ], + }); + + yield* startTurn({ + harness, + commandId: "cmd-turn-start-claude-approval", + messageId: "msg-user-claude-approval", + text: "Need approval", + provider: "claudeCode", + }); + + const thread = yield* harness.waitForThread(THREAD_ID, (entry) => + entry.activities.some((activity) => activity.kind === "approval.requested"), + ); + assert.equal(thread.session?.threadId, "thread-1"); + + yield* harness.engine.dispatch({ + type: "thread.approval.respond", + commandId: CommandId.makeUnsafe("cmd-claude-approval-respond"), + threadId: THREAD_ID, + requestId: APPROVAL_REQUEST_ID, + 
decision: "accept", + createdAt: nowIso(), + }); + + yield* harness.waitForPendingApproval( + "req-approval-1", + (row) => row.status === "resolved" && row.decision === "accept", + ); + + const approvalResponses = yield* waitForSync( + () => harness.adapterHarness.getApprovalResponses(THREAD_ID), + (responses) => responses.length === 1, + "claude provider approval response", + ); + assert.equal(approvalResponses[0]?.decision, "accept"); + }), + "claudeCode", + ), +); + +it.live("forwards thread.turn.interrupt to claudeCode provider sessions", () => + withHarness( + (harness) => + Effect.gen(function* () { + yield* seedProjectAndThread(harness); + + yield* harness.adapterHarness.queueTurnResponseForNextSession({ + events: [ + { + type: "turn.started", + ...runtimeBase("evt-claude-interrupt-1", "2026-02-24T10:13:00.000Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + }, + { + type: "message.delta", + ...runtimeBase("evt-claude-interrupt-2", "2026-02-24T10:13:00.050Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + delta: "Long running output.\n", + }, + { + type: "turn.completed", + ...runtimeBase("evt-claude-interrupt-3", "2026-02-24T10:13:00.100Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + status: "completed", + }, + ], + }); + + yield* startTurn({ + harness, + commandId: "cmd-turn-start-claude-interrupt", + messageId: "msg-user-claude-interrupt", + text: "Start long turn", + provider: "claudeCode", + }); + + const thread = yield* harness.waitForThread( + THREAD_ID, + (entry) => entry.session?.threadId === "thread-1", + ); + assert.equal(thread.session?.threadId, "thread-1"); + + yield* harness.engine.dispatch({ + type: "thread.turn.interrupt", + commandId: CommandId.makeUnsafe("cmd-turn-interrupt-claude"), + threadId: THREAD_ID, + createdAt: nowIso(), + }); + yield* harness.waitForDomainEvent( + (event) => event.type === "thread.turn-interrupt-requested", + ); + + const interruptCalls = yield* 
waitForSync( + () => harness.adapterHarness.getInterruptCalls(THREAD_ID), + (calls) => calls.length === 1, + "claude provider interrupt call", + ); + assert.equal(interruptCalls.length, 1); + }), + "claudeCode", + ), +); + +it.live("reverts claudeCode turns and rolls back provider conversation state", () => + withHarness( + (harness) => + Effect.gen(function* () { + yield* seedProjectAndThread(harness); + + yield* harness.adapterHarness.queueTurnResponseForNextSession({ + events: [ + { + type: "turn.started", + ...runtimeBase("evt-claude-revert-1", "2026-02-24T10:14:00.000Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + }, + { + type: "message.delta", + ...runtimeBase("evt-claude-revert-2", "2026-02-24T10:14:00.050Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + delta: "README -> v2\n", + }, + { + type: "turn.completed", + ...runtimeBase("evt-claude-revert-3", "2026-02-24T10:14:00.100Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + status: "completed", + }, + ], + mutateWorkspace: ({ cwd }) => + Effect.sync(() => { + fs.writeFileSync(path.join(cwd, "README.md"), "v2\n", "utf8"); + }), + }); + + yield* startTurn({ + harness, + commandId: "cmd-turn-start-claude-revert-1", + messageId: "msg-user-claude-revert-1", + text: "First Claude edit", + provider: "claudeCode", + }); + + yield* harness.waitForThread( + THREAD_ID, + (entry) => + entry.latestTurn?.turnId === "turn-1" && entry.session?.threadId === "thread-1", + ); + + yield* harness.adapterHarness.queueTurnResponse(THREAD_ID, { + events: [ + { + type: "turn.started", + ...runtimeBase("evt-claude-revert-4", "2026-02-24T10:14:01.000Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + }, + { + type: "message.delta", + ...runtimeBase("evt-claude-revert-5", "2026-02-24T10:14:01.050Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + delta: "README -> v3\n", + }, + { + type: "turn.completed", + 
...runtimeBase("evt-claude-revert-6", "2026-02-24T10:14:01.100Z", "claudeCode"), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + status: "completed", + }, + ], + mutateWorkspace: ({ cwd }) => + Effect.sync(() => { + fs.writeFileSync(path.join(cwd, "README.md"), "v3\n", "utf8"); + }), + }); + + yield* startTurn({ + harness, + commandId: "cmd-turn-start-claude-revert-2", + messageId: "msg-user-claude-revert-2", + text: "Second Claude edit", + }); + + yield* harness.waitForThread( + THREAD_ID, + (entry) => + entry.latestTurn?.turnId === "turn-2" && + entry.checkpoints.length === 2 && + entry.session?.providerName === "claudeCode", + ); + + yield* harness.engine.dispatch({ + type: "thread.checkpoint.revert", + commandId: CommandId.makeUnsafe("cmd-checkpoint-revert-claude"), + threadId: THREAD_ID, + turnCount: 1, + createdAt: nowIso(), + }); + + const revertedThread = yield* harness.waitForThread( + THREAD_ID, + (entry) => + entry.checkpoints.length === 1 && entry.checkpoints[0]?.checkpointTurnCount === 1, + ); + assert.equal(revertedThread.checkpoints[0]?.checkpointTurnCount, 1); + assert.equal( + gitRefExists(harness.workspaceDir, checkpointRefForThreadTurn(THREAD_ID, 1)), + true, + ); + assert.equal( + gitRefExists(harness.workspaceDir, checkpointRefForThreadTurn(THREAD_ID, 2)), + false, + ); + assert.deepEqual(harness.adapterHarness.getRollbackCalls(THREAD_ID), [1]); + }), + "claudeCode", + ), +); diff --git a/apps/server/package.json b/apps/server/package.json index 546a2c3b68..94fe639c64 100644 --- a/apps/server/package.json +++ b/apps/server/package.json @@ -22,13 +22,15 @@ "test": "vitest run" }, "dependencies": { + "@anthropic-ai/claude-agent-sdk": "^0.2.62", "@effect/platform-node": "catalog:", "@effect/sql-sqlite-bun": "catalog:", "@pierre/diffs": "^1.1.0-beta.16", "effect": "catalog:", "node-pty": "^1.1.0", "open": "^10.1.0", - "ws": "^8.18.0" + "ws": "^8.18.0", + "zod": "^4.3.6" }, "devDependencies": { "@effect/language-service": "catalog:", diff --git 
a/apps/server/scripts/logger-scope-repro.ts b/apps/server/scripts/logger-scope-repro.ts new file mode 100644 index 0000000000..52f6fc1e93 --- /dev/null +++ b/apps/server/scripts/logger-scope-repro.ts @@ -0,0 +1,66 @@ +import * as NodeRuntime from "@effect/platform-node/NodeRuntime"; +import * as NodeServices from "@effect/platform-node/NodeServices"; +import path from "node:path"; + +import { Effect, FileSystem, Layer, Logger, ServiceMap } from "effect"; + +import { makeEventNdjsonLogger } from "../src/provider/Layers/EventNdjsonLogger.ts"; + +class LogDir extends ServiceMap.Service()("t3/scripts/logger-scope-repro/LogDir") {} + +const main = Effect.gen(function* () { + const logdir = yield* LogDir; + const providerLogPath = path.join(logdir, "provider"); + + yield* Effect.logInfo(`providerLogPath=${providerLogPath}`); + + const providerLogger = yield* makeEventNdjsonLogger(providerLogPath, { + stream: "native", + batchWindowMs: 10, + }); + + yield* Effect.logInfo("before provider write"); + + if (providerLogger) { + yield* providerLogger.write( + { + kind: "probe", + message: "provider-only event", + }, + "thread-123" as never, + ); + } + + yield* Effect.logInfo("after provider write"); + yield* Effect.sleep("50 millis"); + + if (providerLogger) { + yield* providerLogger.close(); + } + yield* Effect.logInfo("after provider close"); + + const fs = yield* FileSystem.FileSystem; + const logContents = yield* fs + .readDirectory(logdir, { recursive: true }) + .pipe( + Effect.flatMap((entries) => + Effect.all(entries.map((entry) => fs.readFileString(path.join(logdir, entry)))), + ), + ); + yield* Effect.logInfo(`logContents=${logContents}`); +}); + +Effect.gen(function* () { + const fs = yield* FileSystem.FileSystem; + const logdir = path.join(process.cwd(), "logtest"); + yield* fs.makeDirectory(logdir); + + const fileLogger = yield* Logger.formatSimple.pipe( + Logger.toFile(path.join(logdir, "global.log")), + ); + const dualLogger = Logger.layer([fileLogger, 
Logger.consolePretty()]); + + const mainLayer = Layer.mergeAll(dualLogger, Layer.succeed(LogDir, logdir)); + + yield* main.pipe(Effect.provide(mainLayer)); +}).pipe(Effect.scoped, Effect.provide(NodeServices.layer), NodeRuntime.runMain); diff --git a/apps/server/src/checkpointing/Layers/CheckpointDiffQuery.test.ts b/apps/server/src/checkpointing/Layers/CheckpointDiffQuery.test.ts index 2f79ea9d5a..0c77cb3e74 100644 --- a/apps/server/src/checkpointing/Layers/CheckpointDiffQuery.test.ts +++ b/apps/server/src/checkpointing/Layers/CheckpointDiffQuery.test.ts @@ -74,6 +74,7 @@ function makeSnapshot(input: { }, ], session: null, + linkedJiraTicket: null, }, ], }; diff --git a/apps/server/src/codexAppServerManager.test.ts b/apps/server/src/codexAppServerManager.test.ts index cea8df0a0b..d040e03d84 100644 --- a/apps/server/src/codexAppServerManager.test.ts +++ b/apps/server/src/codexAppServerManager.test.ts @@ -98,11 +98,10 @@ function createPendingUserInputHarness() { const manager = new CodexAppServerManager(); const context = { session: { + sessionId: "sess_1", provider: "codex", status: "ready", threadId: "thread_1", - runtimeMode: "full-access", - model: "gpt-5.3-codex", resumeCursor: { threadId: "thread_1" }, createdAt: "2026-02-10T00:00:00.000Z", updatedAt: "2026-02-10T00:00:00.000Z", @@ -536,6 +535,36 @@ describe("sendTurn", () => { }); }); + it("passes Codex plan mode as a collaboration preset on turn/start", async () => { + const { manager, context, sendRequest } = createSendTurnHarness(); + + await manager.sendTurn({ + threadId: asThreadId("thread_1"), + input: "Plan the work", + interactionMode: "plan", + }); + + expect(sendRequest).toHaveBeenCalledWith(context, "turn/start", { + threadId: "thread_1", + input: [ + { + type: "text", + text: "Plan the work", + text_elements: [], + }, + ], + model: "gpt-5.3-codex", + collaborationMode: { + mode: "plan", + settings: { + model: "gpt-5.3-codex", + reasoning_effort: "medium", + developer_instructions: 
CODEX_PLAN_MODE_DEVELOPER_INSTRUCTIONS, + }, + }, + }); + }); + it("rejects empty turn input", async () => { const { manager } = createSendTurnHarness(); @@ -672,80 +701,6 @@ describe("respondToUserInput", () => { }), ); }); - - it("preserves explicit empty multi-select answers", async () => { - const { manager, context, requireSession, writeMessage, emitEvent } = - createPendingUserInputHarness(); - - await manager.respondToUserInput( - asThreadId("thread_1"), - ApprovalRequestId.makeUnsafe("req-user-input-1"), - { - scope: [], - }, - ); - - expect(requireSession).toHaveBeenCalledWith("thread_1"); - expect(writeMessage).toHaveBeenCalledWith(context, { - id: 42, - result: { - answers: { - scope: { answers: [] }, - }, - }, - }); - expect(emitEvent).toHaveBeenCalledWith( - expect.objectContaining({ - method: "item/tool/requestUserInput/answered", - payload: { - requestId: "req-user-input-1", - answers: { - scope: { answers: [] }, - }, - }, - }), - ); - }); - - it("tracks file-read approval requests with the correct method", () => { - const manager = new CodexAppServerManager(); - const context = { - session: { - sessionId: "sess_1", - provider: "codex", - status: "ready", - threadId: asThreadId("thread_1"), - resumeCursor: { threadId: "thread_1" }, - createdAt: "2026-02-10T00:00:00.000Z", - updatedAt: "2026-02-10T00:00:00.000Z", - }, - pendingApprovals: new Map(), - pendingUserInputs: new Map(), - }; - type ApprovalRequestContext = { - session: typeof context.session; - pendingApprovals: typeof context.pendingApprovals; - pendingUserInputs: typeof context.pendingUserInputs; - }; - - ( - manager as unknown as { - handleServerRequest: ( - context: ApprovalRequestContext, - request: Record, - ) => void; - } - ).handleServerRequest(context, { - jsonrpc: "2.0", - id: 42, - method: "item/fileRead/requestApproval", - params: {}, - }); - - const request = Array.from(context.pendingApprovals.values())[0]; - expect(request?.requestKind).toBe("file-read"); - 
expect(request?.method).toBe("item/fileRead/requestApproval"); - }); }); describe.skipIf(!process.env.CODEX_BINARY_PATH)("startSession live Codex resume", () => { diff --git a/apps/server/src/codexAppServerManager.ts b/apps/server/src/codexAppServerManager.ts index a8a8ce4607..b22a8a9e90 100644 --- a/apps/server/src/codexAppServerManager.ts +++ b/apps/server/src/codexAppServerManager.ts @@ -327,6 +327,17 @@ Do not ask "should I proceed?" in the final output. The user can easily switch o Only produce at most one \`\` block per turn, and only when you are presenting a complete spec. `; +export function resolveCodexModelForAccount( + model: string | undefined, + account: CodexAccountSnapshot, +): string | undefined { + if (model !== CODEX_SPARK_MODEL || account.sparkEnabled) { + return model; + } + + return CODEX_DEFAULT_MODEL; +} + export const CODEX_DEFAULT_MODE_DEVELOPER_INSTRUCTIONS = `# Collaboration Mode: Default You are now in Default mode. Any previous instructions for other modes (e.g. Plan mode) are no longer active. @@ -357,17 +368,6 @@ function mapCodexRuntimeMode(runtimeMode: RuntimeMode): { }; } -export function resolveCodexModelForAccount( - model: string | undefined, - account: CodexAccountSnapshot, -): string | undefined { - if (model !== CODEX_SPARK_MODEL || account.sparkEnabled) { - return model; - } - - return CODEX_DEFAULT_MODEL; -} - /** * On Windows with `shell: true`, `child.kill()` only terminates the `cmd.exe` * wrapper, leaving the actual command running. 
Use `taskkill /T` to kill the @@ -452,14 +452,18 @@ function toCodexUserInputAnswer(value: unknown): CodexUserInputAnswer { if (Array.isArray(value)) { const answers = value.filter((entry): entry is string => typeof entry === "string"); - return { answers }; + if (answers.length > 0) { + return { answers }; + } } if (value && typeof value === "object") { const maybeAnswers = (value as { answers?: unknown }).answers; if (Array.isArray(maybeAnswers)) { const answers = maybeAnswers.filter((entry): entry is string => typeof entry === "string"); - return { answers }; + if (answers.length > 0) { + return { answers }; + } } } @@ -543,11 +547,6 @@ export class CodexAppServerManager extends EventEmitter 0) { return { @@ -32,7 +31,7 @@ function toCodexOutputJsonSchema(schema: Schema.Top): unknown { return document.schema; } -function normalizeCodexError( +function normalizeClaudeError( operation: string, error: unknown, fallback: string, @@ -44,13 +43,13 @@ function normalizeCodexError( if (error instanceof Error) { const lower = error.message.toLowerCase(); if ( - error.message.includes("Command not found: codex") || - lower.includes("spawn codex") || + error.message.includes("Command not found: claude") || + lower.includes("spawn claude") || lower.includes("enoent") ) { return new TextGenerationError({ operation, - detail: "Codex CLI (`codex`) is required but not available on PATH.", + detail: "Claude CLI (`claude`) is required but not available on PATH.", cause: error, }); } @@ -117,33 +116,12 @@ const makeCodexTextGeneration = Effect.gen(function* () { }), ).pipe( Effect.mapError((cause) => - normalizeCodexError(operation, cause, "Failed to collect process output"), + normalizeClaudeError(operation, cause, "Failed to collect process output"), ), ); return text; }); - const tempDir = process.env.TMPDIR ?? process.env.TEMP ?? process.env.TMP ?? 
"/tmp"; - - const writeTempFile = ( - operation: string, - prefix: string, - content: string, - ): Effect.Effect => { - const filePath = path.join(tempDir, `t3code-${prefix}-${process.pid}-${randomUUID()}.tmp`); - return fileSystem.writeFileString(filePath, content).pipe( - Effect.mapError( - (cause) => - new TextGenerationError({ - operation, - detail: `Failed to write temp file at ${filePath}.`, - cause, - }), - ), - Effect.as(filePath), - ); - }; - const safeUnlink = (filePath: string): Effect.Effect => fileSystem.remove(filePath).pipe(Effect.catch(() => Effect.void)); @@ -180,12 +158,11 @@ const makeCodexTextGeneration = Effect.gen(function* () { return { imagePaths }; }); - const runCodexJson = ({ + const runClaudeJson = ({ operation, cwd, prompt, outputSchemaJson, - imagePaths = [], cleanupPaths = [], }: { operation: "generateCommitMessage" | "generatePrContent" | "generateBranchName"; @@ -196,38 +173,26 @@ const makeCodexTextGeneration = Effect.gen(function* () { cleanupPaths?: ReadonlyArray; }): Effect.Effect => Effect.gen(function* () { - const schemaPath = yield* writeTempFile( - operation, - "codex-schema", - JSON.stringify(toCodexOutputJsonSchema(outputSchemaJson)), - ); - const outputPath = yield* writeTempFile(operation, "codex-output", ""); + const jsonSchema = JSON.stringify(toJsonSchema(outputSchemaJson)); - const runCodexCommand = Effect.gen(function* () { + const runClaudeCommand = Effect.gen(function* () { const command = ChildProcess.make( - "codex", + "claude", [ - "exec", - "--ephemeral", - "-s", - "read-only", + "--print", "--model", - CODEX_MODEL, - "--config", - `model_reasoning_effort="${CODEX_REASONING_EFFORT}"`, - "--output-schema", - schemaPath, - "--output-last-message", - outputPath, - ...imagePaths.flatMap((imagePath) => ["--image", imagePath]), - "-", + CLAUDE_MODEL, + "--output-format", + "json", + "--json-schema", + jsonSchema, + "--no-session-persistence", + "--dangerously-skip-permissions", + prompt, ], { cwd, shell: 
process.platform === "win32", - stdin: { - stream: Stream.make(new TextEncoder().encode(prompt)), - }, }, ); @@ -235,7 +200,7 @@ const makeCodexTextGeneration = Effect.gen(function* () { .spawn(command) .pipe( Effect.mapError((cause) => - normalizeCodexError(operation, cause, "Failed to spawn Codex CLI process"), + normalizeClaudeError(operation, cause, "Failed to spawn Claude CLI process"), ), ); @@ -246,7 +211,7 @@ const makeCodexTextGeneration = Effect.gen(function* () { child.exitCode.pipe( Effect.map((value) => Number(value)), Effect.mapError((cause) => - normalizeCodexError(operation, cause, "Failed to read Codex CLI exit code"), + normalizeClaudeError(operation, cause, "Failed to read Claude CLI exit code"), ), ), ], @@ -261,49 +226,41 @@ const makeCodexTextGeneration = Effect.gen(function* () { operation, detail: detail.length > 0 - ? `Codex CLI command failed: ${detail}` - : `Codex CLI command failed with code ${exitCode}.`, + ? `Claude CLI command failed: ${detail}` + : `Claude CLI command failed with code ${exitCode}.`, }); } + + return stdout; }); const cleanup = Effect.all( - [schemaPath, outputPath, ...cleanupPaths].map((filePath) => safeUnlink(filePath)), - { - concurrency: "unbounded", - }, + cleanupPaths.map((filePath) => safeUnlink(filePath)), + { concurrency: "unbounded" }, ).pipe(Effect.asVoid); return yield* Effect.gen(function* () { - yield* runCodexCommand.pipe( + const stdout = yield* runClaudeCommand.pipe( Effect.scoped, - Effect.timeoutOption(CODEX_TIMEOUT_MS), + Effect.timeoutOption(CLAUDE_TIMEOUT_MS), Effect.flatMap( Option.match({ onNone: () => Effect.fail( - new TextGenerationError({ operation, detail: "Codex CLI request timed out." }), + new TextGenerationError({ operation, detail: "Claude CLI request timed out." 
}), ), - onSome: () => Effect.void, + onSome: (value) => Effect.succeed(value), }), ), ); - return yield* fileSystem.readFileString(outputPath).pipe( - Effect.mapError( - (cause) => - new TextGenerationError({ - operation, - detail: "Failed to read Codex output file.", - cause, - }), - ), + return yield* Effect.succeed(stdout).pipe( Effect.flatMap(Schema.decodeEffect(Schema.fromJsonString(outputSchemaJson))), Effect.catchTag("SchemaError", (cause) => Effect.fail( new TextGenerationError({ operation, - detail: "Codex returned invalid structured output.", + detail: "Claude returned invalid structured output.", cause, }), ), @@ -348,7 +305,7 @@ const makeCodexTextGeneration = Effect.gen(function* () { body: Schema.String, }); - return runCodexJson({ + return runClaudeJson({ operation: "generateCommitMessage", cwd: input.cwd, prompt, @@ -390,7 +347,7 @@ const makeCodexTextGeneration = Effect.gen(function* () { limitSection(input.diffPatch, 40_000), ].join("\n"); - return runCodexJson({ + return runClaudeJson({ operation: "generatePrContent", cwd: input.cwd, prompt, @@ -441,7 +398,7 @@ const makeCodexTextGeneration = Effect.gen(function* () { } const prompt = promptSections.join("\n"); - const generated = yield* runCodexJson({ + const generated = yield* runClaudeJson({ operation: "generateBranchName", cwd: input.cwd, prompt, @@ -457,10 +414,126 @@ const makeCodexTextGeneration = Effect.gen(function* () { }); }; + const generateJiraTicketContent: TextGenerationShape["generateJiraTicketContent"] = (input) => { + const prompt = [ + `Create a Jira ticket for project ${input.projectKey} based on the conversation below.`, + "", + "For the summary field: write a concise imperative title (e.g. 
'Add retry logic for failed API calls').", + "", + "For the description field, use EXACTLY this format:", + "", + "Background:", + "", + "", + "Tasks:", + "- ", + "", + "Acceptance criteria:", + "- ", + "", + "Rules:", + "- NEVER ask for clarification — this is a one-shot generation with no follow-up", + "- Always produce output in the format above, using whatever context is available", + "- Only include bullet points you are confident about — do not invent requirements", + "- No markdown headers (#), no code fences, no bold/italic", + "- Be specific — mention files, APIs, or components where relevant", + "", + "--- CONVERSATION ---", + limitSection(input.conversationContext, 16_000), + "--- END CONVERSATION ---", + ].join("\n"); + + return runAgentQuery( + "generateJiraTicketContent", + prompt, + { + type: "object", + properties: { + summary: { type: "string" }, + description: { type: "string" }, + }, + required: ["summary", "description"], + }, + (raw) => { + const obj = raw as { summary: string; description: string }; + return { + summary: obj.summary.trim(), + description: obj.description.trim(), + } satisfies JiraTicketContentGenerationResult; + }, + ); + }; + + const generateJiraProgressComment: TextGenerationShape["generateJiraProgressComment"] = ( + input, + ) => { + const hasComments = input.ticketComments.length > 0; + const prompt = [ + "--- TICKET ---", + `Key: ${input.ticketKey}`, + `Type: ${input.ticketType}`, + `Status: ${input.ticketStatus}`, + `Summary: ${input.ticketTitle}`, + `Description: ${limitSection(input.ticketDescription, 3_000)}`, + "--- END TICKET ---", + "", + "--- CONVERSATION ---", + limitSection(input.recentConversation, 16_000), + "--- END CONVERSATION ---", + "", + ...(hasComments + ? [ + "--- COMMENTS ALREADY ON THE TICKET (posted earlier — everything below is OLD news) ---", + limitSection(input.ticketComments, 4_000), + "--- END OLD COMMENTS ---", + "", + `Now write the NEXT progress comment for ${input.ticketKey}. 
Only mention work from the conversation that is NOT in the old comments above. If a topic appears in the old comments, skip it completely.`, + ] + : [`Write a progress update comment for Jira ticket ${input.ticketKey}.`]), + "", + "Output format:", + "", + "Progress update:", + "- ", + "", + "Next steps:", + "- ", + "", + "Rules:", + "- NEVER ask for clarification — always produce output", + "- Only include points you are confident about", + "- Be specific — mention file names, function names, or features", + "- No markdown headers (#), no code fences, no bold/italic", + "- Omit Next steps if nothing is clearly outstanding", + '- If there is genuinely nothing new to report, just output: "No new progress since last update." — nothing else', + ].join("\n"); + + const systemPrompt = hasComments + ? "You write Jira progress comments. You are writing a CONTINUATION of an existing comment thread. Your job is to add only NEW information. If something was already said in a previous comment, you must skip it entirely. If nothing new happened, say so in one short sentence — do not explain why. Never ask for clarification. Output is captured as JSON automatically." 
+ : undefined; + + return runAgentQuery( + "generateJiraProgressComment", + prompt, + { + type: "object", + properties: { comment: { type: "string" } }, + required: ["comment"], + }, + (raw) => { + const obj = raw as { comment: string }; + return { comment: obj.comment.trim() } satisfies JiraProgressCommentGenerationResult; + }, + systemPrompt, + ); + }; + return { generateCommitMessage, generatePrContent, generateBranchName, + generateJiraTicketContent, + generateJiraProgressComment, } satisfies TextGenerationShape; }); diff --git a/apps/server/src/git/Layers/GitCore.test.ts b/apps/server/src/git/Layers/GitCore.test.ts index 6c98229e8a..977d4e985b 100644 --- a/apps/server/src/git/Layers/GitCore.test.ts +++ b/apps/server/src/git/Layers/GitCore.test.ts @@ -113,8 +113,13 @@ const makeIsolatedGitCore = (gitService: GitServiceShape) => renameBranch: (input) => core.renameBranch(input), createBranch: (input) => core.createBranch(input), checkoutBranch: (input) => core.checkoutBranch(input), + cloneRepo: (input) => core.cloneRepo(input), initRepo: (input) => core.initRepo(input), listLocalBranchNames: (cwd) => core.listLocalBranchNames(cwd), + diffBranch: (input) => core.diffBranch(input), + diffWorkingTree: (input) => core.diffWorkingTree(input), + getRepoRoot: (cwd) => core.getRepoRoot(cwd), + resolveRef: (cwd, ref) => core.resolveRef(cwd, ref), } satisfies GitCoreShape; }); @@ -1038,26 +1043,25 @@ it.layer(TestLayer)("git integration", (it) => { }), ); - it.effect("throws when new branch name already exists", () => + it.effect("reuses an existing branch when newBranch already exists", () => Effect.gen(function* () { const tmp = yield* makeTmpDir(); yield* initRepoWithCommit(tmp); yield* createGitBranch({ cwd: tmp, branch: "existing" }); const wtPath = path.join(tmp, "wt-conflict"); - const currentBranch = (yield* listGitBranches({ cwd: tmp })).branches.find( - (b) => b.current, - )!.name; + const result = yield* createGitWorktree({ + cwd: tmp, + branch: "main", + 
newBranch: "existing", + path: wtPath, + }); - const result = yield* Effect.result( - createGitWorktree({ - cwd: tmp, - branch: currentBranch, - newBranch: "existing", - path: wtPath, - }), - ); - expect(result._tag).toBe("Failure"); + expect(result.worktree.path).toBe(wtPath); + expect(result.worktree.branch).toBe("existing"); + expect(yield* git(wtPath, ["branch", "--show-current"])).toBe("existing"); + + yield* removeGitWorktree({ cwd: tmp, path: wtPath }); }), ); diff --git a/apps/server/src/git/Layers/GitCore.ts b/apps/server/src/git/Layers/GitCore.ts index f5b9168abb..be9953c340 100644 --- a/apps/server/src/git/Layers/GitCore.ts +++ b/apps/server/src/git/Layers/GitCore.ts @@ -5,7 +5,8 @@ import { GitService } from "../Services/GitService.ts"; import { GitCore, type GitCoreShape } from "../Services/GitCore.ts"; const STATUS_UPSTREAM_REFRESH_INTERVAL = Duration.seconds(15); -const STATUS_UPSTREAM_REFRESH_TIMEOUT = Duration.seconds(5); +const STATUS_UPSTREAM_REFRESH_TIMEOUT = Duration.seconds(15); +const STATUS_UPSTREAM_REFRESH_FAILURE_BACKOFF = Duration.seconds(60); const STATUS_UPSTREAM_REFRESH_CACHE_CAPACITY = 2_048; const DEFAULT_BASE_BRANCH_CANDIDATES = ["main", "master"] as const; @@ -391,8 +392,11 @@ const makeGitCore = Effect.gen(function* () { }); return true as const; }), - // Keep successful refreshes warm; drop failures immediately so next request can retry. - timeToLive: (exit) => (Exit.isSuccess(exit) ? STATUS_UPSTREAM_REFRESH_INTERVAL : Duration.zero), + // Keep successful refreshes warm; back off on failures to avoid spamming slow/unreachable remotes. + timeToLive: (exit) => + Exit.isSuccess(exit) + ? 
STATUS_UPSTREAM_REFRESH_INTERVAL + : STATUS_UPSTREAM_REFRESH_FAILURE_BACKOFF, }); const refreshStatusUpstreamIfStale = (cwd: string): Effect.Effect => @@ -763,6 +767,7 @@ const makeGitCore = Effect.gen(function* () { aheadCount: details.aheadCount, behindCount: details.behindCount, pr: null, + originUrl: null, })), ); @@ -1187,12 +1192,46 @@ const makeGitCore = Effect.gen(function* () { const sanitizedBranch = targetBranch.replace(/\//g, "-"); const repoName = path.basename(input.cwd); const homeDir = process.env.HOME ?? process.env.USERPROFILE ?? "/tmp"; - const worktreePath = + let worktreePath = input.path ?? path.join(homeDir, ".t3", "worktrees", repoName, sanitizedBranch); + // If the computed path already exists (e.g. from a prior review), deduplicate + if (!input.path) { + const basePath = worktreePath; + let suffix = 2; + while ( + yield* fileSystem.stat(worktreePath).pipe( + Effect.map(() => true), + Effect.catch(() => Effect.succeed(false)), + ) + ) { + worktreePath = `${basePath}-${suffix}`; + suffix += 1; + } + } + const shouldReuseExistingBranch = input.newBranch + ? yield* branchExists(input.cwd, input.newBranch) + : false; const args = input.newBranch - ? ["worktree", "add", "-b", input.newBranch, worktreePath, input.branch] + ? shouldReuseExistingBranch + ? ["worktree", "add", worktreePath, input.newBranch] + : ["worktree", "add", "-b", input.newBranch, worktreePath, input.branch] : ["worktree", "add", worktreePath, input.branch]; + // If the base branch is a remote ref (e.g. "origin/feature"), fetch it first + // so that the ref is available locally for worktree creation. 
+ const remoteRefMatch = input.branch.match(/^([\w.-]+)\/(.*)/); + if (remoteRefMatch) { + const [, remoteName, remoteBranch] = remoteRefMatch; + if (remoteName && remoteBranch) { + yield* runGit( + "GitCore.createWorktree.fetch", + input.cwd, + ["fetch", "--quiet", "--no-tags", remoteName, remoteBranch], + true, // allowNonZeroExit — fetch failure is not fatal, worktree add will error clearly + ); + } + } + yield* executeGit("GitCore.createWorktree", input.cwd, args, { fallbackErrorMessage: "git worktree add failed", }); @@ -1251,10 +1290,42 @@ const makeGitCore = Effect.gen(function* () { "--set-upstream-to", `${input.remoteName}/${input.remoteBranch}`, input.branch, - ]); + ]).pipe( + // Fallback: if --set-upstream-to fails (remote ref not fetched yet), + // set the tracking config directly via git config. + Effect.catch(() => + Effect.gen(function* () { + const configPrefix = `branch.${input.branch}`; + yield* runGit("GitCore.setBranchUpstream.configRemote", input.cwd, [ + "config", + configPrefix + ".remote", + input.remoteName, + ]); + yield* runGit("GitCore.setBranchUpstream.configMerge", input.cwd, [ + "config", + configPrefix + ".merge", + `refs/heads/${input.remoteBranch}`, + ]); + }), + ), + ); const removeWorktree: GitCoreShape["removeWorktree"] = (input) => Effect.gen(function* () { + // Skip removal if the worktree directory no longer exists + const worktreeExists = yield* fileSystem.stat(input.path).pipe( + Effect.map(() => true), + Effect.catch(() => Effect.succeed(false)), + ); + if (!worktreeExists) { + // Prune stale worktree entries so git doesn't keep a dangling ref + yield* executeGit("GitCore.removeWorktree.prune", input.cwd, ["worktree", "prune"], { + timeoutMs: 10_000, + allowNonZeroExit: true, + }); + return; + } + const args = ["worktree", "remove"]; if (input.force) { args.push("--force"); @@ -1381,6 +1452,65 @@ const makeGitCore = Effect.gen(function* () { ); }); + const cloneRepo: GitCoreShape["cloneRepo"] = (input) => + 
Effect.gen(function* () { + const repoName = + input.url + .split("/") + .pop() + ?.replace(/\.git$/, "") ?? "repo"; + // Expand ~ to the user's home directory (shells expand it, but + // programmatic callers pass it as a literal string). + const homeDir = process.env.HOME ?? process.env.USERPROFILE ?? ""; + const targetDir = + input.targetDir.startsWith("~/") && homeDir + ? path.join(homeDir, input.targetDir.slice(2)) + : input.targetDir; + const clonedPath = path.join(targetDir, repoName); + + // Ensure parent directory exists + const targetExists = yield* fileSystem.stat(targetDir).pipe( + Effect.map((stat) => stat.type === "Directory"), + Effect.catch(() => Effect.succeed(false)), + ); + if (!targetExists) { + yield* fileSystem.makeDirectory(targetDir, { recursive: true }).pipe( + Effect.mapError( + (error) => + new GitCommandError({ + operation: "GitCore.cloneRepo.mkdir", + command: `mkdir -p ${targetDir}`, + cwd: targetDir, + detail: `Failed to create target directory: ${error.message}`, + }), + ), + ); + } + + // Check if the directory already exists and is a git repo + const dirExists = yield* fileSystem.stat(clonedPath).pipe( + Effect.map((stat) => stat.type === "Directory"), + Effect.catch(() => Effect.succeed(false)), + ); + if (dirExists) { + const isGitRepo = yield* executeGit( + "GitCore.cloneRepo.check", + clonedPath, + ["rev-parse", "--git-dir"], + { allowNonZeroExit: true, timeoutMs: 5_000 }, + ); + if (isGitRepo.code === 0) { + return { clonedPath, alreadyExisted: true as const }; + } + } + + yield* executeGit("GitCore.cloneRepo", targetDir, ["clone", input.url], { + timeoutMs: 300_000, + fallbackErrorMessage: "git clone failed", + }); + return { clonedPath, alreadyExisted: false as const }; + }); + const initRepo: GitCoreShape["initRepo"] = (input) => executeGit("GitCore.initRepo", input.cwd, ["init"], { timeoutMs: 10_000, @@ -1401,6 +1531,49 @@ const makeGitCore = Effect.gen(function* () { ), ); + const diffBranch: GitCoreShape["diffBranch"] = 
(input) => + Effect.gen(function* () { + // Prefer the remote tracking ref (origin/) so the diff reflects + // the latest remote state, not the potentially stale local branch. + const remoteRef = `origin/${input.base}`; + const remoteExists = yield* runGitStdout( + "GitCore.diffBranch:checkRemoteRef", + input.cwd, + ["rev-parse", "--verify", remoteRef], + false, + ).pipe( + Effect.map(() => true), + Effect.catch(() => Effect.succeed(false)), + ); + const base = remoteExists ? remoteRef : input.base; + const stdout = yield* runGitStdout( + "GitCore.diffBranch", + input.cwd, + ["diff", `${base}...HEAD`], + true, + ); + return { diff: stdout }; + }); + + const diffWorkingTree: GitCoreShape["diffWorkingTree"] = (input) => + runGitStdout("GitCore.diffWorkingTree", input.cwd, ["diff", "HEAD"], true).pipe( + Effect.map((stdout) => ({ diff: stdout })), + ); + + const getRepoRoot: GitCoreShape["getRepoRoot"] = (cwd) => + runGitStdout("GitCore.getRepoRoot", cwd, ["rev-parse", "--show-toplevel"], true).pipe( + Effect.map((stdout) => { + const trimmed = stdout.trim(); + return trimmed.length > 0 ? 
trimmed : null; + }), + Effect.catch(() => Effect.succeed(null)), + ); + + const resolveRef: GitCoreShape["resolveRef"] = (cwd, ref) => + runGitStdout("GitCore.resolveRef", cwd, ["rev-parse", ref], false).pipe( + Effect.map((stdout) => stdout.trim()), + ); + return { status, statusDetails, @@ -1420,8 +1593,13 @@ const makeGitCore = Effect.gen(function* () { renameBranch, createBranch, checkoutBranch, + cloneRepo, initRepo, listLocalBranchNames, + diffBranch, + diffWorkingTree, + getRepoRoot, + resolveRef, } satisfies GitCoreShape; }); diff --git a/apps/server/src/git/Layers/GitHubCli.ts b/apps/server/src/git/Layers/GitHubCli.ts index 80ce43659e..fad6b465ef 100644 --- a/apps/server/src/git/Layers/GitHubCli.ts +++ b/apps/server/src/git/Layers/GitHubCli.ts @@ -7,6 +7,7 @@ import { GitHubCli, type GitHubRepositoryCloneUrls, type GitHubCliShape, + type GitHubPullRequestDetails, type GitHubPullRequestSummary, } from "../Services/GitHubCli.ts"; @@ -146,7 +147,12 @@ function normalizeRepositoryCloneUrls( function decodeGitHubJson( raw: string, schema: S, - operation: "listOpenPullRequests" | "getPullRequest" | "getRepositoryCloneUrls", + operation: + | "listOpenPullRequests" + | "getPullRequest" + | "getRepositoryCloneUrls" + | "listReviewRequests" + | "listReviewedPrs", invalidDetail: string, ): Effect.Effect { return Schema.decodeEffect(Schema.fromJsonString(schema))(raw).pipe( @@ -161,6 +167,62 @@ function decodeGitHubJson( ); } +const RawGitHubReviewRequestSchema = Schema.Struct({ + number: PositiveInt, + title: TrimmedNonEmptyString, + url: TrimmedNonEmptyString, + updatedAt: Schema.String, + body: Schema.String, + labels: Schema.Array(Schema.Struct({ name: Schema.String })), + repository: Schema.Struct({ + name: Schema.String, + nameWithOwner: Schema.String, + }), + author: Schema.Struct({ + login: Schema.String, + }), +}); + +const PR_DETAILS_JSON_FIELDS = + "number,title,body,url,state,headRefName,baseRefName,additions,deletions,changedFiles"; + +function 
parsePrDetails(raw: string): GitHubPullRequestDetails { + const trimmed = raw.trim(); + if (trimmed.length === 0) { + throw new Error("GitHub CLI returned empty response for PR details."); + } + + const parsed: unknown = JSON.parse(trimmed); + if (!parsed || typeof parsed !== "object") { + throw new Error("GitHub CLI returned non-object JSON for PR details."); + } + + const record = parsed as Record; + if ( + typeof record.number !== "number" || + typeof record.title !== "string" || + typeof record.url !== "string" || + typeof record.state !== "string" || + typeof record.headRefName !== "string" || + typeof record.baseRefName !== "string" + ) { + throw new Error("GitHub CLI returned incomplete PR details JSON."); + } + + return { + number: record.number, + title: record.title, + body: typeof record.body === "string" ? record.body : "", + url: record.url, + state: record.state as "OPEN" | "CLOSED" | "MERGED", + headRefName: record.headRefName, + baseRefName: record.baseRefName, + additions: typeof record.additions === "number" ? record.additions : 0, + deletions: typeof record.deletions === "number" ? record.deletions : 0, + changedFiles: typeof record.changedFiles === "number" ? record.changedFiles : 0, + }; +} + const makeGitHubCli = Effect.sync(() => { const execute: GitHubCliShape["execute"] = (input) => Effect.tryPromise({ @@ -168,6 +230,7 @@ const makeGitHubCli = Effect.sync(() => { runProcess("gh", input.args, { cwd: input.cwd, timeoutMs: input.timeoutMs ?? DEFAULT_TIMEOUT_MS, + ...(input.stdin !== undefined ? { stdin: input.stdin } : {}), }), catch: (error) => normalizeGitHubCliError("execute", error), }); @@ -180,12 +243,12 @@ const makeGitHubCli = Effect.sync(() => { args: [ "pr", "list", - "--head", - input.headSelector, + ...(input.repo ? ["--repo", input.repo] : []), + ...(input.headSelector ? ["--head", input.headSelector] : []), "--state", "open", "--limit", - String(input.limit ?? 1), + String(input.limit ?? 
30), "--json", "number,title,url,baseRefName,headRefName", ], @@ -267,6 +330,83 @@ const makeGitHubCli = Effect.sync(() => { return trimmed.length > 0 ? trimmed : null; }), ), + fetchPrDetails: (input) => + execute({ + cwd: input.cwd, + args: ["pr", "view", input.prUrl, "--json", PR_DETAILS_JSON_FIELDS], + }).pipe( + Effect.map((result) => result.stdout), + Effect.flatMap((raw) => + Effect.try({ + try: () => parsePrDetails(raw), + catch: (error: unknown) => + new GitHubCliError({ + operation: "fetchPrDetails", + detail: + error instanceof Error + ? `GitHub CLI returned invalid PR details JSON: ${error.message}` + : "GitHub CLI returned invalid PR details JSON.", + ...(error !== undefined ? { cause: error } : {}), + }), + }), + ), + ), + listReviewRequests: (input) => + execute({ + cwd: process.cwd(), + args: [ + "search", + "prs", + "--review-requested=@me", + "--state", + "open", + "--limit", + String(input.limit ?? 30), + "--json", + "number,title,url,repository,author,updatedAt,body,labels", + ], + }).pipe( + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + raw.length === 0 + ? Effect.succeed([] as (typeof RawGitHubReviewRequestSchema.Type)[]) + : decodeGitHubJson( + raw, + Schema.Array(RawGitHubReviewRequestSchema), + "listReviewRequests", + "GitHub CLI returned invalid review request JSON.", + ), + ), + ), + listReviewedPrs: (input) => + execute({ + cwd: process.cwd(), + args: [ + "search", + "prs", + "--reviewed-by=@me", + "--state", + "closed", + "--sort", + "updated", + "--limit", + String(input.limit ?? 20), + "--json", + "number,title,url,repository,author,updatedAt,body,labels", + ], + }).pipe( + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + raw.length === 0 + ? 
Effect.succeed([] as (typeof RawGitHubReviewRequestSchema.Type)[]) + : decodeGitHubJson( + raw, + Schema.Array(RawGitHubReviewRequestSchema), + "listReviewedPrs", + "GitHub CLI returned invalid reviewed PR JSON.", + ), + ), + ), checkoutPullRequest: (input) => execute({ cwd: input.cwd, diff --git a/apps/server/src/git/Layers/GitManager.test.ts b/apps/server/src/git/Layers/GitManager.test.ts index 8c72941cd0..d55e39061d 100644 --- a/apps/server/src/git/Layers/GitManager.test.ts +++ b/apps/server/src/git/Layers/GitManager.test.ts @@ -8,6 +8,7 @@ import { Effect, FileSystem, Layer, PlatformError, Scope } from "effect"; import { expect } from "vitest"; import { GitCommandError, GitHubCliError, TextGenerationError } from "../Errors.ts"; +import { GitCore, type GitCoreShape } from "../Services/GitCore.ts"; import { type GitManagerShape } from "../Services/GitManager.ts"; import { type GitHubCliShape, @@ -63,6 +64,15 @@ interface FakeGitTextGeneration { cwd: string; message: string; }) => Effect.Effect<{ branch: string }, TextGenerationError>; + generateJiraTicketContent?: (input: { + conversationContext: string; + projectKey: string; + }) => Effect.Effect<{ summary: string; description: string }, TextGenerationError>; + generateJiraProgressComment?: (input: { + ticketKey: string; + ticketTitle: string; + recentConversation: string; + }) => Effect.Effect<{ comment: string }, TextGenerationError>; } type FakePullRequest = NonNullable; @@ -166,6 +176,15 @@ function createTextGeneration(overrides: Partial = {}): T Effect.succeed({ branch: "update-workflow", }), + generateJiraTicketContent: () => + Effect.succeed({ + summary: "Mock summary", + description: "Mock description", + }), + generateJiraProgressComment: () => + Effect.succeed({ + comment: "Mock progress", + }), ...overrides, }; @@ -203,6 +222,34 @@ function createTextGeneration(overrides: Partial = {}): T }), ), ), + generateJiraTicketContent: (input) => + ( + implementation.generateJiraTicketContent ?? 
+ (() => Effect.succeed({ summary: "Mock summary", description: "Mock description" })) + )(input).pipe( + Effect.mapError( + (cause) => + new TextGenerationError({ + operation: "generateJiraTicketContent", + detail: "fake text generation failed", + ...(cause !== undefined ? { cause } : {}), + }), + ), + ), + generateJiraProgressComment: (input) => + ( + implementation.generateJiraProgressComment ?? + (() => Effect.succeed({ comment: "Mock progress" })) + )(input).pipe( + Effect.mapError( + (cause) => + new TextGenerationError({ + operation: "generateJiraProgressComment", + detail: "fake text generation failed", + ...(cause !== undefined ? { cause } : {}), + }), + ), + ), }; } @@ -379,7 +426,7 @@ function createGitHubCliWithFakeGh(scenario: FakeGhScenario = {}): { "pr", "list", "--head", - input.headSelector, + input.headSelector ?? "", "--state", "open", "--limit", @@ -418,6 +465,10 @@ function createGitHubCliWithFakeGh(scenario: FakeGhScenario = {}): { return value.length > 0 ? value : null; }), ), + fetchPrDetails: () => + Effect.fail( + new GitHubCliError({ operation: "fetchPrDetails", detail: "Not implemented in test." }), + ), getPullRequest: (input) => execute({ cwd: input.cwd, @@ -439,6 +490,8 @@ function createGitHubCliWithFakeGh(scenario: FakeGhScenario = {}): { cwd: input.cwd, args: ["pr", "checkout", input.reference, ...(input.force ? ["--force"] : [])], }).pipe(Effect.asVoid), + listReviewRequests: () => Effect.succeed([]), + listReviewedPrs: () => Effect.succeed([]), }, ghCalls, }; @@ -701,6 +754,156 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { }), ); + it.effect("status returns an empty result when the cwd no longer exists", () => + Effect.gen(function* () { + const repoDir = fs.mkdtempSync( + path.join( + process.env.TMPDIR ?? process.env.TEMP ?? process.env.TMP ?? 
"/tmp", + "t3code-git-manager-", + ), + ); + const fileSystem = yield* FileSystem.FileSystem; + const { manager } = yield* makeManager(); + + yield* fileSystem.remove(repoDir, { recursive: true, force: true }); + + const status = yield* manager.status({ cwd: repoDir }); + expect(status).toEqual({ + branch: null, + hasWorkingTreeChanges: false, + workingTree: { + files: [], + insertions: 0, + deletions: 0, + }, + hasUpstream: false, + aheadCount: 0, + behindCount: 0, + pr: null, + originUrl: null, + }); + }), + ); + + it.effect("preparePullRequestThread skips stale missing worktree paths", () => + Effect.gen(function* () { + const repoDir = fs.mkdtempSync( + path.join( + process.env.TMPDIR ?? process.env.TEMP ?? process.env.TMP ?? "/tmp", + "t3code-git-manager-", + ), + ); + const missingWorktreePath = path.join(repoDir, "missing-worktree"); + const newWorktreePath = fs.mkdtempSync(path.join(repoDir, "fresh-worktree-")); + const statusCalls: string[] = []; + let listBranchesCalls = 0; + let fetchPullRequestBranchCalls = 0; + let createWorktreeCalls = 0; + + const unsupported = () => Effect.die("unsupported"); + const fakeGitCore = { + status: unsupported, + statusDetails: (cwd: string) => { + statusCalls.push(cwd); + return Effect.succeed({ + branch: "feature/pr-74", + upstreamRef: null, + hasWorkingTreeChanges: false, + workingTree: { files: [], insertions: 0, deletions: 0 }, + hasUpstream: false, + aheadCount: 0, + behindCount: 0, + }); + }, + prepareCommitContext: unsupported, + commit: unsupported, + pushCurrentBranch: unsupported, + pullCurrentBranch: unsupported, + createBranch: unsupported, + checkoutBranch: unsupported, + initRepo: unsupported, + listBranches: () => + Effect.succeed({ + branches: + listBranchesCalls++ === 0 + ? 
[ + { + name: "feature/pr-74", + current: false, + isDefault: false, + worktreePath: missingWorktreePath, + }, + ] + : [ + { + name: "feature/pr-74", + current: false, + isDefault: false, + worktreePath: null, + }, + ], + isRepo: true, + hasOriginRemote: true, + }), + createWorktree: () => { + createWorktreeCalls += 1; + return Effect.succeed({ + worktree: { + path: newWorktreePath, + branch: "feature/pr-74", + }, + }); + }, + removeWorktree: unsupported, + renameBranch: unsupported, + fetchPullRequestBranch: () => { + fetchPullRequestBranchCalls += 1; + return Effect.void; + }, + ensureRemote: unsupported, + fetchRemoteBranch: unsupported, + setBranchUpstream: unsupported, + readConfigValue: unsupported, + readRangeContext: unsupported, + readPreparedCommitContext: unsupported, + } as unknown as GitCoreShape; + + const { service: gitHubCli } = createGitHubCliWithFakeGh({ + pullRequest: { + number: 74, + title: "Review PR #74", + url: "https://github.com/pingdotgg/codething-mvp/pull/74", + baseRefName: "main", + headRefName: "feature/pr-74", + state: "open", + }, + }); + + const manager = yield* makeGitManager.pipe( + Effect.provide( + Layer.mergeAll( + Layer.succeed(GitCore, fakeGitCore), + Layer.succeed(GitHubCli, gitHubCli), + Layer.succeed(TextGeneration, createTextGeneration()), + NodeServices.layer, + ), + ), + ); + + const result = yield* manager.preparePullRequestThread({ + cwd: repoDir, + reference: "#74", + mode: "worktree", + }); + + expect(result.branch).toBe("feature/pr-74"); + expect(result.worktreePath).toBe(newWorktreePath); + expect(fetchPullRequestBranchCalls).toBe(1); + expect(createWorktreeCalls).toBe(1); + expect(statusCalls).toEqual([newWorktreePath]); + }), + ); + it.effect("creates a commit when working tree is dirty", () => Effect.gen(function* () { const repoDir = yield* makeTempDir("t3code-git-manager-"); diff --git a/apps/server/src/git/Layers/GitManager.ts b/apps/server/src/git/Layers/GitManager.ts index 8357795173..bc1c0c7a2a 100644 
--- a/apps/server/src/git/Layers/GitManager.ts +++ b/apps/server/src/git/Layers/GitManager.ts @@ -1,14 +1,15 @@ import { randomUUID } from "node:crypto"; import { realpathSync } from "node:fs"; -import { Effect, FileSystem, Layer, Path } from "effect"; +import { Effect, FileSystem, Layer, Path, Schema } from "effect"; import { resolveAutoFeatureBranchName, sanitizeBranchFragment, sanitizeFeatureBranchName, } from "@t3tools/shared/git"; +import { createTtlCache } from "@t3tools/shared/cache"; -import { GitManagerError } from "../Errors.ts"; +import { GitCommandError, GitManagerError } from "../Errors.ts"; import { GitManager, type GitManagerShape } from "../Services/GitManager.ts"; import { GitCore } from "../Services/GitCore.ts"; import { GitHubCli } from "../Services/GitHubCli.ts"; @@ -117,6 +118,35 @@ function parseRepositoryOwnerLogin(nameWithOwner: string | null): string | null return normalizedOwnerLogin.length > 0 ? normalizedOwnerLogin : null; } +function emptyGitStatus() { + return { + branch: null, + hasWorkingTreeChanges: false, + workingTree: { + files: [], + insertions: 0, + deletions: 0, + }, + hasUpstream: false, + aheadCount: 0, + originUrl: null, + behindCount: 0, + pr: null, + } as const; +} + +function isMissingPathGitCommandError(error: unknown): error is GitCommandError { + if (!Schema.is(GitCommandError)(error)) { + return false; + } + + return ( + error.detail.includes("ENOENT") || + error.detail.includes("no such file or directory") || + error.detail.includes("NotFound: FileSystem.access") + ); +} + function parsePullRequestList(raw: unknown): PullRequestInfo[] { if (!Array.isArray(raw)) return []; @@ -332,6 +362,13 @@ function toPullRequestHeadRemoteInfo(pr: { }; } +// Cache PR lookups for 60s — the PR state for a branch rarely changes between polls. +const latestPrCache = createTtlCache(60_000); +// Cache repository clone URLs for 5 minutes — repo clone URLs are essentially static. 
+const repoCloneUrlCache = createTtlCache<{ sshUrl: string; url: string }>(300_000); +// Cache default branch for 15 minutes — essentially never changes for a given repo. +const defaultBranchCache = createTtlCache(900_000); + export const makeGitManager = Effect.gen(function* () { const gitCore = yield* GitCore; const gitHubCli = yield* GitHubCli; @@ -348,10 +385,16 @@ export const makeGitManager = Effect.gen(function* () { return; } - const cloneUrls = yield* gitHubCli.getRepositoryCloneUrls({ - cwd, - repository: repositoryNameWithOwner, - }); + const cachedCloneUrls = repoCloneUrlCache.get(repositoryNameWithOwner); + const cloneUrls = cachedCloneUrls + ? cachedCloneUrls + : yield* gitHubCli + .getRepositoryCloneUrls({ cwd, repository: repositoryNameWithOwner }) + .pipe( + Effect.tap((urls) => + Effect.sync(() => repoCloneUrlCache.set(repositoryNameWithOwner, urls)), + ), + ); const originRemoteUrl = yield* gitCore.readConfigValue(cwd, "remote.origin.url"); const remoteUrl = shouldPreferSshRemote(originRemoteUrl) ? cloneUrls.sshUrl : cloneUrls.url; const preferredRemoteName = @@ -395,10 +438,16 @@ export const makeGitManager = Effect.gen(function* () { return; } - const cloneUrls = yield* gitHubCli.getRepositoryCloneUrls({ - cwd, - repository: repositoryNameWithOwner, - }); + const cachedCloneUrls = repoCloneUrlCache.get(repositoryNameWithOwner); + const cloneUrls = cachedCloneUrls + ? cachedCloneUrls + : yield* gitHubCli + .getRepositoryCloneUrls({ cwd, repository: repositoryNameWithOwner }) + .pipe( + Effect.tap((urls) => + Effect.sync(() => repoCloneUrlCache.set(repositoryNameWithOwner, urls)), + ), + ); const originRemoteUrl = yield* gitCore.readConfigValue(cwd, "remote.origin.url"); const remoteUrl = shouldPreferSshRemote(originRemoteUrl) ? 
cloneUrls.sshUrl : cloneUrls.url; const preferredRemoteName = @@ -440,6 +489,12 @@ export const makeGitManager = Effect.gen(function* () { const readConfigValueNullable = (cwd: string, key: string) => gitCore.readConfigValue(cwd, key).pipe(Effect.catch(() => Effect.succeed(null))); + const isExistingDirectory = (candidate: string) => + fileSystem.stat(candidate).pipe( + Effect.map((stat) => stat.type === "Directory"), + Effect.catch(() => Effect.succeed(false)), + ); + const resolveRemoteRepositoryContext = (cwd: string, remoteName: string | null) => Effect.gen(function* () { if (!remoteName) { @@ -552,7 +607,10 @@ export const makeGitManager = Effect.gen(function* () { return null; }); - const findLatestPr = (cwd: string, details: { branch: string; upstreamRef: string | null }) => + const findLatestPrUncached = ( + cwd: string, + details: { branch: string; upstreamRef: string | null }, + ) => Effect.gen(function* () { const headContext = yield* resolveBranchHeadContext(cwd, details); const parsedByNumber = new Map(); @@ -605,6 +663,17 @@ export const makeGitManager = Effect.gen(function* () { return parsed[0] ?? null; }); + const findLatestPr = (cwd: string, details: { branch: string; upstreamRef: string | null }) => + Effect.gen(function* () { + const cacheKey = `${cwd}::${details.branch}`; + const cached = latestPrCache.get(cacheKey); + if (cached !== undefined) return cached; + + const result = yield* findLatestPrUncached(cwd, details); + latestPrCache.set(cacheKey, result); + return result; + }); + const resolveBaseBranch = ( cwd: string, branch: string, @@ -622,9 +691,14 @@ export const makeGitManager = Effect.gen(function* () { } } - const defaultFromGh = yield* gitHubCli - .getDefaultBranch({ cwd }) - .pipe(Effect.catch(() => Effect.succeed(null))); + const cachedDefault = defaultBranchCache.get(cwd); + if (cachedDefault !== undefined) { + return cachedDefault ?? 
"main"; + } + const defaultFromGh = yield* gitHubCli.getDefaultBranch({ cwd }).pipe( + Effect.tap((result) => Effect.sync(() => defaultBranchCache.set(cwd, result))), + Effect.catch(() => Effect.succeed(null)), + ); if (defaultFromGh) { return defaultFromGh; } @@ -726,6 +800,9 @@ export const makeGitManager = Effect.gen(function* () { upstreamRef: details.upstreamRef, }); + // Invalidate the PR cache since we're about to look up / create a PR + latestPrCache.invalidate(`${cwd}::${branch}`); + const existing = yield* findOpenPr(cwd, headContext.headSelectors); if (existing) { return { @@ -789,7 +866,23 @@ export const makeGitManager = Effect.gen(function* () { }); const status: GitManagerShape["status"] = Effect.fnUntraced(function* (input) { - const details = yield* gitCore.statusDetails(input.cwd); + const cwdStat = yield* fileSystem + .stat(input.cwd) + .pipe(Effect.catch(() => Effect.succeed(null))); + if (!cwdStat || cwdStat.type !== "Directory") { + return emptyGitStatus(); + } + + const details = yield* gitCore + .statusDetails(input.cwd) + .pipe( + Effect.catch((error) => + isMissingPathGitCommandError(error) ? 
Effect.succeed(null) : Effect.fail(error), + ), + ); + if (!details) { + return emptyGitStatus(); + } const pr = details.branch !== null @@ -802,6 +895,8 @@ export const makeGitManager = Effect.gen(function* () { ) : null; + const originUrl = yield* readConfigValueNullable(input.cwd, "remote.origin.url"); + return { branch: details.branch, hasWorkingTreeChanges: details.hasWorkingTreeChanges, @@ -810,6 +905,7 @@ export const makeGitManager = Effect.gen(function* () { aheadCount: details.aheadCount, behindCount: details.behindCount, pr, + originUrl, }; }); @@ -860,6 +956,11 @@ export const makeGitManager = Effect.gen(function* () { const ensureExistingWorktreeUpstream = (worktreePath: string) => Effect.gen(function* () { + const worktreeExists = yield* isExistingDirectory(worktreePath); + if (!worktreeExists) { + return false as const; + } + const details = yield* gitCore.statusDetails(worktreePath); yield* configurePullRequestHeadUpstream( worktreePath, @@ -869,6 +970,8 @@ export const makeGitManager = Effect.gen(function* () { }, details.branch ?? 
pullRequest.headBranch, ); + + return true as const; }); const pullRequestWithRemoteInfo = { @@ -910,12 +1013,16 @@ export const makeGitManager = Effect.gen(function* () { existingBranchBeforeFetch?.worktreePath && existingBranchBeforeFetchPath !== rootWorktreePath ) { - yield* ensureExistingWorktreeUpstream(existingBranchBeforeFetch.worktreePath); - return { - pullRequest, - branch: localPullRequestBranch, - worktreePath: existingBranchBeforeFetch.worktreePath, - }; + const reusedExistingWorktree = yield* ensureExistingWorktreeUpstream( + existingBranchBeforeFetch.worktreePath, + ); + if (reusedExistingWorktree) { + return { + pullRequest, + branch: localPullRequestBranch, + worktreePath: existingBranchBeforeFetch.worktreePath, + }; + } } if (existingBranchBeforeFetchPath === rootWorktreePath) { return yield* gitManagerError( @@ -938,12 +1045,16 @@ export const makeGitManager = Effect.gen(function* () { existingBranchAfterFetch?.worktreePath && existingBranchAfterFetchPath !== rootWorktreePath ) { - yield* ensureExistingWorktreeUpstream(existingBranchAfterFetch.worktreePath); - return { - pullRequest, - branch: localPullRequestBranch, - worktreePath: existingBranchAfterFetch.worktreePath, - }; + const reusedExistingWorktree = yield* ensureExistingWorktreeUpstream( + existingBranchAfterFetch.worktreePath, + ); + if (reusedExistingWorktree) { + return { + pullRequest, + branch: localPullRequestBranch, + worktreePath: existingBranchAfterFetch.worktreePath, + }; + } } if (existingBranchAfterFetchPath === rootWorktreePath) { return yield* gitManagerError( diff --git a/apps/server/src/git/Services/GitCore.ts b/apps/server/src/git/Services/GitCore.ts index 879927934e..5282ab2de4 100644 --- a/apps/server/src/git/Services/GitCore.ts +++ b/apps/server/src/git/Services/GitCore.ts @@ -13,6 +13,9 @@ import type { GitCreateBranchInput, GitCreateWorktreeInput, GitCreateWorktreeResult, + GitDiffBranchInput, + GitDiffBranchResult, + GitDiffWorkingTreeInput, GitInitInput, 
GitListBranchesInput, GitListBranchesResult, @@ -24,7 +27,7 @@ import type { import type { GitCommandError } from "../Errors.ts"; -export interface GitStatusDetails extends Omit { +export interface GitStatusDetails extends Omit { upstreamRef: string | null; } @@ -206,6 +209,14 @@ export interface GitCoreShape { input: GitCheckoutInput, ) => Effect.Effect; + /** + * Clone a repository into a target directory. + */ + readonly cloneRepo: (input: { + url: string; + targetDir: string; + }) => Effect.Effect<{ clonedPath: string; alreadyExisted: boolean }, GitCommandError>; + /** * Initialize a repository in the provided directory. */ @@ -215,6 +226,31 @@ export interface GitCoreShape { * List local branch names (short format). */ readonly listLocalBranchNames: (cwd: string) => Effect.Effect; + + /** + * Compute a diff between a base branch and HEAD. + */ + readonly diffBranch: ( + input: GitDiffBranchInput, + ) => Effect.Effect; + + /** + * Compute a diff of uncommitted working tree changes against HEAD. + */ + readonly diffWorkingTree: ( + input: GitDiffWorkingTreeInput, + ) => Effect.Effect; + + /** + * Return the repository root for a given working directory, + * or null if the path is not inside a git repository. + */ + readonly getRepoRoot: (cwd: string) => Effect.Effect; + + /** + * Resolve a git ref (e.g. "HEAD", branch name) to its full commit SHA. 
+ */ + readonly resolveRef: (cwd: string, ref: string) => Effect.Effect; } /** diff --git a/apps/server/src/git/Services/GitHubCli.ts b/apps/server/src/git/Services/GitHubCli.ts index f10339af47..f95bd017ea 100644 --- a/apps/server/src/git/Services/GitHubCli.ts +++ b/apps/server/src/git/Services/GitHubCli.ts @@ -29,6 +29,19 @@ export interface GitHubRepositoryCloneUrls { readonly sshUrl: string; } +export interface GitHubPullRequestDetails { + readonly number: number; + readonly title: string; + readonly body: string; + readonly url: string; + readonly state: "OPEN" | "CLOSED" | "MERGED"; + readonly headRefName: string; + readonly baseRefName: string; + readonly additions: number; + readonly deletions: number; + readonly changedFiles: number; +} + /** * GitHubCliShape - Service API for executing GitHub CLI commands. */ @@ -40,15 +53,18 @@ export interface GitHubCliShape { readonly cwd: string; readonly args: ReadonlyArray; readonly timeoutMs?: number; + readonly stdin?: string; }) => Effect.Effect; /** - * List open pull requests for a head branch. + * List open pull requests, optionally filtered by head branch. */ readonly listOpenPullRequests: (input: { readonly cwd: string; - readonly headSelector: string; + readonly headSelector?: string; readonly limit?: number; + /** Optional `owner/repo` to scope the list (avoids upstream resolution for forks). */ + readonly repo?: string; }) => Effect.Effect, GitHubCliError>; /** @@ -85,6 +101,48 @@ export interface GitHubCliShape { readonly cwd: string; }) => Effect.Effect; + /** + * Fetch full details for a pull request by URL or `owner/repo#number`. + */ + readonly fetchPrDetails: (input: { + readonly cwd: string; + readonly prUrl: string; + }) => Effect.Effect; + + /** + * List open PRs where the current user has been requested as a reviewer. 
+ */ + readonly listReviewRequests: (input: { readonly limit?: number }) => Effect.Effect< + ReadonlyArray<{ + readonly number: number; + readonly title: string; + readonly url: string; + readonly updatedAt: string; + readonly body: string; + readonly labels: ReadonlyArray<{ readonly name: string }>; + readonly repository: { readonly name: string; readonly nameWithOwner: string }; + readonly author: { readonly login: string }; + }>, + GitHubCliError + >; + + /** + * List recently merged/closed PRs where the current user has submitted a review. + */ + readonly listReviewedPrs: (input: { readonly limit?: number }) => Effect.Effect< + ReadonlyArray<{ + readonly number: number; + readonly title: string; + readonly url: string; + readonly updatedAt: string; + readonly body: string; + readonly labels: ReadonlyArray<{ readonly name: string }>; + readonly repository: { readonly name: string; readonly nameWithOwner: string }; + readonly author: { readonly login: string }; + }>, + GitHubCliError + >; + /** * Checkout a pull request into the current repository worktree. 
*/ diff --git a/apps/server/src/git/Services/TextGeneration.ts b/apps/server/src/git/Services/TextGeneration.ts index daae27fe66..571658216f 100644 --- a/apps/server/src/git/Services/TextGeneration.ts +++ b/apps/server/src/git/Services/TextGeneration.ts @@ -52,6 +52,30 @@ export interface BranchNameGenerationResult { branch: string; } +export interface JiraTicketContentGenerationInput { + conversationContext: string; + projectKey: string; +} + +export interface JiraTicketContentGenerationResult { + summary: string; + description: string; +} + +export interface JiraProgressCommentGenerationInput { + ticketKey: string; + ticketTitle: string; + ticketDescription: string; + ticketStatus: string; + ticketType: string; + ticketComments: string; + recentConversation: string; +} + +export interface JiraProgressCommentGenerationResult { + comment: string; +} + export interface TextGenerationService { generateCommitMessage( input: CommitMessageGenerationInput, @@ -84,6 +108,20 @@ export interface TextGenerationShape { readonly generateBranchName: ( input: BranchNameGenerationInput, ) => Effect.Effect; + + /** + * Generate Jira ticket summary and description from conversation context. + */ + readonly generateJiraTicketContent: ( + input: JiraTicketContentGenerationInput, + ) => Effect.Effect; + + /** + * Generate a Jira progress comment from recent conversation. + */ + readonly generateJiraProgressComment: ( + input: JiraProgressCommentGenerationInput, + ) => Effect.Effect; } /** diff --git a/apps/server/src/jira/Errors.ts b/apps/server/src/jira/Errors.ts new file mode 100644 index 0000000000..aef8ebb35c --- /dev/null +++ b/apps/server/src/jira/Errors.ts @@ -0,0 +1,36 @@ +import { Schema } from "effect"; +import type { TextGenerationError } from "../git/Errors.ts"; + +/** + * JiraCliError - Jira CLI execution or authentication failed. 
+ */ +export class JiraCliError extends Schema.TaggedErrorClass()("JiraCliError", { + operation: Schema.String, + detail: Schema.String, + cause: Schema.optional(Schema.Defect), +}) { + override get message(): string { + return `Jira CLI failed in ${this.operation}: ${this.detail}`; + } +} + +/** + * JiraManagerError - Jira workflow orchestration failed. + */ +export class JiraManagerError extends Schema.TaggedErrorClass()( + "JiraManagerError", + { + operation: Schema.String, + detail: Schema.String, + cause: Schema.optional(Schema.Defect), + }, +) { + override get message(): string { + return `Jira manager failed in ${this.operation}: ${this.detail}`; + } +} + +/** + * JiraManagerServiceError - Errors emitted by Jira workflow orchestration. + */ +export type JiraManagerServiceError = JiraCliError | JiraManagerError | TextGenerationError; diff --git a/apps/server/src/jira/Layers/JiraCli.ts b/apps/server/src/jira/Layers/JiraCli.ts new file mode 100644 index 0000000000..aa8a3ec0af --- /dev/null +++ b/apps/server/src/jira/Layers/JiraCli.ts @@ -0,0 +1,313 @@ +import { Effect, Layer } from "effect"; + +import { JiraCliError } from "../Errors.ts"; +import { JiraCli, type JiraCliShape } from "../Services/JiraCli.ts"; + +const DEFAULT_TIMEOUT_MS = 30_000; + +interface JiraRestConfig { + readonly baseUrl: string; + readonly email: string; + readonly apiToken: string; +} + +function readConfig(): JiraRestConfig | null { + const baseUrl = process.env.JIRA_BASE_URL?.replace(/\/+$/, ""); + const email = process.env.JIRA_USER_EMAIL; + const apiToken = process.env.JIRA_API_TOKEN; + if (!baseUrl || !email || !apiToken) return null; + return { baseUrl, email, apiToken }; +} + +async function jiraFetch( + config: JiraRestConfig, + path: string, + options: { method?: string; body?: unknown; timeoutMs?: number } = {}, +): Promise { + const url = `${config.baseUrl}/rest/api/3${path}`; + const auth = Buffer.from(`${config.email}:${config.apiToken}`).toString("base64"); + const 
controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), options.timeoutMs ?? DEFAULT_TIMEOUT_MS); + + try { + const response = await fetch(url, { + method: options.method ?? "GET", + headers: { + Authorization: `Basic ${auth}`, + Accept: "application/json", + ...(options.body ? { "Content-Type": "application/json" } : {}), + }, + body: options.body ? JSON.stringify(options.body) : undefined, + signal: controller.signal, + }); + + if (!response.ok) { + const text = await response.text().catch(() => ""); + throw new Error(`Jira API ${response.status}: ${text}`); + } + + const text = await response.text(); + return text.length > 0 ? JSON.parse(text) : {}; + } finally { + clearTimeout(timeout); + } +} + +function notConfiguredError(operation: string): JiraCliError { + return new JiraCliError({ + operation, + detail: + "Jira integration is not configured. Set JIRA_BASE_URL, JIRA_USER_EMAIL, and JIRA_API_TOKEN environment variables.", + }); +} + +/** Extract plain text from Atlassian Document Format (ADF). */ +function adfToPlainText(node: unknown): string { + if (!node || typeof node !== "object") return ""; + const n = node as Record; + if (n.type === "text" && typeof n.text === "string") return n.text; + if (Array.isArray(n.content)) { + return (n.content as unknown[]).map(adfToPlainText).join(""); + } + return ""; +} + +/** Lazily fetch and cache the current user's Jira accountId. */ +function createAccountIdResolver(config: JiraRestConfig) { + let cached: string | null = null; + return async (): Promise => { + if (cached) return cached; + try { + const data = (await jiraFetch(config, "/myself")) as Record; + cached = typeof data.accountId === "string" ? data.accountId : null; + } catch { + cached = null; + } + return cached; + }; +} + +const makeJiraCli = Effect.sync(() => { + const config = readConfig(); + const getAccountId = config ? 
createAccountIdResolver(config) : null; + + const execute: JiraCliShape["execute"] = () => + Effect.fail( + new JiraCliError({ + operation: "execute", + detail: "Raw CLI execution is not supported with the REST API backend.", + }), + ); + + const service = { + execute, + + viewIssue: (input) => { + if (!config) return Effect.fail(notConfiguredError("viewIssue")); + return Effect.tryPromise({ + try: async () => { + const data = (await jiraFetch( + config, + `/issue/${encodeURIComponent(input.key)}`, + )) as Record; + const fields = (data.fields ?? {}) as Record; + const rawComments = (fields.comment?.comments ?? []) as Array>; + const comments = rawComments.map((c) => ({ + author: String(c.author?.displayName ?? c.author?.emailAddress ?? "Unknown"), + body: typeof c.body === "string" ? c.body : adfToPlainText(c.body), + created: String(c.created ?? ""), + })); + return { + key: String(data.key ?? input.key), + url: `${config.baseUrl}/browse/${data.key ?? input.key}`, + summary: String(fields.summary ?? ""), + status: String(fields.status?.name ?? "Unknown"), + type: String(fields.issuetype?.name ?? "Task"), + priority: String(fields.priority?.name ?? "Medium"), + description: + typeof fields.description === "string" + ? fields.description + : adfToPlainText(fields.description), + comments, + }; + }, + catch: (error) => + new JiraCliError({ + operation: "viewIssue", + detail: error instanceof Error ? error.message : "Failed to view Jira issue.", + ...(error !== undefined ? { cause: error } : {}), + }), + }); + }, + + createIssue: (input) => { + if (!config) return Effect.fail(notConfiguredError("createIssue")); + return Effect.tryPromise({ + try: async () => { + const accountId = await getAccountId!(); + const body = { + fields: { + project: { key: input.projectKey }, + issuetype: { name: input.type }, + priority: { name: input.priority }, + summary: input.summary, + ...(accountId ? 
{ assignee: { accountId } } : {}), + description: { + type: "doc", + version: 1, + content: [ + { + type: "paragraph", + content: [{ type: "text", text: input.description || "Created via T3 Code" }], + }, + ], + }, + }, + }; + const data = (await jiraFetch(config, "/issue", { + method: "POST", + body, + })) as Record; + const key = String(data.key ?? ""); + return { + key, + url: `${config.baseUrl}/browse/${key}`, + }; + }, + catch: (error) => + new JiraCliError({ + operation: "createIssue", + detail: error instanceof Error ? error.message : "Failed to create Jira issue.", + ...(error !== undefined ? { cause: error } : {}), + }), + }); + }, + + moveIssue: (input) => { + if (!config) return Effect.fail(notConfiguredError("moveIssue")); + return Effect.tryPromise({ + try: async () => { + // First, get available transitions + const transitionsData = (await jiraFetch( + config, + `/issue/${encodeURIComponent(input.key)}/transitions`, + )) as Record; + const transitions = (transitionsData.transitions ?? []) as Array>; + const target = transitions.find( + (t) => t.name?.toLowerCase() === input.targetStatus.toLowerCase(), + ); + if (!target) { + const available = transitions.map((t) => t.name).join(", "); + throw new Error( + `Transition "${input.targetStatus}" not found. Available: ${available}`, + ); + } + await jiraFetch(config, `/issue/${encodeURIComponent(input.key)}/transitions`, { + method: "POST", + body: { transition: { id: target.id } }, + }); + return { key: input.key, newStatus: input.targetStatus }; + }, + catch: (error) => + new JiraCliError({ + operation: "moveIssue", + detail: error instanceof Error ? error.message : "Failed to move Jira issue.", + ...(error !== undefined ? 
{ cause: error } : {}), + }), + }); + }, + + addComment: (input) => { + if (!config) return Effect.fail(notConfiguredError("addComment")); + return Effect.tryPromise({ + try: async () => { + await jiraFetch(config, `/issue/${encodeURIComponent(input.key)}/comment`, { + method: "POST", + body: { + body: { + type: "doc", + version: 1, + content: [ + { + type: "paragraph", + content: [{ type: "text", text: input.comment }], + }, + ], + }, + }, + }); + return { key: input.key }; + }, + catch: (error) => + new JiraCliError({ + operation: "addComment", + detail: error instanceof Error ? error.message : "Failed to add Jira comment.", + ...(error !== undefined ? { cause: error } : {}), + }), + }); + }, + + listIssues: (input) => { + if (!config) return Effect.fail(notConfiguredError("listIssues")); + return Effect.tryPromise({ + try: async () => { + const jql = + input.jql ?? + (input.projectKey + ? `project = ${input.projectKey} ORDER BY updated DESC` + : "ORDER BY updated DESC"); + const data = (await jiraFetch( + config, + `/search/jql?jql=${encodeURIComponent(jql)}&maxResults=50&fields=summary,status,issuetype`, + )) as Record; + const issues = (data.issues ?? []) as Array>; + return { + issues: issues.map((item) => { + const fields = (item.fields ?? {}) as Record; + return { + key: String(item.key ?? ""), + summary: String(fields.summary ?? ""), + status: String(fields.status?.name ?? "Unknown"), + type: String(fields.issuetype?.name ?? "Task"), + }; + }), + }; + }, + catch: (error) => + new JiraCliError({ + operation: "listIssues", + detail: error instanceof Error ? error.message : "Failed to list Jira issues.", + ...(error !== undefined ? 
{ cause: error } : {}), + }), + }); + }, + listTransitions: (input) => { + if (!config) return Effect.fail(notConfiguredError("listTransitions")); + return Effect.tryPromise({ + try: async () => { + const data = (await jiraFetch( + config, + `/issue/${encodeURIComponent(input.key)}/transitions`, + )) as Record; + const transitions = (data.transitions ?? []) as Array>; + return { + transitions: transitions.map((t) => ({ + id: String(t.id ?? ""), + name: String(t.name ?? ""), + })), + }; + }, + catch: (error) => + new JiraCliError({ + operation: "listTransitions", + detail: error instanceof Error ? error.message : "Failed to list transitions.", + ...(error !== undefined ? { cause: error } : {}), + }), + }); + }, + } satisfies JiraCliShape; + + return service; +}); + +export const JiraCliLive = Layer.effect(JiraCli, makeJiraCli); diff --git a/apps/server/src/jira/Layers/JiraManager.test.ts b/apps/server/src/jira/Layers/JiraManager.test.ts new file mode 100644 index 0000000000..45f00f9d5e --- /dev/null +++ b/apps/server/src/jira/Layers/JiraManager.test.ts @@ -0,0 +1,207 @@ +import { describe, expect, it } from "vitest"; +import { Effect, Layer } from "effect"; + +import { JiraCli, type JiraCliShape } from "../Services/JiraCli.ts"; +import { JiraManager } from "../Services/JiraManager.ts"; +import { TextGeneration } from "../../git/Services/TextGeneration.ts"; +import { JiraManagerLive } from "./JiraManager.ts"; +import { JiraCliError } from "../Errors.ts"; + +function createFakeJiraCli(overrides: Partial = {}): JiraCliShape { + const defaults: JiraCliShape = { + execute: () => Effect.fail(new JiraCliError({ operation: "execute", detail: "not supported" })), + viewIssue: () => + Effect.succeed({ + key: "PROJ-1", + url: "https://jira.example.com/browse/PROJ-1", + summary: "Test issue", + status: "To Do", + type: "Task", + priority: "Medium", + description: "A test issue", + comments: [], + }), + createIssue: () => + Effect.succeed({ + key: "PROJ-2", + url: 
"https://jira.example.com/browse/PROJ-2", + }), + moveIssue: () => + Effect.succeed({ + key: "PROJ-1", + newStatus: "Done", + }), + addComment: () => + Effect.succeed({ + key: "PROJ-1", + }), + listIssues: () => + Effect.succeed({ + issues: [ + { key: "PROJ-1", summary: "Test issue", status: "To Do", type: "Task" }, + { key: "PROJ-2", summary: "Another issue", status: "In Progress", type: "Bug" }, + ], + }), + listTransitions: () => + Effect.succeed({ + transitions: [ + { id: "1", name: "In Progress" }, + { id: "2", name: "Done" }, + ], + }), + }; + return { ...defaults, ...overrides }; +} + +function createFakeTextGeneration() { + return { + generateCommitMessage: () => Effect.succeed({ subject: "test", body: "test body" }), + generatePrContent: () => Effect.succeed({ title: "Test PR", body: "PR body" }), + generateBranchName: () => Effect.succeed({ branch: "test-branch" }), + generateJiraTicketContent: () => + Effect.succeed({ summary: "Generated summary", description: "Generated description" }), + generateJiraProgressComment: () => Effect.succeed({ comment: "Progress update" }), + }; +} + +function makeTestLayer(jiraCliOverrides: Partial = {}) { + const JiraCliTest = Layer.succeed(JiraCli, createFakeJiraCli(jiraCliOverrides)); + const TextGenerationTest = Layer.succeed(TextGeneration, createFakeTextGeneration()); + return JiraManagerLive.pipe(Layer.provide(Layer.merge(JiraCliTest, TextGenerationTest))); +} + +function runWithLayer(effect: Effect.Effect) { + return Effect.runPromise(Effect.provide(effect, makeTestLayer())); +} + +describe("JiraManager", () => { + it("viewIssue delegates to JiraCli", async () => { + const result = await runWithLayer( + Effect.gen(function* () { + const manager = yield* JiraManager; + return yield* manager.viewIssue({ key: "PROJ-1" }); + }), + ); + expect(result.key).toBe("PROJ-1"); + expect(result.summary).toBe("Test issue"); + }); + + it("createIssue delegates to JiraCli", async () => { + const result = await runWithLayer( + 
Effect.gen(function* () { + const manager = yield* JiraManager; + return yield* manager.createIssue({ + projectKey: "PROJ", + type: "Task", + priority: "Medium", + summary: "New issue", + description: "Description", + }); + }), + ); + expect(result.key).toBe("PROJ-2"); + }); + + it("moveIssue delegates to JiraCli", async () => { + const result = await runWithLayer( + Effect.gen(function* () { + const manager = yield* JiraManager; + return yield* manager.moveIssue({ key: "PROJ-1", targetStatus: "Done" }); + }), + ); + expect(result.newStatus).toBe("Done"); + }); + + it("addComment delegates to JiraCli", async () => { + const result = await runWithLayer( + Effect.gen(function* () { + const manager = yield* JiraManager; + return yield* manager.addComment({ key: "PROJ-1", comment: "A comment" }); + }), + ); + expect(result.key).toBe("PROJ-1"); + }); + + it("listIssues delegates to JiraCli", async () => { + const result = await runWithLayer( + Effect.gen(function* () { + const manager = yield* JiraManager; + return yield* manager.listIssues({ projectKey: "PROJ" }); + }), + ); + expect(result.issues).toHaveLength(2); + expect(result.issues[0]!.key).toBe("PROJ-1"); + }); + + it("generateTicketContent delegates to TextGeneration", async () => { + const result = await runWithLayer( + Effect.gen(function* () { + const manager = yield* JiraManager; + return yield* manager.generateTicketContent({ + conversationContext: "Some context", + projectKey: "PROJ", + }); + }), + ); + expect(result.summary).toBe("Generated summary"); + expect(result.description).toBe("Generated description"); + }); + + it("generateProgressComment delegates to TextGeneration", async () => { + const result = await runWithLayer( + Effect.gen(function* () { + const manager = yield* JiraManager; + return yield* manager.generateProgressComment({ + ticketKey: "PROJ-1", + ticketTitle: "Test issue", + recentConversation: "Recent context", + }); + }), + ); + expect(result.comment).toBe("Progress update"); + }); + 
+ it("generateTicketContent truncates long input", async () => { + const longContext = "x".repeat(25_000); + const result = await runWithLayer( + Effect.gen(function* () { + const manager = yield* JiraManager; + return yield* manager.generateTicketContent({ + conversationContext: longContext, + projectKey: "PROJ", + }); + }), + ); + // Should succeed — limitContext truncates before passing to TextGeneration + expect(result.summary).toBe("Generated summary"); + }); + + it("propagates JiraCli errors", async () => { + const failingLayer = JiraManagerLive.pipe( + Layer.provide( + Layer.merge( + Layer.succeed( + JiraCli, + createFakeJiraCli({ + viewIssue: () => + Effect.fail(new JiraCliError({ operation: "viewIssue", detail: "not found" })), + }), + ), + Layer.succeed(TextGeneration, createFakeTextGeneration()), + ), + ), + ); + + await expect( + Effect.runPromise( + Effect.provide( + Effect.gen(function* () { + const manager = yield* JiraManager; + return yield* manager.viewIssue({ key: "NOPE-999" }); + }), + failingLayer, + ), + ), + ).rejects.toBeDefined(); + }); +}); diff --git a/apps/server/src/jira/Layers/JiraManager.ts b/apps/server/src/jira/Layers/JiraManager.ts new file mode 100644 index 0000000000..499c91bda6 --- /dev/null +++ b/apps/server/src/jira/Layers/JiraManager.ts @@ -0,0 +1,68 @@ +import { Effect, Layer } from "effect"; + +import { JiraCli } from "../Services/JiraCli.ts"; +import { JiraManager, type JiraManagerShape } from "../Services/JiraManager.ts"; +import { TextGeneration } from "../../git/Services/TextGeneration.ts"; + +function limitContext(value: string, maxChars: number): string { + if (value.length <= maxChars) return value; + return `${value.slice(0, maxChars)}\n\n[truncated]`; +} + +function formatComments( + comments: ReadonlyArray<{ author: string; body: string; created: string }>, +): string { + if (comments.length === 0) return ""; + return comments.map((c) => `[${c.created}] ${c.author}: ${c.body}`).join("\n\n"); +} + +export const 
makeJiraManager = Effect.gen(function* () { + const jiraCli = yield* JiraCli; + const textGeneration = yield* TextGeneration; + + const viewIssue: JiraManagerShape["viewIssue"] = (input) => jiraCli.viewIssue(input); + + const createIssue: JiraManagerShape["createIssue"] = (input) => jiraCli.createIssue(input); + + const moveIssue: JiraManagerShape["moveIssue"] = (input) => jiraCli.moveIssue(input); + + const addComment: JiraManagerShape["addComment"] = (input) => jiraCli.addComment(input); + + const listIssues: JiraManagerShape["listIssues"] = (input) => jiraCli.listIssues(input); + + const listTransitions: JiraManagerShape["listTransitions"] = (input) => + jiraCli.listTransitions(input); + + const generateTicketContent: JiraManagerShape["generateTicketContent"] = (input) => + textGeneration.generateJiraTicketContent({ + conversationContext: limitContext(input.conversationContext, 20_000), + projectKey: input.projectKey, + }); + + const generateProgressComment: JiraManagerShape["generateProgressComment"] = (input) => + Effect.gen(function* () { + const issue = yield* jiraCli.viewIssue({ key: input.ticketKey }); + return yield* textGeneration.generateJiraProgressComment({ + ticketKey: input.ticketKey, + ticketTitle: issue.summary, + ticketDescription: issue.description, + ticketStatus: issue.status, + ticketType: issue.type, + ticketComments: formatComments(issue.comments), + recentConversation: limitContext(input.recentConversation, 20_000), + }); + }); + + return { + viewIssue, + createIssue, + moveIssue, + addComment, + listIssues, + listTransitions, + generateTicketContent, + generateProgressComment, + } satisfies JiraManagerShape; +}); + +export const JiraManagerLive = Layer.effect(JiraManager, makeJiraManager); diff --git a/apps/server/src/jira/Services/JiraCli.ts b/apps/server/src/jira/Services/JiraCli.ts new file mode 100644 index 0000000000..d442a8c9d8 --- /dev/null +++ b/apps/server/src/jira/Services/JiraCli.ts @@ -0,0 +1,67 @@ +/** + * JiraCli - Effect 
service contract for Jira REST API interactions. + * + * Provides thin command execution helpers used by Jira workflow orchestration. + * Backed by direct HTTP calls to the Jira v3 REST API. + * + * @module JiraCli + */ +import { ServiceMap } from "effect"; +import type { Effect } from "effect"; + +import type { + JiraIssueViewInput, + JiraIssueViewResult, + JiraIssueCreateInput, + JiraIssueCreateResult, + JiraIssueMoveInput, + JiraIssueMoveResult, + JiraCommentAddInput, + JiraCommentAddResult, + JiraIssueListInput, + JiraIssueListResult, + JiraListTransitionsInput, + JiraListTransitionsResult, +} from "@t3tools/contracts"; +import type { JiraCliError } from "../Errors.ts"; + +/** + * JiraCliShape - Service API for Jira REST API operations. + */ +export interface JiraCliShape { + readonly execute: (input: { + readonly args: ReadonlyArray; + readonly timeoutMs?: number; + }) => Effect.Effect; + + readonly viewIssue: ( + input: JiraIssueViewInput, + ) => Effect.Effect; + + readonly createIssue: ( + input: JiraIssueCreateInput, + ) => Effect.Effect; + + readonly moveIssue: ( + input: JiraIssueMoveInput, + ) => Effect.Effect; + + readonly addComment: ( + input: JiraCommentAddInput, + ) => Effect.Effect; + + readonly listIssues: ( + input: JiraIssueListInput, + ) => Effect.Effect; + + readonly listTransitions: ( + input: JiraListTransitionsInput, + ) => Effect.Effect; +} + +/** + * JiraCli - Service tag for Jira REST API execution. + */ +export class JiraCli extends ServiceMap.Service()( + "t3/jira/Services/JiraCli", +) {} diff --git a/apps/server/src/jira/Services/JiraManager.ts b/apps/server/src/jira/Services/JiraManager.ts new file mode 100644 index 0000000000..24f100eb41 --- /dev/null +++ b/apps/server/src/jira/Services/JiraManager.ts @@ -0,0 +1,83 @@ +/** + * JiraManager - Effect service contract for Jira workflow orchestration. + * + * Orchestrates Jira operations by composing JiraCli and TextGeneration services. 
+ * + * @module JiraManager + */ +import { + type JiraIssueViewInput, + type JiraIssueViewResult, + type JiraIssueCreateInput, + type JiraIssueCreateResult, + type JiraIssueMoveInput, + type JiraIssueMoveResult, + type JiraCommentAddInput, + type JiraCommentAddResult, + type JiraIssueListInput, + type JiraIssueListResult, + type JiraListTransitionsInput, + type JiraListTransitionsResult, + type JiraGenerateTicketContentResult, + type JiraGenerateProgressCommentResult, +} from "@t3tools/contracts"; +import { ServiceMap } from "effect"; +import type { Effect } from "effect"; +import type { JiraManagerServiceError } from "../Errors.ts"; + +/** Internal input for ticket content generation (wsServer resolves threadId → context). */ +export interface GenerateTicketContentInput { + conversationContext: string; + projectKey: string; +} + +/** Internal input for progress comment generation. */ +export interface GenerateProgressCommentInput { + ticketKey: string; + ticketTitle: string; + recentConversation: string; +} + +/** + * JiraManagerShape - Service API for high-level Jira workflow actions. + */ +export interface JiraManagerShape { + readonly viewIssue: ( + input: JiraIssueViewInput, + ) => Effect.Effect; + + readonly createIssue: ( + input: JiraIssueCreateInput, + ) => Effect.Effect; + + readonly moveIssue: ( + input: JiraIssueMoveInput, + ) => Effect.Effect; + + readonly addComment: ( + input: JiraCommentAddInput, + ) => Effect.Effect; + + readonly listIssues: ( + input: JiraIssueListInput, + ) => Effect.Effect; + + readonly listTransitions: ( + input: JiraListTransitionsInput, + ) => Effect.Effect; + + readonly generateTicketContent: ( + input: GenerateTicketContentInput, + ) => Effect.Effect; + + readonly generateProgressComment: ( + input: GenerateProgressCommentInput, + ) => Effect.Effect; +} + +/** + * JiraManager - Service tag for Jira workflow orchestration. 
+ */ +export class JiraManager extends ServiceMap.Service()( + "t3/jira/Services/JiraManager", +) {} diff --git a/apps/server/src/llm/agentQuery.ts b/apps/server/src/llm/agentQuery.ts new file mode 100644 index 0000000000..97a29f39ef --- /dev/null +++ b/apps/server/src/llm/agentQuery.ts @@ -0,0 +1,74 @@ +/** + * Shared lightweight LLM query utility using the Claude Agent SDK. + * + * Wraps `query()` with a JSON-schema output format and returns parsed + * structured results. Used by text-generation helpers (commit messages, + * Jira content) and memory extraction. + * + * @module agentQuery + */ +import { query as claudeQuery, type SDKResultMessage } from "@anthropic-ai/claude-agent-sdk"; +import { Effect } from "effect"; + +import { TextGenerationError } from "../git/Errors.ts"; + +const DEFAULT_SYSTEM_PROMPT = + "You produce structured JSON output. Never ask for clarification or refuse — always produce your best output with the context provided."; + +/** + * Run a one-shot prompt via the Claude Agent SDK and parse the structured + * JSON result. + * + * Uses Haiku for fast, low-cost generation with `permissionMode: "plan"` + * (no tool use) and thinking disabled. + */ +export function runAgentQuery( + operation: string, + prompt: string, + jsonSchema: Record, + parse: (result: unknown) => T, + systemPrompt?: string, +): Effect.Effect { + return Effect.tryPromise({ + try: async () => { + const session = claudeQuery({ + prompt, + options: { + model: "claude-haiku-4-5-20251001", + permissionMode: "plan", + systemPrompt: systemPrompt ?? 
DEFAULT_SYSTEM_PROMPT, + outputFormat: { type: "json_schema", schema: jsonSchema }, + maxTurns: 10, + thinking: { type: "disabled" }, + }, + }); + + let resultMessage: SDKResultMessage | null = null; + for await (const message of session) { + if (message.type === "result") { + resultMessage = message as SDKResultMessage; + } + } + + if (!resultMessage) { + throw new Error("No result message received from agent query"); + } + if (resultMessage.subtype !== "success") { + const errors = resultMessage.errors.join("; "); + throw new Error( + `Agent query failed (${resultMessage.subtype}): ${errors || "unknown error"}`, + ); + } + if (resultMessage.structured_output != null) { + return parse(resultMessage.structured_output); + } + return parse(JSON.parse(resultMessage.result)); + }, + catch: (error) => + new TextGenerationError({ + operation, + detail: error instanceof Error ? error.message : "Agent query failed", + cause: error, + }), + }); +} diff --git a/apps/server/src/main.test.ts b/apps/server/src/main.test.ts index 83976e3d4c..1d085ec0a9 100644 --- a/apps/server/src/main.test.ts +++ b/apps/server/src/main.test.ts @@ -50,6 +50,7 @@ const testLayer = Layer.mergeAll( Layer.succeed(Open, { openBrowser: (_target: string) => Effect.void, openInEditor: () => Effect.void, + openInWarp: () => Effect.void, } satisfies OpenShape), AnalyticsService.layerTest, FetchHttpClient.layer, diff --git a/apps/server/src/main.ts b/apps/server/src/main.ts index 0a33be0cbb..01f9afdac3 100644 --- a/apps/server/src/main.ts +++ b/apps/server/src/main.ts @@ -9,6 +9,8 @@ import { Config, Data, Effect, FileSystem, Layer, Option, Path, Schema, ServiceMap } from "effect"; import { Command, Flag } from "effect/unstable/cli"; import { NetService } from "@t3tools/shared/Net"; + +// Dummy comment. 
import { DEFAULT_PORT, resolveStaticDir, @@ -198,9 +200,9 @@ const LayerLive = (input: CliInput) => Layer.provideMerge(makeServerRuntimeServicesLayer()), Layer.provideMerge(makeServerProviderLayer()), Layer.provideMerge(ProviderHealthLive), + Layer.provideMerge(AnalyticsServiceLayerLive), Layer.provideMerge(SqlitePersistence.layerConfig), Layer.provideMerge(ServerLoggerLive), - Layer.provideMerge(AnalyticsServiceLayerLive), Layer.provideMerge(ServerConfigLive(input)), ); diff --git a/apps/server/src/memory/Layers/MemoryExtraction.ts b/apps/server/src/memory/Layers/MemoryExtraction.ts new file mode 100644 index 0000000000..8b4fcd31ae --- /dev/null +++ b/apps/server/src/memory/Layers/MemoryExtraction.ts @@ -0,0 +1,384 @@ +/** + * MemoryExtraction layer — batch extraction of memories from conversation threads. + * + * Gathers threads from the orchestration read model, runs LLM summarization via + * the Claude Agent SDK, deduplicates against existing memories, and stores results. + * + * @module memory/Layers/MemoryExtraction + */ +import type { + MemoryCategory, + MemoryDate, + Memory, + OrchestrationProject, + OrchestrationThread, + ProjectId, + TrimmedNonEmptyString, + NonNegativeInt, +} from "@t3tools/contracts"; +import { Cause, Effect, Layer } from "effect"; + +import { runAgentQuery } from "../../llm/agentQuery.ts"; +import { MemoryRepository } from "../../persistence/Services/MemoryRepository.ts"; +import { ProjectionSnapshotQuery } from "../../orchestration/Services/ProjectionSnapshotQuery.ts"; + +import { + MemoryExtraction, + MemoryExtractionError, + type MemoryExtractionShape, +} from "../Services/MemoryExtraction.ts"; +import { + buildProjectExtractionPrompt, + buildDailySummaryPrompt, + PROJECT_EXTRACTION_SCHEMA, + PROJECT_EXTRACTION_SYSTEM_PROMPT, + DAILY_SUMMARY_SCHEMA, + DAILY_SUMMARY_SYSTEM_PROMPT, +} from "../prompts.ts"; + +// ── Types for LLM structured output ─────────────────────────────── + +interface ExtractedMemory { + title: string; + 
content: string; + category: MemoryCategory; +} + +interface ProjectExtractionResult { + memories: ExtractedMemory[]; +} + +interface DailySummaryEntry { + title: string; + content: string; + projectTitle?: string; +} + +interface DailySummaryResult { + entries: DailySummaryEntry[]; +} + +// ── Deduplication ───────────────────────────────────────────────── + +function tokenize(s: string): Set { + return new Set( + s + .toLowerCase() + .replace(/[^a-z0-9\s]/g, "") + .split(/\s+/) + .filter(Boolean), + ); +} + +function jaccardSimilarity(a: string, b: string): number { + const setA = tokenize(a); + const setB = tokenize(b); + if (setA.size === 0 && setB.size === 0) return 1; + let intersection = 0; + for (const token of setA) { + if (setB.has(token)) intersection++; + } + const union = new Set([...setA, ...setB]).size; + return union === 0 ? 0 : intersection / union; +} + +// ── Parse helpers ───────────────────────────────────────────────── + +const VALID_CATEGORIES = new Set([ + "preference", + "pattern", + "decision", + "fact", + "convention", +]); + +function parseProjectExtraction(raw: unknown): ProjectExtractionResult { + const obj = raw as Record; + const rawMemories = Array.isArray(obj.memories) ? obj.memories : []; + const memories: ExtractedMemory[] = []; + for (const m of rawMemories) { + const entry = m as Record; + if ( + typeof entry.title === "string" && + typeof entry.content === "string" && + typeof entry.category === "string" && + VALID_CATEGORIES.has(entry.category) + ) { + memories.push({ + title: entry.title, + content: entry.content, + category: entry.category as MemoryCategory, + }); + } + } + return { memories }; +} + +function parseDailySummary(raw: unknown): DailySummaryResult { + const obj = raw as Record; + const rawEntries = Array.isArray(obj.entries) ? 
obj.entries : []; + const entries: DailySummaryEntry[] = []; + for (const e of rawEntries) { + const entry = e as Record; + if (typeof entry.title === "string" && typeof entry.content === "string") { + entries.push({ + title: entry.title, + content: entry.content, + ...(typeof entry.projectTitle === "string" ? { projectTitle: entry.projectTitle } : {}), + }); + } + } + return { entries }; +} + +import { threadToTranscript } from "../threadTranscript.ts"; + +// ── Layer implementation ────────────────────────────────────────── + +const makeMemoryExtraction = Effect.gen(function* () { + const memoryRepo = yield* MemoryRepository; + const projectionQuery = yield* ProjectionSnapshotQuery; + + const extract: MemoryExtractionShape["extract"] = (input) => + Effect.gen(function* () { + yield* Effect.logInfo( + `Memory extraction started: sinceDate=${input.sinceDate}, projectId=${input.projectId ?? "all"}`, + ); + + const snapshot = yield* projectionQuery.getSnapshot(); + + // 1. Filter threads updated since sinceDate + const sinceMs = new Date(input.sinceDate).getTime(); + const filterProjectId = input.projectId; + const relevantThreads = snapshot.threads.filter((t) => { + if (t.deletedAt !== null) return false; + if (new Date(t.updatedAt).getTime() < sinceMs) return false; + if (filterProjectId && t.projectId !== filterProjectId) return false; + if (t.messages.length === 0) return false; + return true; + }); + + yield* Effect.logInfo(`Found ${relevantThreads.length} relevant threads`); + + // 2. Group by project + const threadsByProject = new Map(); + for (const thread of relevantThreads) { + const existing = threadsByProject.get(thread.projectId); + if (existing) { + existing.push(thread); + } else { + threadsByProject.set(thread.projectId, [thread]); + } + } + + const projectEntries = [...threadsByProject.entries()]; + + // 3. 
Per-project extraction + const projectResults = yield* Effect.forEach(projectEntries, ([projectId, threads]) => + Effect.gen(function* () { + const project = snapshot.projects.find((p: OrchestrationProject) => p.id === projectId); + if (!project) return { extracted: 0, duplicates: 0, summary: null as null }; + + const transcripts = threads.map(threadToTranscript); + const prompt = buildProjectExtractionPrompt(project.title, transcripts); + + const result = yield* runAgentQuery( + "memoryExtraction.project", + prompt, + PROJECT_EXTRACTION_SCHEMA as Record, + parseProjectExtraction, + PROJECT_EXTRACTION_SYSTEM_PROMPT, + ).pipe( + Effect.catch((error: unknown) => + Effect.logWarning( + `LLM extraction failed for project "${project.title}": ${String(error)}`, + ).pipe(Effect.map(() => ({ memories: [] }) as ProjectExtractionResult)), + ), + ); + + yield* Effect.logInfo( + `Project "${project.title}": LLM returned ${result.memories.length} memories`, + ); + + // 4. Dedup + store + let extracted = 0; + let duplicates = 0; + const validMemories = result.memories.filter( + (m) => m.title.trim().length > 0 && m.content.trim().length > 0, + ); + + // Load all existing project memories once for dedup comparison + const existingResult = yield* memoryRepo + .listByProject({ + projectId, + includeThread: false, + limit: 500 as typeof NonNegativeInt.Type, + }) + .pipe( + Effect.map((r) => r.memories), + Effect.catchCause(() => Effect.succeed([] as Memory[])), + ); + + // Mutable set — also tracks memories added in this batch + const allExisting = [...existingResult]; + + for (const mem of validMemories) { + const isDuplicate = allExisting.some((e) => { + const titleSim = jaccardSimilarity(e.title, mem.title); + const contentSim = jaccardSimilarity(e.content, mem.content); + // Combined: weigh both title and content similarity + const combined = titleSim * 0.4 + contentSim * 0.6; + return titleSim > 0.4 || contentSim > 0.4 || combined > 0.35; + }); + + if (isDuplicate) { + 
duplicates++; + continue; + } + + const ok = yield* memoryRepo + .create({ + projectId, + scope: "project", + category: mem.category, + source: "auto", + title: mem.title as typeof TrimmedNonEmptyString.Type, + content: mem.content as typeof TrimmedNonEmptyString.Type, + }) + .pipe( + Effect.map(() => true as const), + Effect.catchCause((cause) => + Effect.logWarning( + `Failed to create memory "${mem.title}": ${Cause.pretty(cause)}`, + ).pipe(Effect.map(() => false as const)), + ), + ); + + if (ok) { + extracted++; + // Track for within-batch dedup + allExisting.push({ + title: mem.title, + content: mem.content, + } as Memory); + } + } + + yield* Effect.logInfo( + `Project "${project.title}": stored ${extracted}, skipped ${duplicates} duplicates`, + ); + + return { + extracted, + duplicates, + summary: { + projectId, + projectTitle: project.title, + threadTitles: threads.map((t) => t.title), + }, + }; + }), + ); + + let extractedCount = 0; + let skippedDuplicates = 0; + const projectSummaries: { + projectId: ProjectId; + projectTitle: string; + threadTitles: string[]; + }[] = []; + + for (const pr of projectResults) { + extractedCount += pr.extracted; + skippedDuplicates += pr.duplicates; + if (pr.summary) projectSummaries.push(pr.summary); + } + + // 5. 
Daily summary + if (projectSummaries.length > 0) { + const today = new Date().toISOString().slice(0, 10); + + // Load existing daily summaries to incorporate into the new ones + const existingDaily = yield* memoryRepo + .listDailyByDate({ date: today }) + .pipe(Effect.catchCause(() => Effect.succeed([] as readonly Memory[]))); + + const existingDailySummaries = existingDaily.map((m) => ({ + title: m.title, + content: m.content, + })); + + const dailyResult = yield* runAgentQuery( + "memoryExtraction.daily", + buildDailySummaryPrompt(projectSummaries, today, existingDailySummaries), + DAILY_SUMMARY_SCHEMA as Record, + parseDailySummary, + DAILY_SUMMARY_SYSTEM_PROMPT, + ).pipe( + Effect.catch((error: unknown) => + Effect.logWarning(`Daily summary LLM call failed: ${String(error)}`).pipe( + Effect.map(() => ({ entries: [] }) as DailySummaryResult), + ), + ), + ); + + // Delete existing daily memories for today before inserting fresh ones + // This makes daily summaries idempotent — re-running replaces, not duplicates + yield* memoryRepo + .deleteDailyByDate({ date: today }) + .pipe(Effect.catchCause(() => Effect.void)); + + for (const entry of dailyResult.entries) { + if (!entry.title.trim() || !entry.content.trim()) continue; + + const matchedProject = entry.projectTitle + ? 
projectSummaries.find((p) => p.projectTitle === entry.projectTitle) + : null; + + const ok = yield* memoryRepo + .create({ + projectId: matchedProject?.projectId, + scope: "daily", + category: "fact", + source: "auto", + title: entry.title as typeof TrimmedNonEmptyString.Type, + content: entry.content as typeof TrimmedNonEmptyString.Type, + date: today as MemoryDate, + }) + .pipe( + Effect.map(() => true as const), + Effect.catchCause((cause) => + Effect.logWarning( + `Failed to create daily memory "${entry.title}": ${Cause.pretty(cause)}`, + ).pipe(Effect.map(() => false as const)), + ), + ); + + if (ok) extractedCount++; + } + } + + yield* Effect.logInfo( + `Memory extraction complete: extracted=${extractedCount}, skipped=${skippedDuplicates}, projects=${projectEntries.length}`, + ); + + return { + extractedCount, + skippedDuplicates, + projectsProcessed: projectEntries.length, + }; + }).pipe( + Effect.mapError( + (error) => + new MemoryExtractionError({ + operation: "extract", + detail: error instanceof Error ? error.message : String(error), + cause: error, + }), + ), + ); + + return { extract } satisfies MemoryExtractionShape; +}); + +export const MemoryExtractionLive = Layer.effect(MemoryExtraction, makeMemoryExtraction); diff --git a/apps/server/src/memory/Layers/MemoryReactor.ts b/apps/server/src/memory/Layers/MemoryReactor.ts new file mode 100644 index 0000000000..34016443f3 --- /dev/null +++ b/apps/server/src/memory/Layers/MemoryReactor.ts @@ -0,0 +1,228 @@ +/** + * MemoryReactor layer — autonomous memory extraction triggered by turn completion. + * + * Subscribes to `turn.processing.quiesced` receipts from the RuntimeReceiptBus. + * For each completed turn, debounces by threadId (30s), then generates a thread + * summary via Claude Haiku. Periodically triggers project + daily extraction. 
+ * + * @module memory/Layers/MemoryReactor + */ +import type { OrchestrationThread, ThreadId, TrimmedNonEmptyString } from "@t3tools/contracts"; +import { Cause, Duration, Effect, Fiber, Layer, Ref, Stream } from "effect"; + +import { runAgentQuery } from "../../llm/agentQuery.ts"; +import { MemoryRepository } from "../../persistence/Services/MemoryRepository.ts"; +import { ProjectionSnapshotQuery } from "../../orchestration/Services/ProjectionSnapshotQuery.ts"; +import { RuntimeReceiptBus } from "../../orchestration/Services/RuntimeReceiptBus.ts"; +import { MemoryExtraction } from "../Services/MemoryExtraction.ts"; +import { MemoryReactor, type MemoryReactorShape } from "../Services/MemoryReactor.ts"; +import { threadToTranscript } from "../threadTranscript.ts"; +import { + buildThreadSummaryPrompt, + THREAD_SUMMARY_SCHEMA, + THREAD_SUMMARY_SYSTEM_PROMPT, +} from "../prompts.ts"; + +// ── Configuration ───────────────────────────────────────────────── + +/** Debounce delay before summarizing a thread after the last turn completes. */ +const DEBOUNCE_MS = 30_000; + +/** Number of thread summaries before triggering project + daily extraction. */ +const EXTRACTION_TRIGGER_THRESHOLD = 10; + +/** Minimum interval between project+daily extractions (4 hours). */ +const EXTRACTION_INTERVAL_MS = 4 * 60 * 60 * 1000; + +// ── Parse helper ────────────────────────────────────────────────── + +interface ThreadSummaryResult { + title: string; + content: string; +} + +function parseThreadSummary(raw: unknown): ThreadSummaryResult { + const obj = raw as Record; + return { + title: typeof obj.title === "string" ? obj.title : "Thread summary", + content: typeof obj.content === "string" ? 
obj.content : "", + }; +} + +// ── Layer implementation ────────────────────────────────────────── + +const makeMemoryReactor = Effect.gen(function* () { + const receiptBus = yield* RuntimeReceiptBus; + const projectionQuery = yield* ProjectionSnapshotQuery; + const memoryRepo = yield* MemoryRepository; + const memoryExtraction = yield* MemoryExtraction; + + const start: MemoryReactorShape["start"] = Effect.gen(function* () { + // State: debounce fibers per thread, and extraction trigger counter + const pendingFibers = yield* Ref.make(new Map>()); + const summaryCounter = yield* Ref.make(0); + const lastExtractionAt = yield* Ref.make(0); + + /** Summarize a single thread and upsert its thread-scope memory. */ + const summarizeThread = (threadId: ThreadId) => + Effect.gen(function* () { + const snapshot = yield* projectionQuery.getSnapshot(); + const thread = snapshot.threads.find((t: OrchestrationThread) => t.id === threadId); + + if (!thread || thread.deletedAt !== null || thread.messages.length === 0) { + return; + } + + // Check if thread has new activity since last summary + const existingSummary = yield* memoryRepo + .findThreadSummary(threadId) + .pipe(Effect.catchCause(() => Effect.succeed(null))); + + if (existingSummary) { + const summaryTime = new Date(existingSummary.updatedAt).getTime(); + const latestMessageTime = Math.max( + ...thread.messages.map((m) => new Date(m.updatedAt).getTime()), + ); + if (latestMessageTime <= summaryTime) { + yield* Effect.logInfo( + `Skipping thread summary for "${thread.title}" — no new activity since last summary`, + ); + return; + } + } + + const transcript = threadToTranscript(thread); + if (transcript.messages.length === 0) return; + + // Get checkpoint files for richer summaries + const checkpointFiles = + thread.checkpoints.length > 0 + ? 
thread.checkpoints[thread.checkpoints.length - 1]?.files + : undefined; + + const prompt = buildThreadSummaryPrompt( + thread.title, + transcript.messages, + checkpointFiles as { path: string; kind: string }[] | undefined, + ); + + const result = yield* runAgentQuery( + "memoryReactor.threadSummary", + prompt, + THREAD_SUMMARY_SCHEMA as Record, + parseThreadSummary, + THREAD_SUMMARY_SYSTEM_PROMPT, + ).pipe( + Effect.catchCause((cause) => + Effect.logWarning( + `Thread summary LLM call failed for ${threadId}: ${Cause.pretty(cause)}`, + ).pipe(Effect.map(() => null)), + ), + ); + + if (!result || !result.content.trim()) return; + + yield* memoryRepo + .upsertThreadSummary({ + threadId, + projectId: thread.projectId, + title: result.title as typeof TrimmedNonEmptyString.Type, + content: result.content as typeof TrimmedNonEmptyString.Type, + }) + .pipe( + Effect.catchCause((cause) => + Effect.logWarning( + `Failed to upsert thread summary for ${threadId}: ${Cause.pretty(cause)}`, + ), + ), + ); + + yield* Effect.logInfo(`Thread summary created/updated: "${result.title}"`); + + // Increment counter for extraction trigger + yield* Ref.update(summaryCounter, (n) => n + 1); + }).pipe( + Effect.catchCause((cause) => + Effect.logWarning( + `Memory reactor: thread summary failed for ${threadId}: ${Cause.pretty(cause)}`, + ), + ), + ); + + /** Check if project + daily extraction should be triggered. 
*/ + const maybeRunExtraction = Effect.gen(function* () { + const count = yield* Ref.get(summaryCounter); + const lastAt = yield* Ref.get(lastExtractionAt); + const now = Date.now(); + + if (count < EXTRACTION_TRIGGER_THRESHOLD && now - lastAt < EXTRACTION_INTERVAL_MS) { + return; + } + + yield* Ref.set(summaryCounter, 0); + yield* Ref.set(lastExtractionAt, now); + + const sinceDate = new Date(now - EXTRACTION_INTERVAL_MS).toISOString(); + yield* Effect.logInfo( + `Memory reactor: triggering project + daily extraction (since ${sinceDate})`, + ); + + yield* memoryExtraction + .extract({ sinceDate }) + .pipe( + Effect.catchCause((cause) => + Effect.logWarning(`Memory reactor: project extraction failed: ${Cause.pretty(cause)}`), + ), + ); + }); + + /** Handle a quiesced receipt: debounce by threadId, then summarize. */ + const handleQuiesced = (threadId: ThreadId) => + Effect.gen(function* () { + const fibers = yield* Ref.get(pendingFibers); + + // Cancel any existing debounce fiber for this thread + const existing = fibers.get(threadId); + if (existing) { + yield* Fiber.interrupt(existing); + } + + // Fork a new debounced fiber + const fiber = yield* Effect.forkScoped( + Effect.gen(function* () { + yield* Effect.sleep(Duration.millis(DEBOUNCE_MS)); + // Remove self from pending map + yield* Ref.update(pendingFibers, (m) => { + const next = new Map(m); + next.delete(threadId); + return next; + }); + yield* summarizeThread(threadId); + yield* maybeRunExtraction; + }), + ); + + yield* Ref.update(pendingFibers, (m) => { + const next = new Map(m); + next.set(threadId, fiber); + return next; + }); + }); + + // Subscribe to receipt bus and process quiesced events + yield* Effect.forkScoped( + Stream.runForEach(receiptBus.stream, (receipt) => { + if (receipt.type !== "turn.processing.quiesced") { + return Effect.void; + } + return handleQuiesced(receipt.threadId); + }), + ); + + yield* Effect.logInfo("Memory reactor started"); + }); + + return { start } satisfies 
MemoryReactorShape; +}); + +export const MemoryReactorLive = Layer.effect(MemoryReactor, makeMemoryReactor); diff --git a/apps/server/src/memory/Services/MemoryExtraction.ts b/apps/server/src/memory/Services/MemoryExtraction.ts new file mode 100644 index 0000000000..5ef66b6533 --- /dev/null +++ b/apps/server/src/memory/Services/MemoryExtraction.ts @@ -0,0 +1,44 @@ +/** + * MemoryExtraction - Service interface for batch memory extraction from threads. + * + * Gathers recent conversation threads, runs LLM summarization, and stores + * extracted memories. + * + * @module MemoryExtraction + */ +import type { MemoryExtractInput } from "@t3tools/contracts"; +import { Schema, ServiceMap } from "effect"; +import type { Effect } from "effect"; + +export class MemoryExtractionError extends Schema.TaggedErrorClass()( + "MemoryExtractionError", + { + operation: Schema.String, + detail: Schema.String, + cause: Schema.optional(Schema.Defect), + }, +) { + override get message(): string { + return `Memory extraction failed in ${this.operation}: ${this.detail}`; + } +} + +export interface MemoryExtractionShape { + /** + * Extract memories from threads updated since `sinceDate`. + * + * Runs two extraction passes: + * 1. Per-project extraction → project-scoped memories + * 2. Global daily summary → daily-scoped memories + */ + readonly extract: ( + input: MemoryExtractInput, + ) => Effect.Effect< + { extractedCount: number; skippedDuplicates: number; projectsProcessed: number }, + MemoryExtractionError + >; +} + +export class MemoryExtraction extends ServiceMap.Service()( + "t3/memory/Services/MemoryExtraction", +) {} diff --git a/apps/server/src/memory/Services/MemoryReactor.ts b/apps/server/src/memory/Services/MemoryReactor.ts new file mode 100644 index 0000000000..088659626c --- /dev/null +++ b/apps/server/src/memory/Services/MemoryReactor.ts @@ -0,0 +1,26 @@ +/** + * MemoryReactor - Autonomous memory extraction reactor service interface. 
+ * + * Reacts to turn completion events and automatically generates thread + * summaries and triggers periodic project/daily extraction. + * + * @module MemoryReactor + */ +import { ServiceMap } from "effect"; +import type { Effect, Scope } from "effect"; + +export interface MemoryReactorShape { + /** + * Start the memory reactor. + * + * Subscribes to `turn.processing.quiesced` receipts and auto-generates + * thread summaries. Periodically triggers project + daily extraction. + * + * Must be run in a scope for fiber cleanup on shutdown. + */ + readonly start: Effect.Effect; +} + +export class MemoryReactor extends ServiceMap.Service()( + "t3/memory/Services/MemoryReactor", +) {} diff --git a/apps/server/src/memory/prompts.ts b/apps/server/src/memory/prompts.ts new file mode 100644 index 0000000000..ed65b8de4a --- /dev/null +++ b/apps/server/src/memory/prompts.ts @@ -0,0 +1,222 @@ +/** + * Prompt templates and JSON schemas for memory extraction LLM calls. + * + * @module memory/prompts + */ + +// ── JSON Schemas for structured output ───────────────────────────── + +export const PROJECT_EXTRACTION_SCHEMA = { + type: "object", + properties: { + memories: { + type: "array", + items: { + type: "object", + properties: { + title: { type: "string", description: "Concise title (under 80 chars)" }, + content: { type: "string", description: "Specific, actionable detail (1-3 sentences)" }, + category: { + type: "string", + enum: ["preference", "pattern", "decision", "fact", "convention"], + }, + }, + required: ["title", "content", "category"], + }, + }, + }, + required: ["memories"], +} as const; + +export const DAILY_SUMMARY_SCHEMA = { + type: "object", + properties: { + entries: { + type: "array", + items: { + type: "object", + properties: { + title: { type: "string", description: "Daily summary title" }, + content: { type: "string", description: "2-5 bullet points of what was accomplished" }, + /** Omit for the global summary entry. 
*/ + projectTitle: { type: "string", description: "Project name this entry belongs to" }, + }, + required: ["title", "content"], + }, + }, + }, + required: ["entries"], +} as const; + +// ── System prompts ───────────────────────────────────────────────── + +export const PROJECT_EXTRACTION_SYSTEM_PROMPT = `You extract actionable development knowledge from conversations. These memories will be injected into AI agent context in future sessions, so quality matters more than quantity. + +Categories: +- "decision": An explicit choice between alternatives WITH rationale. E.g. "Chose SQLite over Postgres for local persistence — no external deps needed for desktop app" +- "convention": A specific rule that must be followed. E.g. "Prefix projection tables with projection_", "Use Effect.catchCause not Effect.catchAll in this Effect version" +- "pattern": A non-obvious implementation approach. E.g. "Use for...of with yield* in Effect.gen, not Effect.forEach for loops with side effects" +- "preference": A team/personal preference about workflow. E.g. "Prefer single bundled PRs for refactors over many small ones" +- "fact": A non-obvious fact that would cause bugs if unknown. E.g. "SQLite FTS5 requires trigger-based sync for content tables" + +Quality bar — ONLY extract knowledge that: +1. Would CHANGE how an agent works on this project (not just describe what exists) +2. Is NOT derivable by reading the code (the agent can already do that) +3. Is ACTIONABLE — tells you what to do or avoid, not just what something is +4. 
Is NON-OBVIOUS — an experienced developer wouldn't already know this + +BAD examples (do NOT extract these): +- "The project uses strict TypeScript" (obvious from tsconfig) +- "The API is minimal" (agent can see the code) +- "Architecture is documented in CLAUDE.md" (agent already reads CLAUDE.md) +- Descriptions of what code does (the code itself is the source of truth) + +GOOD examples: +- "Module Federation names cannot contain hyphens — rspack treats them as JS variables" +- "Effect.catchAll does not exist in this Effect version — use Effect.catch instead" +- "Playwright E2E tests must use the isolated Hono server pattern in examples/*/e2e.test.ts" + +Rules: +- Maximum 5 memories per extraction — prefer fewer high-quality over many low-quality +- Each memory must be self-contained with enough context to act on +- Title: imperative phrase (under 80 chars) +- Content: 1-3 sentences of specific, actionable detail +- Return empty array if no knowledge meets the quality bar +- Do NOT produce near-duplicate entries`; + +export const DAILY_SUMMARY_SYSTEM_PROMPT = `You produce concise daily work summaries for a software developer. +Your output will be captured as structured JSON automatically. 
+ +Rules: +- Produce one entry per project that had activity (set projectTitle to the project name) +- Also produce one overall entry summarizing ALL projects (omit projectTitle for this one) +- Title format for project entries: "{date} - {project}: {brief summary}" +- Title format for overall entry: "{date} - Overall: {brief summary}" +- Content: 2-5 bullet points of what was accomplished (use "- " prefix for bullets) +- Be specific — mention features, files, components, or bug fixes worked on +- Focus on outcomes (what was done), not process (how it was done) +- If a project had minimal activity, keep it to 1-2 bullets`; + +// ── Thread Summary ──────────────────────────────────────────────── + +export const THREAD_SUMMARY_SCHEMA = { + type: "object", + properties: { + title: { + type: "string", + description: "Concise outcome summary (under 80 chars)", + }, + content: { + type: "string", + description: "Outcome-focused summary, 3-5 sentences", + }, + }, + required: ["title", "content"], +} as const; + +export const THREAD_SUMMARY_SYSTEM_PROMPT = `You summarize software development conversation threads into concise outcome-focused summaries. +Your output will be captured as structured JSON automatically. + +Rules: +- Focus on OUTCOMES: what was accomplished, what changed, what was decided +- Mention specific files, components, features, or bugs that were worked on +- Note the final status: completed, in progress, blocked, or abandoned +- Include key decisions made during the conversation +- 3-5 sentences for the content field +- Title: concise outcome phrase (under 80 chars), e.g. 
"Added memory extraction with Claude Haiku" +- Do NOT describe the conversation process — describe the RESULT`; + +export function buildThreadSummaryPrompt( + threadTitle: string, + messages: readonly { role: string; text: string }[], + checkpointFiles?: readonly { path: string; kind: string }[], +): string { + const parts: string[] = [ + `Thread: ${threadTitle}\n\nSummarize the outcome of this conversation.\n`, + ]; + + if (checkpointFiles && checkpointFiles.length > 0) { + parts.push(`\nFiles changed: ${checkpointFiles.map((f) => f.path).join(", ")}\n`); + } + + const formatted = messages.map((m) => `[${m.role}]: ${m.text}`).join("\n\n"); + // Cap at 30k chars to leave room for system prompt + const truncated = + formatted.length > 30_000 ? formatted.slice(0, 30_000) + "\n\n[... truncated ...]" : formatted; + + parts.push(`\n${truncated}`); + return parts.join(""); +} + +// ── Prompt builders ──────────────────────────────────────────────── + +import type { ThreadTranscript } from "./threadTranscript.ts"; + +const MAX_PROMPT_CHARS = 40_000; + +function formatMessages(messages: readonly { role: string; text: string }[]): string { + return messages.map((m) => `[${m.role}]: ${m.text}`).join("\n\n"); +} + +function truncateToLimit(text: string, limit: number): string { + if (text.length <= limit) return text; + return text.slice(0, limit) + "\n\n[... truncated ...]"; +} + +export function buildProjectExtractionPrompt( + projectTitle: string, + threads: readonly ThreadTranscript[], +): string { + const parts: string[] = [ + `Project: ${projectTitle}\n\nBelow are recent conversation threads. 
Extract durable project knowledge.\n`, + ]; + + let totalChars = parts[0]!.length; + + for (const thread of threads) { + const formatted = formatMessages(thread.messages); + const block = `\n--- THREAD: ${thread.threadTitle} ---\n${formatted}\n--- END THREAD ---\n`; + + if (totalChars + block.length > MAX_PROMPT_CHARS) { + const remaining = MAX_PROMPT_CHARS - totalChars; + if (remaining > 200) { + parts.push(truncateToLimit(block, remaining)); + } + break; + } + + parts.push(block); + totalChars += block.length; + } + + return parts.join(""); +} + +export function buildDailySummaryPrompt( + projectSummaries: readonly { projectTitle: string; threadTitles: string[] }[], + date: string, + existingDailySummaries?: readonly { title: string; content: string }[], +): string { + const parts: string[] = [`Date: ${date}\n\nProjects with activity today:\n`]; + + for (const project of projectSummaries) { + parts.push(`\n--- Project: ${project.projectTitle} ---`); + parts.push(`Thread topics: ${project.threadTitles.join(", ")}`); + parts.push(`--- End Project ---\n`); + } + + if (existingDailySummaries && existingDailySummaries.length > 0) { + parts.push(`\n--- Earlier summaries from today (incorporate and build on these) ---`); + for (const existing of existingDailySummaries) { + parts.push(`\nTitle: ${existing.title}`); + parts.push(`Content: ${existing.content}`); + } + parts.push(`--- End earlier summaries ---\n`); + } + + parts.push( + `\nProduce a COMPLETE daily summary entry for each project listed above, plus one overall daily entry summarizing all projects. 
Incorporate any earlier summaries from today — combine them with the new activity into a single comprehensive summary per project.`, + ); + + return parts.join("\n"); +} diff --git a/apps/server/src/memory/threadTranscript.ts b/apps/server/src/memory/threadTranscript.ts new file mode 100644 index 0000000000..6dd70f6bab --- /dev/null +++ b/apps/server/src/memory/threadTranscript.ts @@ -0,0 +1,19 @@ +/** + * Shared helper for converting orchestration threads into prompt-ready transcripts. + * + * @module memory/threadTranscript + */ +import type { OrchestrationMessage, OrchestrationThread } from "@t3tools/contracts"; + +export interface ThreadTranscript { + threadTitle: string; + messages: readonly { role: string; text: string }[]; +} + +/** Convert an orchestration thread into a prompt-ready transcript. */ +export function threadToTranscript(thread: OrchestrationThread): ThreadTranscript { + const messages = thread.messages + .filter((m: OrchestrationMessage) => !m.streaming && m.text.trim().length > 0) + .map((m: OrchestrationMessage) => ({ role: m.role, text: m.text })); + return { threadTitle: thread.title, messages }; +} diff --git a/apps/server/src/open.ts b/apps/server/src/open.ts index e7238c04b2..89ccd2d717 100644 --- a/apps/server/src/open.ts +++ b/apps/server/src/open.ts @@ -7,10 +7,16 @@ * @module Open */ import { spawn } from "node:child_process"; -import { accessSync, constants, statSync } from "node:fs"; +import { accessSync, constants, existsSync, statSync } from "node:fs"; +import { homedir } from "node:os"; import { extname, join } from "node:path"; -import { EDITORS, type EditorId } from "@t3tools/contracts"; +import { + EDITORS, + type EditorId, + type OpenInTerminalInput, + type TerminalId, +} from "@t3tools/contracts"; import { ServiceMap, Schema, Effect, Layer } from "effect"; // ============================== @@ -177,6 +183,32 @@ export function resolveAvailableEditors( return available; } +function isAppInstalled(appName: string): boolean { + 
return ( + existsSync(`/Applications/${appName}.app`) || + existsSync(`${homedir()}/Applications/${appName}.app`) + ); +} + +const TERMINAL_DETECTION: ReadonlyArray<{ id: TerminalId; check: () => boolean }> = [ + { id: "terminal-app", check: () => process.platform === "darwin" }, + { id: "iterm2", check: () => isAppInstalled("iTerm") }, + { id: "warp", check: () => isAppInstalled("Warp") }, + { id: "ghostty", check: () => isAppInstalled("Ghostty") }, + { id: "kitty", check: () => isCommandAvailable("kitty") }, + { id: "alacritty", check: () => isCommandAvailable("alacritty") }, +]; + +export function resolveAvailableTerminals(): ReadonlyArray { + const available: TerminalId[] = []; + for (const entry of TERMINAL_DETECTION) { + if (entry.check()) { + available.push(entry.id); + } + } + return available; +} + /** * OpenShape - Service API for browser and editor launch actions. */ @@ -192,6 +224,11 @@ export interface OpenShape { * Launches the editor as a detached process so server startup is not blocked. */ readonly openInEditor: (input: OpenInEditorInput) => Effect.Effect; + + /** + * Open a terminal emulator, optionally resuming a Claude Code session. + */ + readonly openInWarp: (input: OpenInTerminalInput) => Effect.Effect; } /** @@ -257,6 +294,102 @@ export const launchDetached = (launch: EditorLaunch) => }); }); +function escapeAppleScript(value: string): string { + return value.replace(/\\/g, "\\\\").replace(/"/g, '\\"'); +} + +const resolveTerminalLaunch = ( + terminal: TerminalId, + script: string, + cwd: string, +): Effect.Effect => { + const shell = process.env.SHELL ?? 
"/bin/zsh"; + + switch (terminal) { + case "terminal-app": + return launchDetached({ + command: "osascript", + args: [ + "-e", + `tell application "Terminal"`, + "-e", + `activate`, + "-e", + `do script "${escapeAppleScript(script)}"`, + "-e", + `end tell`, + ], + }); + + case "iterm2": + return launchDetached({ + command: "osascript", + args: [ + "-e", + `tell application "iTerm2"`, + "-e", + `create window with default profile command "${escapeAppleScript(script)}"`, + "-e", + `end tell`, + ], + }); + + case "warp": + return launchDetached({ + command: "osascript", + args: [ + "-e", + `tell application "Warp" to activate`, + "-e", + `tell application "System Events" to tell process "Warp"`, + "-e", + `keystroke "t" using command down`, + "-e", + `delay 0.5`, + "-e", + `keystroke "${escapeAppleScript(script)}"`, + "-e", + `key code 36`, + "-e", + `end tell`, + ], + }); + + case "ghostty": + return launchDetached({ + command: "osascript", + args: [ + "-e", + `tell application "Ghostty" to activate`, + "-e", + `tell application "System Events" to tell process "Ghostty"`, + "-e", + `keystroke "t" using command down`, + "-e", + `delay 0.5`, + "-e", + `keystroke "${escapeAppleScript(script)}"`, + "-e", + `key code 36`, + "-e", + `end tell`, + ], + }); + + case "kitty": + return launchDetached({ + command: "kitty", + args: ["--directory", cwd, shell, "-c", script], + }); + + case "alacritty": + return launchDetached({ + command: "alacritty", + args: ["--working-directory", cwd, "-e", shell, "-c", script], + }); + } +}; + const make = Effect.gen(function* () { const open = yield* Effect.tryPromise({ try: () => import("open"), @@ -270,6 +403,15 @@ const make = Effect.gen(function* () { catch: (cause) => new OpenError({ message: "Browser auto-open failed", cause }), }), openInEditor: (input) => Effect.flatMap(resolveEditorLaunch(input), launchDetached), + openInWarp: (input) => + Effect.gen(function* () { + const terminal: TerminalId = input.terminal ?? 
"terminal-app"; + const claudeArgs = input.sessionId ? ` --resume ${input.sessionId}` : ""; + const escapedCwd = input.cwd.replace(/\\/g, "\\\\").replace(/"/g, '\\"'); + const script = `cd "${escapedCwd}" && claude${claudeArgs}`; + + yield* resolveTerminalLaunch(terminal, script, input.cwd); + }), } satisfies OpenShape; }); diff --git a/apps/server/src/orchestration/Layers/CheckpointReactor.test.ts b/apps/server/src/orchestration/Layers/CheckpointReactor.test.ts index 09773b71dc..1784970a01 100644 --- a/apps/server/src/orchestration/Layers/CheckpointReactor.test.ts +++ b/apps/server/src/orchestration/Layers/CheckpointReactor.test.ts @@ -44,7 +44,7 @@ const asTurnId = (value: string): TurnId => TurnId.makeUnsafe(value); type LegacyProviderRuntimeEvent = { readonly type: string; readonly eventId: EventId; - readonly provider: "codex"; + readonly provider: "codex" | "claudeCode" | "cursor"; readonly createdAt: string; readonly threadId: ThreadId; readonly turnId?: string | undefined; @@ -92,6 +92,7 @@ function createProviderServiceHarness( listSessions, getCapabilities: () => Effect.succeed({ sessionModelSwitch: "in-session" }), rollbackConversation, + stopAll: () => unsupported(), streamEvents: Stream.fromPubSub(runtimeEventPubSub), }; @@ -235,6 +236,7 @@ describe("CheckpointReactor", () => { readonly projectWorkspaceRoot?: string; readonly threadWorktreePath?: string | null; readonly providerSessionCwd?: string; + readonly providerName?: "codex" | "claudeCode"; }) { const cwd = createGitRepository(); tempDirs.push(cwd); @@ -242,7 +244,7 @@ describe("CheckpointReactor", () => { cwd, options?.hasSession ?? true, options?.providerSessionCwd ?? cwd, - "codex", + options?.providerName ?? 
"codex", ); const orchestrationLayer = OrchestrationEngineLive.pipe( Layer.provide(OrchestrationProjectionPipelineLive), @@ -340,6 +342,7 @@ describe("CheckpointReactor", () => { threadId: ThreadId.makeUnsafe("thread-1"), status: "ready", providerName: "codex", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: null, lastError: null, @@ -416,6 +419,7 @@ describe("CheckpointReactor", () => { threadId: ThreadId.makeUnsafe("thread-1"), status: "running", providerName: "codex", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: asTurnId("turn-main"), lastError: null, @@ -477,6 +481,68 @@ describe("CheckpointReactor", () => { expect(thread.checkpoints[0]?.checkpointTurnCount).toBe(1); }); + it("captures pre-turn and completion checkpoints for claudeCode runtime events", async () => { + const harness = await createHarness({ + seedFilesystemCheckpoints: false, + providerName: "claudeCode", + }); + const createdAt = new Date().toISOString(); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.session.set", + commandId: CommandId.makeUnsafe("cmd-session-set-capture-claude"), + threadId: ThreadId.makeUnsafe("thread-1"), + session: { + threadId: ThreadId.makeUnsafe("thread-1"), + status: "ready", + providerName: "claudeCode", + providerSessionId: null, + runtimeMode: "approval-required", + activeTurnId: null, + lastError: null, + updatedAt: createdAt, + }, + createdAt, + }), + ); + + harness.provider.emit({ + type: "turn.started", + eventId: EventId.makeUnsafe("evt-turn-started-claude-1"), + provider: "claudeCode", + createdAt: new Date().toISOString(), + threadId: ThreadId.makeUnsafe("thread-1"), + turnId: asTurnId("turn-claude-1"), + }); + await waitForGitRefExists( + harness.cwd, + checkpointRefForThreadTurn(ThreadId.makeUnsafe("thread-1"), 0), + ); + + fs.writeFileSync(path.join(harness.cwd, "README.md"), "v2\n", "utf8"); + harness.provider.emit({ + type: "turn.completed", + eventId: 
EventId.makeUnsafe("evt-turn-completed-claude-1"), + provider: "claudeCode", + createdAt: new Date().toISOString(), + threadId: ThreadId.makeUnsafe("thread-1"), + turnId: asTurnId("turn-claude-1"), + payload: { state: "completed" }, + }); + + await waitForEvent(harness.engine, (event) => event.type === "thread.turn-diff-completed"); + const thread = await waitForThread( + harness.engine, + (entry) => entry.latestTurn?.turnId === "turn-claude-1" && entry.checkpoints.length === 1, + ); + + expect(thread.checkpoints[0]?.checkpointTurnCount).toBe(1); + expect( + gitRefExists(harness.cwd, checkpointRefForThreadTurn(ThreadId.makeUnsafe("thread-1"), 1)), + ).toBe(true); + }); + it("appends capture failure activity when turn diff summary cannot be derived", async () => { const harness = await createHarness({ seedFilesystemCheckpoints: false }); const createdAt = new Date().toISOString(); @@ -490,6 +556,7 @@ describe("CheckpointReactor", () => { threadId: ThreadId.makeUnsafe("thread-1"), status: "ready", providerName: "codex", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: null, lastError: null, @@ -578,6 +645,7 @@ describe("CheckpointReactor", () => { threadId: ThreadId.makeUnsafe("thread-1"), status: "running", providerName: "codex", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: asTurnId("turn-missing-cwd"), lastError: null, @@ -625,6 +693,7 @@ describe("CheckpointReactor", () => { threadId: ThreadId.makeUnsafe("thread-1"), status: "ready", providerName: "codex", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: null, lastError: null, @@ -675,6 +744,7 @@ describe("CheckpointReactor", () => { threadId: ThreadId.makeUnsafe("thread-1"), status: "ready", providerName: "codex", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: null, lastError: null, @@ -727,6 +797,7 @@ describe("CheckpointReactor", () => { threadId: ThreadId.makeUnsafe("thread-1"), status: "ready", 
providerName: "codex", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: null, lastError: null, @@ -792,6 +863,76 @@ describe("CheckpointReactor", () => { ).toBe(false); }); + it("executes provider revert and emits thread.reverted for claudeCode sessions", async () => { + const harness = await createHarness({ providerName: "claudeCode" }); + const createdAt = new Date().toISOString(); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.session.set", + commandId: CommandId.makeUnsafe("cmd-session-set-claude"), + threadId: ThreadId.makeUnsafe("thread-1"), + session: { + threadId: ThreadId.makeUnsafe("thread-1"), + status: "ready", + providerName: "claudeCode", + providerSessionId: null, + runtimeMode: "approval-required", + activeTurnId: null, + lastError: null, + updatedAt: createdAt, + }, + createdAt, + }), + ); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.diff.complete", + commandId: CommandId.makeUnsafe("cmd-diff-claude-1"), + threadId: ThreadId.makeUnsafe("thread-1"), + turnId: asTurnId("turn-claude-1"), + completedAt: createdAt, + checkpointRef: checkpointRefForThreadTurn(ThreadId.makeUnsafe("thread-1"), 1), + status: "ready", + files: [], + checkpointTurnCount: 1, + createdAt, + }), + ); + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.diff.complete", + commandId: CommandId.makeUnsafe("cmd-diff-claude-2"), + threadId: ThreadId.makeUnsafe("thread-1"), + turnId: asTurnId("turn-claude-2"), + completedAt: createdAt, + checkpointRef: checkpointRefForThreadTurn(ThreadId.makeUnsafe("thread-1"), 2), + status: "ready", + files: [], + checkpointTurnCount: 2, + createdAt, + }), + ); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.checkpoint.revert", + commandId: CommandId.makeUnsafe("cmd-revert-request-claude"), + threadId: ThreadId.makeUnsafe("thread-1"), + turnCount: 1, + createdAt, + }), + ); + + await waitForEvent(harness.engine, (event) 
=> event.type === "thread.reverted"); + expect(harness.provider.rollbackConversation).toHaveBeenCalledTimes(1); + expect(harness.provider.rollbackConversation).toHaveBeenCalledWith({ + threadId: ThreadId.makeUnsafe("thread-1"), + numTurns: 1, + }); + }); + it("processes consecutive revert requests with deterministic rollback sequencing", async () => { const harness = await createHarness(); const createdAt = new Date().toISOString(); @@ -805,6 +946,7 @@ describe("CheckpointReactor", () => { threadId: ThreadId.makeUnsafe("thread-1"), status: "ready", providerName: "codex", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: null, lastError: null, diff --git a/apps/server/src/orchestration/Layers/ProjectionPipeline.test.ts b/apps/server/src/orchestration/Layers/ProjectionPipeline.test.ts index d15b2efa2e..b7b9223e5a 100644 --- a/apps/server/src/orchestration/Layers/ProjectionPipeline.test.ts +++ b/apps/server/src/orchestration/Layers/ProjectionPipeline.test.ts @@ -1778,6 +1778,7 @@ it.effect("restores pending turn-start metadata across projection pipeline resta threadId, status: "running", providerName: "codex", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: turnId, lastError: null, diff --git a/apps/server/src/orchestration/Layers/ProjectionPipeline.ts b/apps/server/src/orchestration/Layers/ProjectionPipeline.ts index 6ae94105a6..f2a5809a7d 100644 --- a/apps/server/src/orchestration/Layers/ProjectionPipeline.ts +++ b/apps/server/src/orchestration/Layers/ProjectionPipeline.ts @@ -2,12 +2,15 @@ import { ApprovalRequestId, type ChatAttachment, type OrchestrationEvent, + type ThreadId, } from "@t3tools/contracts"; import * as NodeServices from "@effect/platform-node/NodeServices"; import { Effect, FileSystem, Layer, Option, Path, Stream } from "effect"; import * as SqlClient from "effect/unstable/sql/SqlClient"; import { toPersistenceSqlError, type ProjectionRepositoryError } from "../../persistence/Errors.ts"; +import { 
ReviewCommentRepository } from "../../persistence/Services/ReviewCommentRepository.ts"; +import { ReviewCommentRepositoryLive } from "../../persistence/Layers/ReviewCommentRepository.ts"; import { OrchestrationEventStore } from "../../persistence/Services/OrchestrationEventStore.ts"; import { ProjectionPendingApprovalRepository } from "../../persistence/Services/ProjectionPendingApprovals.ts"; import { ProjectionProjectRepository } from "../../persistence/Services/ProjectionProjects.ts"; @@ -349,6 +352,7 @@ const makeOrchestrationProjectionPipeline = Effect.gen(function* () { const projectionThreadSessionRepository = yield* ProjectionThreadSessionRepository; const projectionTurnRepository = yield* ProjectionTurnRepository; const projectionPendingApprovalRepository = yield* ProjectionPendingApprovalRepository; + const reviewCommentRepository = yield* ReviewCommentRepository; const fileSystem = yield* FileSystem.FileSystem; const path = yield* Path.Path; @@ -425,6 +429,9 @@ const makeOrchestrationProjectionPipeline = Effect.gen(function* () { interactionMode: event.payload.interactionMode, branch: event.payload.branch, worktreePath: event.payload.worktreePath, + jiraTicketJson: event.payload.linkedJiraTicket + ? JSON.stringify(event.payload.linkedJiraTicket) + : null, latestTurnId: null, createdAt: event.payload.createdAt, updatedAt: event.payload.updatedAt, @@ -447,6 +454,13 @@ const makeOrchestrationProjectionPipeline = Effect.gen(function* () { ...(event.payload.worktreePath !== undefined ? { worktreePath: event.payload.worktreePath } : {}), + ...(event.payload.linkedJiraTicket !== undefined + ? { + jiraTicketJson: event.payload.linkedJiraTicket + ? JSON.stringify(event.payload.linkedJiraTicket) + : null, + } + : {}), updatedAt: event.payload.updatedAt, }); return; @@ -710,6 +724,8 @@ const makeOrchestrationProjectionPipeline = Effect.gen(function* () { ...(event.payload.activity.sequence !== undefined ? 
{ sequence: event.payload.activity.sequence } : {}), + parentToolUseId: event.payload.activity.parentToolUseId ?? null, + itemId: event.payload.activity.itemId ?? null, createdAt: event.payload.activity.createdAt, }); return; @@ -758,6 +774,7 @@ const makeOrchestrationProjectionPipeline = Effect.gen(function* () { threadId: event.payload.threadId, status: event.payload.session.status, providerName: event.payload.session.providerName, + providerSessionId: event.payload.session.providerSessionId, runtimeMode: event.payload.session.runtimeMode, activeTurnId: event.payload.session.activeTurnId, lastError: event.payload.session.lastError, @@ -1159,6 +1176,21 @@ const makeOrchestrationProjectionPipeline = Effect.gen(function* () { }), ), ); + + // Clean up review comments for deleted threads. + yield* Effect.forEach( + attachmentSideEffects.deletedThreadIds, + (threadId) => + reviewCommentRepository.deleteByThreadId({ threadId: threadId as ThreadId }).pipe( + Effect.catch((cause) => + Effect.logWarning("failed to clean up review comments for deleted thread", { + threadId, + cause, + }), + ), + ), + { concurrency: 1 }, + ); }); const bootstrapProjector = (projector: ProjectorDefinition) => @@ -1229,4 +1261,5 @@ export const OrchestrationProjectionPipelineLive = Layer.effect( Layer.provideMerge(ProjectionTurnRepositoryLive), Layer.provideMerge(ProjectionPendingApprovalRepositoryLive), Layer.provideMerge(ProjectionStateRepositoryLive), + Layer.provideMerge(ReviewCommentRepositoryLive), ); diff --git a/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.test.ts b/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.test.ts index fc7db54802..603c3edb56 100644 --- a/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.test.ts +++ b/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.test.ts @@ -280,11 +280,13 @@ projectionSnapshotLayer("ProjectionSnapshotQuery", (it) => { threadId: ThreadId.makeUnsafe("thread-1"), status: "running", 
providerName: "codex", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: asTurnId("turn-1"), lastError: null, updatedAt: "2026-02-24T00:00:07.000Z", }, + linkedJiraTicket: null, }, ]); }), diff --git a/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.ts b/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.ts index 5fd38a5401..661b6123e1 100644 --- a/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.ts +++ b/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.ts @@ -161,6 +161,7 @@ const makeProjectionSnapshotQuery = Effect.gen(function* () { interaction_mode AS "interactionMode", branch, worktree_path AS "worktreePath", + jira_ticket_json AS "jiraTicketJson", latest_turn_id AS "latestTurnId", created_at AS "createdAt", updated_at AS "updatedAt", @@ -221,6 +222,8 @@ const makeProjectionSnapshotQuery = Effect.gen(function* () { summary, payload_json AS "payload", sequence, + parent_tool_use_id AS "parentToolUseId", + item_id AS "itemId", created_at AS "createdAt" FROM projection_thread_activities ORDER BY @@ -452,6 +455,8 @@ const makeProjectionSnapshotQuery = Effect.gen(function* () { payload: row.payload, turnId: row.turnId, ...(row.sequence !== null ? { sequence: row.sequence } : {}), + ...(row.parentToolUseId ? { parentToolUseId: row.parentToolUseId } : {}), + ...(row.itemId ? 
{ itemId: row.itemId } : {}), createdAt: row.createdAt, }); activitiesByThread.set(row.threadId, threadActivities); @@ -506,6 +511,7 @@ const makeProjectionSnapshotQuery = Effect.gen(function* () { threadId: row.threadId, status: row.status, providerName: row.providerName, + providerSessionId: row.providerSessionId, runtimeMode: row.runtimeMode, activeTurnId: row.activeTurnId, lastError: row.lastError, @@ -524,25 +530,36 @@ const makeProjectionSnapshotQuery = Effect.gen(function* () { deletedAt: row.deletedAt, })); - const threads: Array = threadRows.map((row) => ({ - id: row.threadId, - projectId: row.projectId, - title: row.title, - model: row.model, - runtimeMode: row.runtimeMode, - interactionMode: row.interactionMode, - branch: row.branch, - worktreePath: row.worktreePath, - latestTurn: latestTurnByThread.get(row.threadId) ?? null, - createdAt: row.createdAt, - updatedAt: row.updatedAt, - deletedAt: row.deletedAt, - messages: messagesByThread.get(row.threadId) ?? [], - proposedPlans: proposedPlansByThread.get(row.threadId) ?? [], - activities: activitiesByThread.get(row.threadId) ?? [], - checkpoints: checkpointsByThread.get(row.threadId) ?? [], - session: sessionsByThread.get(row.threadId) ?? null, - })); + const threads: Array = threadRows.map((row) => { + let linkedJiraTicket: OrchestrationThread["linkedJiraTicket"] = null; + if (row.jiraTicketJson) { + try { + linkedJiraTicket = JSON.parse(row.jiraTicketJson); + } catch { + linkedJiraTicket = null; + } + } + return { + id: row.threadId, + projectId: row.projectId, + title: row.title, + model: row.model, + runtimeMode: row.runtimeMode, + interactionMode: row.interactionMode, + branch: row.branch, + worktreePath: row.worktreePath, + latestTurn: latestTurnByThread.get(row.threadId) ?? null, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + deletedAt: row.deletedAt, + messages: messagesByThread.get(row.threadId) ?? [], + proposedPlans: proposedPlansByThread.get(row.threadId) ?? 
[], + activities: activitiesByThread.get(row.threadId) ?? [], + checkpoints: checkpointsByThread.get(row.threadId) ?? [], + session: sessionsByThread.get(row.threadId) ?? null, + linkedJiraTicket, + }; + }); const snapshot = { snapshotSequence: computeSnapshotSequence(stateRows), diff --git a/apps/server/src/orchestration/Layers/ProviderCommandReactor.test.ts b/apps/server/src/orchestration/Layers/ProviderCommandReactor.test.ts index 8de44d78f9..4fce3cc5ab 100644 --- a/apps/server/src/orchestration/Layers/ProviderCommandReactor.test.ts +++ b/apps/server/src/orchestration/Layers/ProviderCommandReactor.test.ts @@ -96,7 +96,9 @@ describe("ProviderCommandReactor", () => { typeof input === "object" && input !== null && "provider" in input && - input.provider === "codex" + (input.provider === "codex" || + input.provider === "claudeCode" || + input.provider === "cursor") ? input.provider : "codex"; const resumeCursor = @@ -191,9 +193,10 @@ describe("ProviderCommandReactor", () => { listSessions: () => Effect.succeed(runtimeSessions), getCapabilities: (provider) => Effect.succeed({ - sessionModelSwitch: provider === "codex" ? "in-session" : "in-session", + sessionModelSwitch: provider === "cursor" ? 
"unsupported" : "in-session", }), rollbackConversation: () => unsupported(), + stopAll: () => unsupported(), streamEvents: Stream.fromPubSub(runtimeEventPubSub), }; @@ -389,6 +392,80 @@ describe("ProviderCommandReactor", () => { }); }); + it("starts first turn with requested provider when provider is specified", async () => { + const harness = await createHarness(); + const now = new Date().toISOString(); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.start", + commandId: CommandId.makeUnsafe("cmd-turn-start-provider-first"), + threadId: ThreadId.makeUnsafe("thread-1"), + message: { + messageId: asMessageId("user-message-provider-first"), + role: "user", + text: "hello claude", + attachments: [], + }, + provider: "claudeCode", + interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, + runtimeMode: "approval-required", + createdAt: now, + }), + ); + + await waitFor(() => harness.startSession.mock.calls.length === 1); + await waitFor(() => harness.sendTurn.mock.calls.length === 1); + expect(harness.startSession.mock.calls[0]?.[1]).toMatchObject({ + provider: "claudeCode", + cwd: "/tmp/provider-project", + model: "gpt-5-codex", + runtimeMode: "approval-required", + }); + + const readModel = await Effect.runPromise(harness.engine.getReadModel()); + const thread = readModel.threads.find((entry) => entry.id === ThreadId.makeUnsafe("thread-1")); + expect(thread?.session?.providerName).toBe("claudeCode"); + expect(thread?.session?.threadId).toBe("thread-1"); + }); + + it("starts first turn with cursor provider when provider is specified", async () => { + const harness = await createHarness(); + const now = new Date().toISOString(); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.start", + commandId: CommandId.makeUnsafe("cmd-turn-start-provider-cursor"), + threadId: ThreadId.makeUnsafe("thread-1"), + message: { + messageId: asMessageId("user-message-provider-cursor"), + role: "user", + text: "hello cursor", + 
attachments: [], + }, + provider: "cursor", + interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, + runtimeMode: "approval-required", + createdAt: now, + }), + ); + + await waitFor(() => harness.startSession.mock.calls.length === 1); + await waitFor(() => harness.sendTurn.mock.calls.length === 1); + expect(harness.startSession.mock.calls[0]?.[1]).toMatchObject({ + provider: "cursor", + cwd: "/tmp/provider-project", + model: "gpt-5-codex", + runtimeMode: "approval-required", + }); + + const readModel = await Effect.runPromise(harness.engine.getReadModel()); + const thread = readModel.threads.find((entry) => entry.id === ThreadId.makeUnsafe("thread-1")); + expect(thread?.session?.providerName).toBe("cursor"); + expect(thread?.session?.threadId).toBe("thread-1"); + }); + it("reuses the same provider session when runtime mode is unchanged", async () => { const harness = await createHarness(); const now = new Date().toISOString(); @@ -435,6 +512,121 @@ describe("ProviderCommandReactor", () => { expect(harness.stopSession.mock.calls.length).toBe(0); }); + it("reuses the same cursor session when requested model is unchanged", async () => { + const harness = await createHarness(); + const now = new Date().toISOString(); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.runtime-mode.set", + commandId: CommandId.makeUnsafe("cmd-runtime-mode-set-initial-full-access"), + threadId: ThreadId.makeUnsafe("thread-1"), + runtimeMode: "full-access", + createdAt: now, + }), + ); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.start", + commandId: CommandId.makeUnsafe("cmd-turn-start-cursor-model-same-1"), + threadId: ThreadId.makeUnsafe("thread-1"), + message: { + messageId: asMessageId("user-message-cursor-model-same-1"), + role: "user", + text: "first", + attachments: [], + }, + provider: "cursor", + model: "composer-1.5", + interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, + runtimeMode: "approval-required", + createdAt: now, + 
}), + ); + + await waitFor(() => harness.startSession.mock.calls.length === 1); + await waitFor(() => harness.sendTurn.mock.calls.length === 1); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.start", + commandId: CommandId.makeUnsafe("cmd-turn-start-cursor-model-same-2"), + threadId: ThreadId.makeUnsafe("thread-1"), + message: { + messageId: asMessageId("user-message-cursor-model-same-2"), + role: "user", + text: "second", + attachments: [], + }, + provider: "cursor", + model: "composer-1.5", + interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, + runtimeMode: "approval-required", + createdAt: now, + }), + ); + + await waitFor(() => harness.sendTurn.mock.calls.length === 2); + expect(harness.startSession.mock.calls.length).toBe(1); + expect(harness.stopSession.mock.calls.length).toBe(0); + }); + + it("keeps cursor session/model when model change is unsupported", async () => { + const harness = await createHarness(); + const now = new Date().toISOString(); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.start", + commandId: CommandId.makeUnsafe("cmd-turn-start-cursor-model-change-1"), + threadId: ThreadId.makeUnsafe("thread-1"), + message: { + messageId: asMessageId("user-message-cursor-model-change-1"), + role: "user", + text: "first", + attachments: [], + }, + provider: "cursor", + model: "gpt-5.3-codex", + interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, + runtimeMode: "approval-required", + createdAt: now, + }), + ); + + await waitFor(() => harness.startSession.mock.calls.length === 1); + await waitFor(() => harness.sendTurn.mock.calls.length === 1); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.start", + commandId: CommandId.makeUnsafe("cmd-turn-start-cursor-model-change-2"), + threadId: ThreadId.makeUnsafe("thread-1"), + message: { + messageId: asMessageId("user-message-cursor-model-change-2"), + role: "user", + text: "second", + attachments: [], + }, + provider: 
"cursor", + model: "composer-1.5", + interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, + runtimeMode: "approval-required", + createdAt: now, + }), + ); + + await waitFor(() => harness.sendTurn.mock.calls.length === 2); + + expect(harness.stopSession.mock.calls.length).toBe(0); + expect(harness.startSession.mock.calls.length).toBe(1); + expect(harness.sendTurn.mock.calls[1]?.[0]).toMatchObject({ + threadId: ThreadId.makeUnsafe("thread-1"), + model: "gpt-5.3-codex", + }); + }); + it("restarts the provider session when runtime mode is updated on the thread", async () => { const harness = await createHarness(); const now = new Date().toISOString(); @@ -522,6 +714,66 @@ describe("ProviderCommandReactor", () => { expect(thread?.session?.runtimeMode).toBe("approval-required"); }); + it("switches provider by restarting the session when turn request provider changes", async () => { + const harness = await createHarness(); + const now = new Date().toISOString(); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.start", + commandId: CommandId.makeUnsafe("cmd-turn-start-provider-switch-1"), + threadId: ThreadId.makeUnsafe("thread-1"), + message: { + messageId: asMessageId("user-message-provider-switch-1"), + role: "user", + text: "first", + attachments: [], + }, + interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, + runtimeMode: "approval-required", + createdAt: now, + }), + ); + + await waitFor(() => harness.startSession.mock.calls.length === 1); + await waitFor(() => harness.sendTurn.mock.calls.length === 1); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.start", + commandId: CommandId.makeUnsafe("cmd-turn-start-provider-switch-2"), + threadId: ThreadId.makeUnsafe("thread-1"), + message: { + messageId: asMessageId("user-message-provider-switch-2"), + role: "user", + text: "second", + attachments: [], + }, + provider: "claudeCode", + interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, + runtimeMode: 
"approval-required", + createdAt: now, + }), + ); + + await waitFor(() => harness.startSession.mock.calls.length === 2); + await waitFor(() => harness.sendTurn.mock.calls.length === 2); + + expect(harness.stopSession.mock.calls.length).toBe(0); + expect(harness.startSession.mock.calls[1]?.[1]).toMatchObject({ + threadId: ThreadId.makeUnsafe("thread-1"), + provider: "claudeCode", + runtimeMode: "approval-required", + }); + expect(harness.startSession.mock.calls[1]?.[1]).not.toHaveProperty("resumeCursor"); + + const readModel = await Effect.runPromise(harness.engine.getReadModel()); + const thread = readModel.threads.find((entry) => entry.id === ThreadId.makeUnsafe("thread-1")); + expect(thread?.session?.threadId).toBe("thread-1"); + expect(thread?.session?.providerName).toBe("claudeCode"); + expect(thread?.session?.runtimeMode).toBe("approval-required"); + }); + it("does not stop the active session when restart fails before rebind", async () => { const harness = await createHarness(); const now = new Date().toISOString(); @@ -602,6 +854,7 @@ describe("ProviderCommandReactor", () => { threadId: ThreadId.makeUnsafe("thread-1"), status: "running", providerName: "codex", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: asTurnId("turn-1"), lastError: null, @@ -640,6 +893,7 @@ describe("ProviderCommandReactor", () => { threadId: ThreadId.makeUnsafe("thread-1"), status: "running", providerName: "codex", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: null, lastError: null, @@ -681,6 +935,7 @@ describe("ProviderCommandReactor", () => { threadId: ThreadId.makeUnsafe("thread-1"), status: "running", providerName: "codex", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: null, lastError: null, @@ -719,7 +974,7 @@ describe("ProviderCommandReactor", () => { harness.respondToRequest.mockImplementation(() => Effect.fail( new ProviderAdapterRequestError({ - provider: "codex", + provider: "cursor", method: 
"session/request_permission", detail: "Unknown pending permission request: approval-request-1", }), @@ -734,7 +989,8 @@ describe("ProviderCommandReactor", () => { session: { threadId: ThreadId.makeUnsafe("thread-1"), status: "running", - providerName: "codex", + providerName: "cursor", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: null, lastError: null, @@ -822,6 +1078,7 @@ describe("ProviderCommandReactor", () => { threadId: ThreadId.makeUnsafe("thread-1"), status: "ready", providerName: "codex", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: null, lastError: null, diff --git a/apps/server/src/orchestration/Layers/ProviderCommandReactor.ts b/apps/server/src/orchestration/Layers/ProviderCommandReactor.ts index fe02188450..bce01d2d25 100644 --- a/apps/server/src/orchestration/Layers/ProviderCommandReactor.ts +++ b/apps/server/src/orchestration/Layers/ProviderCommandReactor.ts @@ -5,7 +5,7 @@ import { type OrchestrationEvent, type ProviderModelOptions, type ProviderKind, - type ProviderStartOptions, + ProviderStartOptions, type OrchestrationSession, ThreadId, type ProviderSession, @@ -135,7 +135,7 @@ const make = Effect.gen(function* () { ), ); - const threadProviderOptions = new Map(); + const threadProviderOptions = new Map(); const appendProviderFailureActivity = (input: { readonly threadId: ThreadId; @@ -195,7 +195,7 @@ const make = Effect.gen(function* () { readonly provider?: ProviderKind; readonly model?: string; readonly modelOptions?: ProviderModelOptions; - readonly providerOptions?: ProviderStartOptions; + readonly providerOptions?: typeof ProviderStartOptions.Type; }, ) { const readModel = yield* orchestrationEngine.getReadModel(); @@ -206,7 +206,11 @@ const make = Effect.gen(function* () { const desiredRuntimeMode = thread.runtimeMode; const currentProvider: ProviderKind | undefined = - thread.session?.providerName === "codex" ? 
thread.session.providerName : undefined; + thread.session?.providerName === "codex" || + thread.session?.providerName === "claudeCode" || + thread.session?.providerName === "cursor" + ? thread.session.providerName + : undefined; const preferredProvider: ProviderKind | undefined = options?.provider ?? currentProvider; const desiredModel = options?.model ?? thread.model; const effectiveCwd = resolveThreadWorkspaceCwd({ @@ -245,6 +249,7 @@ const make = Effect.gen(function* () { threadId, status: mapProviderSessionStatusToOrchestrationStatus(session.status), providerName: session.provider, + providerSessionId: thread.session?.providerSessionId ?? null, runtimeMode: desiredRuntimeMode, // Provider turn ids are not orchestration turn ids. activeTurnId: null, @@ -255,7 +260,9 @@ const make = Effect.gen(function* () { }); const existingSessionThreadId = - thread.session && thread.session.status !== "stopped" ? thread.id : null; + thread.session && thread.session.status !== "stopped" && thread.session.status !== "error" + ? thread.id + : null; if (existingSessionThreadId) { const runtimeModeChanged = thread.runtimeMode !== thread.session?.runtimeMode; const providerChanged = @@ -318,7 +325,7 @@ const make = Effect.gen(function* () { readonly provider?: ProviderKind; readonly model?: string; readonly modelOptions?: ProviderModelOptions; - readonly providerOptions?: ProviderStartOptions; + readonly providerOptions?: typeof ProviderStartOptions.Type; readonly interactionMode?: "default" | "plan"; readonly createdAt: string; }) { @@ -431,6 +438,7 @@ const make = Effect.gen(function* () { ) { const key = turnStartKeyForEvent(event); if (yield* hasHandledTurnStartRecently(key)) { + yield* Effect.logInfo("processTurnStartRequested: skipped (handled recently)"); return; } @@ -526,6 +534,7 @@ const make = Effect.gen(function* () { threadId: event.payload.threadId, requestId: event.payload.requestId, decision: event.payload.decision, + ...(event.payload.feedback ? 
{ feedback: event.payload.feedback } : {}), }) .pipe( Effect.catchCause((cause) => @@ -606,6 +615,7 @@ const make = Effect.gen(function* () { threadId: thread.id, status: "stopped", providerName: thread.session?.providerName ?? null, + providerSessionId: null, runtimeMode: thread.session?.runtimeMode ?? DEFAULT_RUNTIME_MODE, activeTurnId: null, lastError: thread.session?.lastError ?? null, diff --git a/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.test.ts b/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.test.ts index b6b48c7edf..ccbd336d26 100644 --- a/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.test.ts +++ b/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.test.ts @@ -1,7 +1,6 @@ import fs from "node:fs"; import os from "node:os"; import path from "node:path"; - import type { OrchestrationReadModel, ProviderRuntimeEvent } from "@t3tools/contracts"; import { ApprovalRequestId, @@ -45,7 +44,7 @@ const asTurnId = (value: string): TurnId => TurnId.makeUnsafe(value); type LegacyProviderRuntimeEvent = { readonly type: string; readonly eventId: EventId; - readonly provider: "codex"; + readonly provider: "codex" | "claudeCode" | "cursor"; readonly createdAt: string; readonly threadId: ThreadId; readonly turnId?: string | undefined; @@ -69,6 +68,7 @@ function createProviderServiceHarness() { listSessions: () => Effect.succeed([]), getCapabilities: () => Effect.succeed({ sessionModelSwitch: "in-session" }), rollbackConversation: () => unsupported(), + stopAll: () => unsupported(), streamEvents: Stream.fromPubSub(runtimeEventPubSub), }; @@ -197,6 +197,7 @@ describe("ProviderRuntimeIngestion", () => { threadId: ThreadId.makeUnsafe("thread-1"), status: "ready", providerName: "codex", + providerSessionId: null, runtimeMode: "approval-required", activeTurnId: null, updatedAt: createdAt, @@ -402,6 +403,61 @@ describe("ProviderRuntimeIngestion", () => { ); }); + it("accepts claude turn lifecycle when seeded thread id is 
a synthetic placeholder", async () => { + const harness = await createHarness(); + const seededAt = new Date().toISOString(); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.session.set", + commandId: CommandId.makeUnsafe("cmd-session-seed-claude-placeholder"), + threadId: ThreadId.makeUnsafe("thread-1"), + session: { + threadId: ThreadId.makeUnsafe("thread-1"), + status: "ready", + providerName: "claudeCode", + providerSessionId: null, + runtimeMode: "approval-required", + activeTurnId: null, + updatedAt: seededAt, + lastError: null, + }, + createdAt: seededAt, + }), + ); + + harness.emit({ + type: "turn.started", + eventId: asEventId("evt-turn-started-claude-placeholder"), + provider: "claudeCode", + createdAt: new Date().toISOString(), + threadId: asThreadId("thread-1"), + turnId: asTurnId("turn-claude-placeholder"), + }); + + await waitForThread( + harness.engine, + (thread) => + thread.session?.status === "running" && + thread.session?.activeTurnId === "turn-claude-placeholder", + ); + + harness.emit({ + type: "turn.completed", + eventId: asEventId("evt-turn-completed-claude-placeholder"), + provider: "claudeCode", + createdAt: new Date().toISOString(), + threadId: asThreadId("thread-1"), + turnId: asTurnId("turn-claude-placeholder"), + status: "completed", + }); + + await waitForThread( + harness.engine, + (thread) => thread.session?.status === "ready" && thread.session?.activeTurnId === null, + ); + }); + it("ignores auxiliary turn completions from a different provider thread", async () => { const harness = await createHarness(); const now = new Date().toISOString(); diff --git a/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.ts b/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.ts index 417e93c8d4..ac877cc6a7 100644 --- a/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.ts +++ b/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.ts @@ -14,9 +14,9 @@ import { import { Cache, Cause, 
Duration, Effect, Layer, Option, Ref, Stream } from "effect"; import { makeDrainableWorker } from "@t3tools/shared/DrainableWorker"; -import { ProviderService } from "../../provider/Services/ProviderService.ts"; import { resolveThreadWorkspaceCwd } from "../../checkpointing/Utils.ts"; import { isGitRepository } from "../../git/isRepo.ts"; +import { ProviderService } from "../../provider/Services/ProviderService.ts"; import { OrchestrationEngineService } from "../Services/OrchestrationEngine.ts"; import { ProviderRuntimeIngestionService, @@ -34,6 +34,8 @@ const BUFFERED_MESSAGE_TEXT_BY_MESSAGE_ID_CACHE_CAPACITY = 20_000; const BUFFERED_MESSAGE_TEXT_BY_MESSAGE_ID_TTL = Duration.minutes(120); const BUFFERED_PROPOSED_PLAN_BY_ID_CACHE_CAPACITY = 10_000; const BUFFERED_PROPOSED_PLAN_BY_ID_TTL = Duration.minutes(120); +const ASSISTANT_SEGMENT_COUNTER_CACHE_CAPACITY = 10_000; +const ASSISTANT_SEGMENT_COUNTER_TTL = Duration.minutes(120); const MAX_BUFFERED_ASSISTANT_CHARS = 24_000; const STRICT_PROVIDER_LIFECYCLE_GUARD = process.env.T3CODE_STRICT_PROVIDER_LIFECYCLE_GUARD !== "0"; @@ -71,6 +73,38 @@ function truncateDetail(value: string, limit = 180): string { return value.length > limit ? `${value.slice(0, limit - 3)}...` : value; } +const MAX_ARGS_SIZE = 50_000; + +/** + * Truncate large string fields in approval `args` to prevent unbounded + * payloads flowing through the activity pipeline and to the web client. + */ +function truncateArgs(args: unknown): unknown { + if (args == null || typeof args !== "object") return args; + const serialized = JSON.stringify(args); + if (serialized.length <= MAX_ARGS_SIZE) return args; + // Deep-clone and truncate large string values + const clone = JSON.parse(serialized) as Record; + const truncateStrings = (obj: Record) => { + for (const key of Object.keys(obj)) { + const val = obj[key]; + if (typeof val === "string" && val.length > 10_000) { + obj[key] = val.slice(0, 10_000) + "\n... 
(truncated)"; + } else if (val != null && typeof val === "object" && !Array.isArray(val)) { + truncateStrings(val as Record); + } else if (Array.isArray(val)) { + for (const item of val) { + if (item != null && typeof item === "object") { + truncateStrings(item as Record); + } + } + } + } + }; + truncateStrings(clone); + return clone; +} + function normalizeProposedPlanMarkdown(planMarkdown: string | undefined): string | undefined { const trimmed = planMarkdown?.trim(); if (!trimmed) { @@ -174,6 +208,31 @@ function requestKindFromCanonicalRequestType( } } +function humanReadableItemType(itemType: string): string { + switch (itemType) { + case "command_execution": + return "Command"; + case "file_change": + return "File change"; + case "mcp_tool_call": + return "MCP tool"; + case "dynamic_tool_call": + return "Tool call"; + case "collab_agent_tool_call": + return "Agent tool"; + case "web_search": + return "Web search"; + case "image_view": + return "Image view"; + default: + return "Tool"; + } +} + +function resolveToolTitle(payload: { title?: string | undefined; itemType: string }): string { + return payload.title || humanReadableItemType(payload.itemType); +} + function runtimeEventToActivities( event: ProviderRuntimeEvent, ): ReadonlyArray { @@ -183,6 +242,15 @@ function runtimeEventToActivities( ? { sequence: eventWithSequence.sessionSequence } : {}; })(); + const maybeParentToolUseId = (() => { + const eventWithParent = event as ProviderRuntimeEvent & { parentToolUseId?: string | null }; + return eventWithParent.parentToolUseId + ? { parentToolUseId: eventWithParent.parentToolUseId } + : {}; + })(); + const maybeItemId = (() => { + return event.itemId ? { itemId: event.itemId } : {}; + })(); switch (event.type) { case "request.opened": { if (event.payload.requestType === "tool_user_input") { @@ -208,9 +276,11 @@ function runtimeEventToActivities( ...(requestKind ? { requestKind } : {}), requestType: event.payload.requestType, ...(event.payload.detail ? 
{ detail: truncateDetail(event.payload.detail) } : {}), + ...(event.payload.args !== undefined ? { args: truncateArgs(event.payload.args) } : {}), }, turnId: toTurnId(event.turnId) ?? null, ...maybeSequence, + ...maybeParentToolUseId, }, ]; } @@ -235,6 +305,7 @@ function runtimeEventToActivities( }, turnId: toTurnId(event.turnId) ?? null, ...maybeSequence, + ...maybeParentToolUseId, }, ]; } @@ -256,6 +327,7 @@ function runtimeEventToActivities( }, turnId: toTurnId(event.turnId) ?? null, ...maybeSequence, + ...maybeParentToolUseId, }, ]; } @@ -274,6 +346,7 @@ function runtimeEventToActivities( }, turnId: toTurnId(event.turnId) ?? null, ...maybeSequence, + ...maybeParentToolUseId, }, ]; } @@ -294,6 +367,7 @@ function runtimeEventToActivities( }, turnId: toTurnId(event.turnId) ?? null, ...maybeSequence, + ...maybeParentToolUseId, }, ]; } @@ -312,6 +386,7 @@ function runtimeEventToActivities( }, turnId: toTurnId(event.turnId) ?? null, ...maybeSequence, + ...maybeParentToolUseId, }, ]; } @@ -330,6 +405,7 @@ function runtimeEventToActivities( }, turnId: toTurnId(event.turnId) ?? null, ...maybeSequence, + ...maybeParentToolUseId, }, ]; } @@ -356,6 +432,7 @@ function runtimeEventToActivities( }, turnId: toTurnId(event.turnId) ?? null, ...maybeSequence, + ...maybeParentToolUseId, }, ]; } @@ -376,6 +453,7 @@ function runtimeEventToActivities( }, turnId: toTurnId(event.turnId) ?? null, ...maybeSequence, + ...maybeParentToolUseId, }, ]; } @@ -401,6 +479,7 @@ function runtimeEventToActivities( }, turnId: toTurnId(event.turnId) ?? null, ...maybeSequence, + ...maybeParentToolUseId, }, ]; } @@ -415,7 +494,7 @@ function runtimeEventToActivities( createdAt: event.createdAt, tone: "tool", kind: "tool.updated", - summary: event.payload.title ?? "Tool updated", + summary: resolveToolTitle(event.payload), payload: { itemType: event.payload.itemType, ...(event.payload.status ? 
{ status: event.payload.status } : {}), @@ -424,6 +503,7 @@ function runtimeEventToActivities( }, turnId: toTurnId(event.turnId) ?? null, ...maybeSequence, + ...maybeParentToolUseId, }, ]; } @@ -438,13 +518,16 @@ function runtimeEventToActivities( createdAt: event.createdAt, tone: "tool", kind: "tool.completed", - summary: event.payload.title ?? "Tool", + summary: `${resolveToolTitle(event.payload)} complete`, payload: { itemType: event.payload.itemType, ...(event.payload.detail ? { detail: truncateDetail(event.payload.detail) } : {}), + ...(event.payload.data !== undefined ? { data: event.payload.data } : {}), }, turnId: toTurnId(event.turnId) ?? null, ...maybeSequence, + ...maybeParentToolUseId, + ...maybeItemId, }, ]; } @@ -459,13 +542,15 @@ function runtimeEventToActivities( createdAt: event.createdAt, tone: "tool", kind: "tool.started", - summary: `${event.payload.title ?? "Tool"} started`, + summary: `${resolveToolTitle(event.payload)} started`, payload: { itemType: event.payload.itemType, ...(event.payload.detail ? { detail: truncateDetail(event.payload.detail) } : {}), }, turnId: toTurnId(event.turnId) ?? null, ...maybeSequence, + ...maybeParentToolUseId, + ...maybeItemId, }, ]; } @@ -497,12 +582,26 @@ const make = Effect.gen(function* () { lookup: () => Effect.succeed(""), }); + const bufferedThinkingTextByMessageId = yield* Cache.make({ + capacity: BUFFERED_MESSAGE_TEXT_BY_MESSAGE_ID_CACHE_CAPACITY, + timeToLive: BUFFERED_MESSAGE_TEXT_BY_MESSAGE_ID_TTL, + lookup: () => Effect.succeed(""), + }); + const bufferedProposedPlanById = yield* Cache.make({ capacity: BUFFERED_PROPOSED_PLAN_BY_ID_CACHE_CAPACITY, timeToLive: BUFFERED_PROPOSED_PLAN_BY_ID_TTL, lookup: () => Effect.succeed({ text: "", createdAt: "" }), }); + /** Tracks the current message segment index per turn so that tool activities + * between assistant text deltas cause a new assistant message to be created. 
*/ + const assistantSegmentByTurnKey = yield* Cache.make({ + capacity: ASSISTANT_SEGMENT_COUNTER_CACHE_CAPACITY, + timeToLive: ASSISTANT_SEGMENT_COUNTER_TTL, + lookup: () => Effect.succeed(0), + }); + const isGitRepoForThread = Effect.fnUntraced(function* (threadId: ThreadId) { const readModel = yield* orchestrationEngine.getReadModel(); const thread = readModel.threads.find((entry) => entry.id === threadId); @@ -564,6 +663,19 @@ const make = Effect.gen(function* () { const clearAssistantMessageIdsForTurn = (threadId: ThreadId, turnId: TurnId) => Cache.invalidate(turnMessageIdsByTurnKey, providerTurnKey(threadId, turnId)); + const getAssistantSegment = (threadId: ThreadId, turnId: TurnId) => + Cache.get(assistantSegmentByTurnKey, providerTurnKey(threadId, turnId)); + + const incrementAssistantSegment = (threadId: ThreadId, turnId: TurnId) => + getAssistantSegment(threadId, turnId).pipe( + Effect.flatMap((current) => + Cache.set(assistantSegmentByTurnKey, providerTurnKey(threadId, turnId), current + 1), + ), + ); + + const clearAssistantSegment = (threadId: ThreadId, turnId: TurnId) => + Cache.invalidate(assistantSegmentByTurnKey, providerTurnKey(threadId, turnId)); + const appendBufferedAssistantText = (messageId: MessageId, delta: string) => Cache.getOption(bufferedAssistantTextByMessageId, messageId).pipe( Effect.flatMap((existingText) => @@ -593,9 +705,46 @@ const make = Effect.gen(function* () { ), ); + const peekBufferedAssistantText = (messageId: MessageId) => + Cache.getOption(bufferedAssistantTextByMessageId, messageId).pipe( + Effect.map((existingText) => Option.getOrElse(existingText, () => "")), + ); + const clearBufferedAssistantText = (messageId: MessageId) => Cache.invalidate(bufferedAssistantTextByMessageId, messageId); + const appendBufferedThinkingText = (messageId: MessageId, delta: string) => + Cache.getOption(bufferedThinkingTextByMessageId, messageId).pipe( + Effect.flatMap((existingText) => + Effect.gen(function* () { + const nextText = 
Option.match(existingText, { + onNone: () => delta, + onSome: (text) => `${text}${delta}`, + }); + if (nextText.length <= MAX_BUFFERED_ASSISTANT_CHARS) { + yield* Cache.set(bufferedThinkingTextByMessageId, messageId, nextText); + return ""; + } + + // Safety valve: flush full buffered thinking text to cap memory. + yield* Cache.invalidate(bufferedThinkingTextByMessageId, messageId); + return nextText; + }), + ), + ); + + const takeBufferedThinkingText = (messageId: MessageId) => + Cache.getOption(bufferedThinkingTextByMessageId, messageId).pipe( + Effect.flatMap((existingText) => + Cache.invalidate(bufferedThinkingTextByMessageId, messageId).pipe( + Effect.as(Option.getOrElse(existingText, () => "")), + ), + ), + ); + + const clearBufferedThinkingText = (messageId: MessageId) => + Cache.invalidate(bufferedThinkingTextByMessageId, messageId); + const appendBufferedProposedPlan = (planId: string, delta: string, createdAt: string) => Cache.getOption(bufferedProposedPlanById, planId).pipe( Effect.flatMap((existingEntry) => { @@ -621,7 +770,9 @@ const make = Effect.gen(function* () { Cache.invalidate(bufferedProposedPlanById, planId); const clearAssistantMessageState = (messageId: MessageId) => - clearBufferedAssistantText(messageId); + Effect.all([clearBufferedAssistantText(messageId), clearBufferedThinkingText(messageId)]).pipe( + Effect.asVoid, + ); const finalizeAssistantMessage = (input: { event: ProviderRuntimeEvent; @@ -635,6 +786,7 @@ const make = Effect.gen(function* () { }) => Effect.gen(function* () { const bufferedText = yield* takeBufferedAssistantText(input.messageId); + const bufferedThinking = yield* takeBufferedThinkingText(input.messageId); const text = bufferedText.length > 0 ? bufferedText @@ -642,13 +794,14 @@ const make = Effect.gen(function* () { ? input.fallbackText! 
: ""; - if (text.length > 0) { + if (text.length > 0 || bufferedThinking.length > 0) { yield* orchestrationEngine.dispatch({ type: "thread.message.assistant.delta", commandId: providerCommandId(input.event, input.finalDeltaCommandTag), threadId: input.threadId, messageId: input.messageId, delta: text, + ...(bufferedThinking.length > 0 ? { thinkingDelta: bufferedThinking } : {}), ...(input.turnId ? { turnId: input.turnId } : {}), createdAt: input.createdAt, }); @@ -854,6 +1007,10 @@ const make = Effect.gen(function* () { : (thread.session?.lastError ?? null); if (shouldApplyThreadLifecycle) { + const providerSessionId = + event.type === "thread.started" && event.payload.providerThreadId + ? event.payload.providerThreadId + : (thread.session?.providerSessionId ?? null); yield* orchestrationEngine.dispatch({ type: "thread.session.set", commandId: providerCommandId(event, "thread-session-set"), @@ -862,6 +1019,7 @@ const make = Effect.gen(function* () { threadId: thread.id, status, providerName: event.provider, + providerSessionId, runtimeMode: thread.session?.runtimeMode ?? "full-access", activeTurnId: nextActiveTurnId, lastError, @@ -876,14 +1034,20 @@ const make = Effect.gen(function* () { event.type === "content.delta" && event.payload.streamKind === "assistant_text" ? event.payload.delta : undefined; + const thinkingDelta = + event.type === "content.delta" && event.payload.streamKind === "reasoning_text" + ? event.payload.delta + : undefined; const proposedPlanDelta = event.type === "turn.proposed.delta" ? event.payload.delta : undefined; if (assistantDelta && assistantDelta.length > 0) { + const turnId = toTurnId(event.turnId); + const segment = turnId ? yield* getAssistantSegment(thread.id, turnId) : 0; + const baseId = event.itemId ?? event.turnId ?? event.eventId; const assistantMessageId = MessageId.makeUnsafe( - `assistant:${event.itemId ?? event.turnId ?? event.eventId}`, + segment > 0 ? 
`assistant:${baseId}:${segment}` : `assistant:${baseId}`, ); - const turnId = toTurnId(event.turnId); if (turnId) { yield* rememberAssistantMessageId(thread.id, turnId, assistantMessageId); } @@ -915,20 +1079,67 @@ const make = Effect.gen(function* () { } } + if (thinkingDelta && thinkingDelta.length > 0) { + const turnId = toTurnId(event.turnId); + const segment = turnId ? yield* getAssistantSegment(thread.id, turnId) : 0; + const baseId = event.itemId ?? event.turnId ?? event.eventId; + const assistantMessageId = MessageId.makeUnsafe( + segment > 0 ? `assistant:${baseId}:${segment}` : `assistant:${baseId}`, + ); + if (turnId) { + yield* rememberAssistantMessageId(thread.id, turnId, assistantMessageId); + } + + const assistantDeliveryMode = yield* Ref.get(assistantDeliveryModeRef); + if (assistantDeliveryMode === "buffered") { + const spillChunk = yield* appendBufferedThinkingText(assistantMessageId, thinkingDelta); + if (spillChunk.length > 0) { + yield* orchestrationEngine.dispatch({ + type: "thread.message.assistant.delta", + commandId: providerCommandId(event, "thinking-delta-buffer-spill"), + threadId: thread.id, + messageId: assistantMessageId, + delta: "", + thinkingDelta: spillChunk, + ...(turnId ? { turnId } : {}), + createdAt: now, + }); + } + } else { + yield* orchestrationEngine.dispatch({ + type: "thread.message.assistant.delta", + commandId: providerCommandId(event, "thinking-delta"), + threadId: thread.id, + messageId: assistantMessageId, + delta: "", + thinkingDelta: thinkingDelta, + ...(turnId ? { turnId } : {}), + createdAt: now, + }); + } + } + if (proposedPlanDelta && proposedPlanDelta.length > 0) { const planId = proposedPlanIdFromEvent(event, thread.id); yield* appendBufferedProposedPlan(planId, proposedPlanDelta, now); } - const assistantCompletion = - event.type === "item.completed" && event.payload.itemType === "assistant_message" - ? { - messageId: MessageId.makeUnsafe( - `assistant:${event.itemId ?? event.turnId ?? 
event.eventId}`, - ), - fallbackText: event.payload.detail, - } - : undefined; + const assistantCompletion = yield* (() => { + if (event.type !== "item.completed" || event.payload.itemType !== "assistant_message") { + return Effect.succeed(null); + } + const turnId = toTurnId(event.turnId); + return Effect.gen(function* () { + const segment = turnId ? yield* getAssistantSegment(thread.id, turnId) : 0; + const baseId = event.itemId ?? event.turnId ?? event.eventId; + return { + messageId: MessageId.makeUnsafe( + segment > 0 ? `assistant:${baseId}:${segment}` : `assistant:${baseId}`, + ), + fallbackText: event.payload.detail, + }; + }); + })(); const proposedPlanCompletion = event.type === "turn.proposed.completed" ? { @@ -999,6 +1210,7 @@ const make = Effect.gen(function* () { { concurrency: 1 }, ).pipe(Effect.asVoid); yield* clearAssistantMessageIdsForTurn(thread.id, turnId); + yield* clearAssistantSegment(thread.id, turnId); yield* finalizeBufferedProposedPlan({ event, @@ -1031,6 +1243,7 @@ const make = Effect.gen(function* () { threadId: thread.id, status: "error", providerName: event.provider, + providerSessionId: thread.session?.providerSessionId ?? null, runtimeMode: thread.session?.runtimeMode ?? "full-access", activeTurnId: eventTurnId ?? null, lastError: runtimeErrorMessage, @@ -1084,6 +1297,59 @@ const make = Effect.gen(function* () { } } + // When a tool starts while there is an active streaming assistant message, + // finalize the current message and bump the segment counter so the next + // assistant text creates a new message — splitting the work log around it. 
+ if (event.type === "item.started" && isToolLifecycleItemType(event.payload.itemType)) { + const turnId = toTurnId(event.turnId); + if (turnId) { + const messageIds = yield* getAssistantMessageIdsForTurn(thread.id, turnId); + for (const messageId of messageIds) { + const existingMessage = thread.messages.find((entry) => entry.id === messageId); + if (!existingMessage?.streaming) continue; + + // Check whether the message or its buffer contain visible text. + // If only thinking/reasoning was received, skip the split to avoid + // creating an "(empty response)" message in the UI. + const bufferedText = yield* peekBufferedAssistantText(messageId); + const hasVisibleText = existingMessage.text.length > 0 || bufferedText.length > 0; + if (!hasVisibleText) continue; + + yield* finalizeAssistantMessage({ + event, + threadId: thread.id, + messageId, + turnId, + createdAt: now, + commandTag: "assistant-complete-segment", + finalDeltaCommandTag: "assistant-delta-finalize-segment", + }); + yield* forgetAssistantMessageId(thread.id, turnId, messageId); + yield* incrementAssistantSegment(thread.id, turnId); + } + } + } + + // When the agent changes plan mode via EnterPlanMode/ExitPlanMode tools, + // update the thread's interaction mode so the UI reflects the change. + if ( + event.type === "item.completed" && + typeof event.payload.data === "object" && + event.payload.data !== null && + "toolName" in event.payload.data + ) { + const toolName = (event.payload.data as { toolName?: string }).toolName; + if (toolName === "ExitPlanMode" || toolName === "EnterPlanMode") { + yield* orchestrationEngine.dispatch({ + type: "thread.interaction-mode.set", + commandId: providerCommandId(event, "plan-mode-change"), + threadId: thread.id, + interactionMode: toolName === "EnterPlanMode" ? 
"plan" : "default", + createdAt: event.createdAt, + }); + } + } + const activities = runtimeEventToActivities(event); yield* Effect.forEach(activities, (activity) => orchestrationEngine.dispatch({ diff --git a/apps/server/src/orchestration/commandInvariants.test.ts b/apps/server/src/orchestration/commandInvariants.test.ts index f95e4db754..dce76aa445 100644 --- a/apps/server/src/orchestration/commandInvariants.test.ts +++ b/apps/server/src/orchestration/commandInvariants.test.ts @@ -64,6 +64,7 @@ const readModel: OrchestrationReadModel = { proposedPlans: [], checkpoints: [], deletedAt: null, + linkedJiraTicket: null, }, { id: ThreadId.makeUnsafe("thread-2"), @@ -83,6 +84,7 @@ const readModel: OrchestrationReadModel = { proposedPlans: [], checkpoints: [], deletedAt: null, + linkedJiraTicket: null, }, ], }; diff --git a/apps/server/src/orchestration/decider.jira.test.ts b/apps/server/src/orchestration/decider.jira.test.ts new file mode 100644 index 0000000000..3c00708964 --- /dev/null +++ b/apps/server/src/orchestration/decider.jira.test.ts @@ -0,0 +1,120 @@ +import { + CommandId, + EventId, + ProjectId, + ThreadId, + type LinkedJiraTicket, + type OrchestrationEvent, +} from "@t3tools/contracts"; +import { describe, expect, it } from "vitest"; +import { Effect } from "effect"; + +import { decideOrchestrationCommand } from "./decider.ts"; +import { createEmptyReadModel, projectEvent } from "./projector.ts"; + +const asEventId = (value: string): EventId => EventId.makeUnsafe(value); +const asProjectId = (value: string): ProjectId => ProjectId.makeUnsafe(value); +const asThreadId = (value: string): ThreadId => ThreadId.makeUnsafe(value); + +const NOW = new Date().toISOString(); + +function makeReadModelWithThread() { + return Effect.runPromise( + projectEvent(createEmptyReadModel(NOW), { + sequence: 1, + eventId: asEventId("evt-thread-create"), + aggregateKind: "thread", + aggregateId: asThreadId("thread-jira"), + type: "thread.created", + occurredAt: NOW, + commandId: 
CommandId.makeUnsafe("cmd-thread-create"), + causationEventId: null, + correlationId: CommandId.makeUnsafe("cmd-thread-create"), + metadata: {}, + payload: { + threadId: asThreadId("thread-jira"), + projectId: asProjectId("project-1"), + title: "Jira test thread", + model: "gpt-5-codex", + runtimeMode: "full-access", + interactionMode: "default", + branch: null, + worktreePath: null, + createdAt: NOW, + updatedAt: NOW, + }, + } as OrchestrationEvent), + ); +} + +describe("decider: thread.meta.update with linkedJiraTicket", () => { + const ticket: LinkedJiraTicket = { + key: "PROJ-123", + url: "https://jira.example.com/browse/PROJ-123", + title: "Fix the login bug", + status: "active", + linkedAt: NOW, + }; + + it("emits thread.meta-updated with linkedJiraTicket when linking a ticket", async () => { + const readModel = await makeReadModelWithThread(); + + const result = await Effect.runPromise( + decideOrchestrationCommand({ + command: { + type: "thread.meta.update", + commandId: CommandId.makeUnsafe("cmd-link-jira"), + threadId: asThreadId("thread-jira"), + linkedJiraTicket: ticket, + }, + readModel, + }), + ); + + const event = Array.isArray(result) ? result[0]! : result; + expect(event.type).toBe("thread.meta-updated"); + expect((event.payload as { linkedJiraTicket: LinkedJiraTicket }).linkedJiraTicket).toEqual( + ticket, + ); + }); + + it("emits thread.meta-updated with null linkedJiraTicket when unlinking", async () => { + const readModel = await makeReadModelWithThread(); + + const result = await Effect.runPromise( + decideOrchestrationCommand({ + command: { + type: "thread.meta.update", + commandId: CommandId.makeUnsafe("cmd-unlink-jira"), + threadId: asThreadId("thread-jira"), + linkedJiraTicket: null, + }, + readModel, + }), + ); + + const event = Array.isArray(result) ? result[0]! 
: result; + expect(event.type).toBe("thread.meta-updated"); + expect((event.payload as { linkedJiraTicket: null }).linkedJiraTicket).toBeNull(); + }); + + it("omits linkedJiraTicket from payload when not provided in command", async () => { + const readModel = await makeReadModelWithThread(); + + const result = await Effect.runPromise( + decideOrchestrationCommand({ + command: { + type: "thread.meta.update", + commandId: CommandId.makeUnsafe("cmd-title-only"), + threadId: asThreadId("thread-jira"), + title: "Renamed thread", + }, + readModel, + }), + ); + + const event = Array.isArray(result) ? result[0]! : result; + expect(event.type).toBe("thread.meta-updated"); + expect(event.payload as Record).not.toHaveProperty("linkedJiraTicket"); + }); +}); diff --git a/apps/server/src/orchestration/decider.ts b/apps/server/src/orchestration/decider.ts index eea41a2b35..a6af9cfd75 100644 --- a/apps/server/src/orchestration/decider.ts +++ b/apps/server/src/orchestration/decider.ts @@ -210,6 +210,9 @@ export const decideOrchestrationCommand = Effect.fn("decideOrchestrationCommand" ...(command.model !== undefined ? { model: command.model } : {}), ...(command.branch !== undefined ? { branch: command.branch } : {}), ...(command.worktreePath !== undefined ? { worktreePath: command.worktreePath } : {}), + ...(command.linkedJiraTicket !== undefined + ? { linkedJiraTicket: command.linkedJiraTicket } + : {}), updatedAt: occurredAt, }, }; @@ -361,6 +364,7 @@ export const decideOrchestrationCommand = Effect.fn("decideOrchestrationCommand" threadId: command.threadId, requestId: command.requestId, decision: command.decision, + ...(command.feedback ? { feedback: command.feedback } : {}), createdAt: command.createdAt, }, }; @@ -476,6 +480,7 @@ export const decideOrchestrationCommand = Effect.fn("decideOrchestrationCommand" messageId: command.messageId, role: "assistant", text: command.delta, + ...(command.thinkingDelta !== undefined ? 
{ thinkingText: command.thinkingDelta } : {}), turnId: command.turnId ?? null, streaming: true, createdAt: command.createdAt, diff --git a/apps/server/src/orchestration/projector.jira.test.ts b/apps/server/src/orchestration/projector.jira.test.ts new file mode 100644 index 0000000000..2cbb0c2d97 --- /dev/null +++ b/apps/server/src/orchestration/projector.jira.test.ts @@ -0,0 +1,251 @@ +import { + CommandId, + EventId, + ProjectId, + ThreadId, + type LinkedJiraTicket, + type OrchestrationEvent, +} from "@t3tools/contracts"; +import { describe, expect, it } from "vitest"; +import { Effect } from "effect"; + +import { createEmptyReadModel, projectEvent } from "./projector.ts"; + +function makeEvent(input: { + sequence: number; + type: OrchestrationEvent["type"]; + occurredAt: string; + aggregateKind: OrchestrationEvent["aggregateKind"]; + aggregateId: string; + commandId: string | null; + payload: unknown; +}): OrchestrationEvent { + return { + sequence: input.sequence, + eventId: EventId.makeUnsafe(`event-${input.sequence}`), + type: input.type, + aggregateKind: input.aggregateKind, + aggregateId: + input.aggregateKind === "project" + ? ProjectId.makeUnsafe(input.aggregateId) + : ThreadId.makeUnsafe(input.aggregateId), + occurredAt: input.occurredAt, + commandId: input.commandId === null ? 
null : CommandId.makeUnsafe(input.commandId), + causationEventId: null, + correlationId: null, + metadata: {}, + payload: input.payload as never, + } as OrchestrationEvent; +} + +describe("orchestration projector: linkedJiraTicket", () => { + const NOW = new Date().toISOString(); + + const ticket: LinkedJiraTicket = { + key: "PROJ-42", + url: "https://jira.example.com/browse/PROJ-42", + title: "Implement dark mode", + status: "active", + linkedAt: NOW, + }; + + async function createThreadModel() { + return Effect.runPromise( + projectEvent( + createEmptyReadModel(NOW), + makeEvent({ + sequence: 1, + type: "thread.created", + aggregateKind: "thread", + aggregateId: "thread-1", + occurredAt: NOW, + commandId: "cmd-create", + payload: { + threadId: "thread-1", + projectId: "project-1", + title: "Projector test", + model: "gpt-5-codex", + runtimeMode: "full-access", + branch: null, + worktreePath: null, + createdAt: NOW, + updatedAt: NOW, + }, + }), + ), + ); + } + + it("thread.created defaults linkedJiraTicket to null", async () => { + const model = await createThreadModel(); + expect(model.threads[0]!.linkedJiraTicket).toBeNull(); + }); + + it("thread.meta-updated sets linkedJiraTicket", async () => { + const model = await createThreadModel(); + + const next = await Effect.runPromise( + projectEvent( + model, + makeEvent({ + sequence: 2, + type: "thread.meta-updated", + aggregateKind: "thread", + aggregateId: "thread-1", + occurredAt: NOW, + commandId: "cmd-link", + payload: { + threadId: "thread-1", + linkedJiraTicket: ticket, + updatedAt: NOW, + }, + }), + ), + ); + + expect(next.threads[0]!.linkedJiraTicket).toEqual(ticket); + }); + + it("thread.meta-updated can clear linkedJiraTicket to null", async () => { + const model = await createThreadModel(); + + // First link a ticket + const withTicket = await Effect.runPromise( + projectEvent( + model, + makeEvent({ + sequence: 2, + type: "thread.meta-updated", + aggregateKind: "thread", + aggregateId: "thread-1", + 
occurredAt: NOW, + commandId: "cmd-link", + payload: { + threadId: "thread-1", + linkedJiraTicket: ticket, + updatedAt: NOW, + }, + }), + ), + ); + expect(withTicket.threads[0]!.linkedJiraTicket).toEqual(ticket); + + // Then unlink + const cleared = await Effect.runPromise( + projectEvent( + withTicket, + makeEvent({ + sequence: 3, + type: "thread.meta-updated", + aggregateKind: "thread", + aggregateId: "thread-1", + occurredAt: NOW, + commandId: "cmd-unlink", + payload: { + threadId: "thread-1", + linkedJiraTicket: null, + updatedAt: NOW, + }, + }), + ), + ); + expect(cleared.threads[0]!.linkedJiraTicket).toBeNull(); + }); + + it("thread.meta-updated preserves linkedJiraTicket when not in payload", async () => { + const model = await createThreadModel(); + + // Link a ticket + const withTicket = await Effect.runPromise( + projectEvent( + model, + makeEvent({ + sequence: 2, + type: "thread.meta-updated", + aggregateKind: "thread", + aggregateId: "thread-1", + occurredAt: NOW, + commandId: "cmd-link", + payload: { + threadId: "thread-1", + linkedJiraTicket: ticket, + updatedAt: NOW, + }, + }), + ), + ); + + // Update title only — linkedJiraTicket should be preserved + const renamed = await Effect.runPromise( + projectEvent( + withTicket, + makeEvent({ + sequence: 3, + type: "thread.meta-updated", + aggregateKind: "thread", + aggregateId: "thread-1", + occurredAt: NOW, + commandId: "cmd-rename", + payload: { + threadId: "thread-1", + title: "Renamed thread", + updatedAt: NOW, + }, + }), + ), + ); + expect(renamed.threads[0]!.linkedJiraTicket).toEqual(ticket); + }); + + it("thread.meta-updated transitions ticket to completed", async () => { + const model = await createThreadModel(); + + const completedTicket: LinkedJiraTicket = { + ...ticket, + status: "completed", + completedAt: NOW, + }; + + const withTicket = await Effect.runPromise( + projectEvent( + model, + makeEvent({ + sequence: 2, + type: "thread.meta-updated", + aggregateKind: "thread", + aggregateId: 
"thread-1", + occurredAt: NOW, + commandId: "cmd-link", + payload: { + threadId: "thread-1", + linkedJiraTicket: ticket, + updatedAt: NOW, + }, + }), + ), + ); + + const completed = await Effect.runPromise( + projectEvent( + withTicket, + makeEvent({ + sequence: 3, + type: "thread.meta-updated", + aggregateKind: "thread", + aggregateId: "thread-1", + occurredAt: NOW, + commandId: "cmd-complete", + payload: { + threadId: "thread-1", + linkedJiraTicket: completedTicket, + updatedAt: NOW, + }, + }), + ), + ); + + expect(completed.threads[0]!.linkedJiraTicket).toEqual(completedTicket); + expect(completed.threads[0]!.linkedJiraTicket!.status).toBe("completed"); + expect(completed.threads[0]!.linkedJiraTicket!.completedAt).toBe(NOW); + }); +}); diff --git a/apps/server/src/orchestration/projector.ts b/apps/server/src/orchestration/projector.ts index 015f82a677..ec964d3ea4 100644 --- a/apps/server/src/orchestration/projector.ts +++ b/apps/server/src/orchestration/projector.ts @@ -298,6 +298,9 @@ export function projectEvent( ...(payload.model !== undefined ? { model: payload.model } : {}), ...(payload.branch !== undefined ? { branch: payload.branch } : {}), ...(payload.worktreePath !== undefined ? { worktreePath: payload.worktreePath } : {}), + ...(payload.linkedJiraTicket !== undefined + ? { linkedJiraTicket: payload.linkedJiraTicket } + : {}), updatedAt: payload.updatedAt, }), })), @@ -349,6 +352,7 @@ export function projectEvent( id: payload.messageId, role: payload.role, text: payload.text, + ...(payload.thinkingText !== undefined ? { thinkingText: payload.thinkingText } : {}), ...(payload.attachments !== undefined ? { attachments: payload.attachments } : {}), turnId: payload.turnId, streaming: payload.streaming, @@ -370,6 +374,11 @@ export function projectEvent( : message.text.length > 0 ? message.text : entry.text, + ...(message.streaming && message.thinkingText + ? { thinkingText: `${entry.thinkingText ?? 
""}${message.thinkingText}` } + : message.thinkingText !== undefined + ? { thinkingText: message.thinkingText } + : {}), streaming: message.streaming, updatedAt: message.updatedAt, turnId: message.turnId, diff --git a/apps/server/src/persistence/Layers/MemoryRepository.ts b/apps/server/src/persistence/Layers/MemoryRepository.ts new file mode 100644 index 0000000000..af37136fca --- /dev/null +++ b/apps/server/src/persistence/Layers/MemoryRepository.ts @@ -0,0 +1,577 @@ +import * as SqlClient from "effect/unstable/sql/SqlClient"; +import * as SqlSchema from "effect/unstable/sql/SqlSchema"; +import { + type Memory, + type MemoryId, + type ProjectId, + type ThreadId, + MemoryScope, + MemoryCategory, + MemorySource, + NonNegativeInt, + TrimmedNonEmptyString, +} from "@t3tools/contracts"; +import { Effect, Layer, Schema } from "effect"; + +import { toPersistenceDecodeError, toPersistenceSqlError } from "../Errors.ts"; + +import { + MemoryRepository, + type MemoryRepositoryError, + type MemoryRepositoryShape, +} from "../Services/MemoryRepository.ts"; + +/** + * DB row schema: nullable columns come back as null from SQLite, + * so we map optional fields to NullOr for the database representation. + */ +const MemoryDbRowSchema = Schema.Struct({ + memoryId: TrimmedNonEmptyString, + projectId: Schema.NullOr(TrimmedNonEmptyString), + threadId: Schema.NullOr(TrimmedNonEmptyString), + scope: MemoryScope, + category: MemoryCategory, + source: MemorySource, + content: TrimmedNonEmptyString, + title: TrimmedNonEmptyString, + date: Schema.NullOr(Schema.String), + sourceThreadId: Schema.NullOr(TrimmedNonEmptyString), + sourceTurnId: Schema.NullOr(TrimmedNonEmptyString), + relevanceScore: Schema.Number, + accessCount: NonNegativeInt, + lastAccessedAt: Schema.NullOr(Schema.String), + createdAt: Schema.String, + updatedAt: Schema.String, + archivedAt: Schema.NullOr(Schema.String), +}); + +/** Convert a DB row (with nulls) to the domain entity (with optionals). 
*/ +function rowToMemory(row: typeof MemoryDbRowSchema.Type): Memory { + return { + memoryId: row.memoryId as MemoryId, + ...(row.projectId !== null ? { projectId: row.projectId } : {}), + ...(row.threadId !== null ? { threadId: row.threadId } : {}), + scope: row.scope, + category: row.category, + source: row.source, + content: row.content, + title: row.title, + ...(row.date !== null ? { date: row.date } : {}), + ...(row.sourceThreadId !== null ? { sourceThreadId: row.sourceThreadId } : {}), + ...(row.sourceTurnId !== null ? { sourceTurnId: row.sourceTurnId } : {}), + relevanceScore: row.relevanceScore, + accessCount: row.accessCount, + ...(row.lastAccessedAt !== null ? { lastAccessedAt: row.lastAccessedAt } : {}), + createdAt: row.createdAt, + updatedAt: row.updatedAt, + ...(row.archivedAt !== null ? { archivedAt: row.archivedAt } : {}), + } as Memory; +} + +function toPersistenceSqlOrDecodeError(sqlOperation: string, decodeOperation: string) { + return (cause: unknown): MemoryRepositoryError => + Schema.isSchemaError(cause) + ? toPersistenceDecodeError(decodeOperation)(cause) + : toPersistenceSqlError(sqlOperation)(cause); +} + +/** + * Sanitize user input for FTS5 MATCH queries. + * Escapes special FTS5 operators to prevent injection. 
+ */ +function sanitizeFts5Query(query: string): string { + // Wrap each token in double quotes to escape special FTS5 operators + const tokens = query + .replace(/"/g, '""') + .split(/\s+/) + .filter((token) => token.length > 0); + if (tokens.length === 0) return '""'; // safe no-op match for degenerate input + return tokens.map((token) => `"${token}"`).join(" "); +} + +const MEMORY_SELECT_COLUMNS = ` + memory_id AS "memoryId", + project_id AS "projectId", + thread_id AS "threadId", + scope, + category, + source, + content, + title, + date, + source_thread_id AS "sourceThreadId", + source_turn_id AS "sourceTurnId", + relevance_score AS "relevanceScore", + access_count AS "accessCount", + last_accessed_at AS "lastAccessedAt", + created_at AS "createdAt", + updated_at AS "updatedAt", + archived_at AS "archivedAt" +`; + +/** Prefixed column list for JOINed queries where the table is aliased as `m`. */ +const MEMORY_SELECT_COLUMNS_PREFIXED = ` + m.memory_id AS "memoryId", + m.project_id AS "projectId", + m.thread_id AS "threadId", + m.scope, + m.category, + m.source, + m.content, + m.title, + m.date, + m.source_thread_id AS "sourceThreadId", + m.source_turn_id AS "sourceTurnId", + m.relevance_score AS "relevanceScore", + m.access_count AS "accessCount", + m.last_accessed_at AS "lastAccessedAt", + m.created_at AS "createdAt", + m.updated_at AS "updatedAt", + m.archived_at AS "archivedAt" +`; + +const makeMemoryRepository = Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + const insertMemoryRow = SqlSchema.void({ + Request: Schema.Struct({ + memoryId: TrimmedNonEmptyString, + projectId: Schema.NullOr(TrimmedNonEmptyString), + threadId: Schema.NullOr(TrimmedNonEmptyString), + scope: MemoryScope, + category: MemoryCategory, + source: MemorySource, + content: TrimmedNonEmptyString, + title: TrimmedNonEmptyString, + date: Schema.NullOr(Schema.String), + sourceThreadId: Schema.NullOr(TrimmedNonEmptyString), + sourceTurnId: 
Schema.NullOr(TrimmedNonEmptyString), + createdAt: Schema.String, + updatedAt: Schema.String, + }), + execute: (row) => + sql` + INSERT INTO projection_memories ( + memory_id, + project_id, + thread_id, + scope, + category, + source, + content, + title, + date, + source_thread_id, + source_turn_id, + created_at, + updated_at + ) + VALUES ( + ${row.memoryId}, + ${row.projectId}, + ${row.threadId}, + ${row.scope}, + ${row.category}, + ${row.source}, + ${row.content}, + ${row.title}, + ${row.date}, + ${row.sourceThreadId}, + ${row.sourceTurnId}, + ${row.createdAt}, + ${row.updatedAt} + ) + `, + }); + + const create: MemoryRepositoryShape["create"] = (input) => { + const now = new Date().toISOString(); + const memoryId = crypto.randomUUID() as typeof TrimmedNonEmptyString.Type; + + const row = { + memoryId, + projectId: (input.projectId ?? null) as typeof TrimmedNonEmptyString.Type | null, + threadId: (input.threadId ?? null) as typeof TrimmedNonEmptyString.Type | null, + scope: input.scope, + category: input.category, + source: (input.source ?? "manual") as "auto" | "manual", + content: input.content, + title: input.title, + date: (input.date ?? null) as string | null, + sourceThreadId: (input.sourceThreadId ?? null) as typeof TrimmedNonEmptyString.Type | null, + sourceTurnId: (input.sourceTurnId ?? null) as typeof TrimmedNonEmptyString.Type | null, + createdAt: now, + updatedAt: now, + }; + + return insertMemoryRow(row).pipe( + Effect.mapError( + toPersistenceSqlOrDecodeError( + "MemoryRepository.create:query", + "MemoryRepository.create:encodeRequest", + ), + ), + Effect.map( + () => + ({ + memoryId: memoryId as MemoryId, + ...(input.projectId !== undefined ? { projectId: input.projectId } : {}), + ...(input.threadId !== undefined ? { threadId: input.threadId } : {}), + scope: input.scope, + category: input.category, + source: input.source ?? "manual", + content: input.content, + title: input.title, + ...(input.date !== undefined ? 
{ date: input.date } : {}), + ...(input.sourceThreadId !== undefined ? { sourceThreadId: input.sourceThreadId } : {}), + ...(input.sourceTurnId !== undefined ? { sourceTurnId: input.sourceTurnId } : {}), + relevanceScore: 1.0, + accessCount: 0 as typeof NonNegativeInt.Type, + createdAt: now, + updatedAt: now, + }) as Memory, + ), + ); + }; + + const update: MemoryRepositoryShape["update"] = (input) => + Effect.gen(function* () { + const now = new Date().toISOString(); + yield* sql` + UPDATE projection_memories + SET + content = COALESCE(${input.content ?? null}, content), + title = COALESCE(${input.title ?? null}, title), + category = COALESCE(${input.category ?? null}, category), + relevance_score = COALESCE(${input.relevanceScore ?? null}, relevance_score), + updated_at = ${now} + WHERE memory_id = ${input.memoryId} + `; + }).pipe( + Effect.mapError( + toPersistenceSqlOrDecodeError( + "MemoryRepository.update:query", + "MemoryRepository.update:encodeRequest", + ), + ), + ); + + const archive: MemoryRepositoryShape["archive"] = (input) => + Effect.gen(function* () { + const now = new Date().toISOString(); + yield* sql` + UPDATE projection_memories + SET archived_at = ${now}, updated_at = ${now} + WHERE memory_id = ${input.memoryId} + `; + }).pipe(Effect.mapError(toPersistenceSqlError("MemoryRepository.archive:query"))); + + const del: MemoryRepositoryShape["delete"] = (input) => + sql` + DELETE FROM projection_memories + WHERE memory_id = ${input.memoryId} + `.pipe(Effect.mapError(toPersistenceSqlError("MemoryRepository.delete:query"))); + + const listByProjectRows = SqlSchema.findAll({ + Request: Schema.Struct({ + projectId: TrimmedNonEmptyString, + includeThread: Schema.Boolean, + includeArchived: Schema.Boolean, + category: Schema.NullOr(MemoryCategory), + limit: Schema.Number, + offset: Schema.Number, + }), + Result: MemoryDbRowSchema, + execute: (input) => + sql.unsafe( + ` + SELECT ${MEMORY_SELECT_COLUMNS} + FROM projection_memories + WHERE ( + project_id = 
? + ${input.includeThread ? "OR scope = 'thread'" : ""} + ) + ${input.includeArchived ? "" : "AND archived_at IS NULL"} + ${input.category !== null ? "AND category = ?" : ""} + ORDER BY updated_at DESC + LIMIT ? OFFSET ? + `, + [ + input.projectId, + ...(input.category !== null ? [input.category] : []), + input.limit, + input.offset, + ], + ), + }); + + const countByProjectRows = SqlSchema.findAll({ + Request: Schema.Struct({ + projectId: TrimmedNonEmptyString, + includeThread: Schema.Boolean, + includeArchived: Schema.Boolean, + category: Schema.NullOr(MemoryCategory), + }), + Result: Schema.Struct({ total: Schema.Number }), + execute: (input) => + sql.unsafe( + ` + SELECT COUNT(*) AS total + FROM projection_memories + WHERE ( + project_id = ? + ${input.includeThread ? "OR scope = 'thread'" : ""} + ) + ${input.includeArchived ? "" : "AND archived_at IS NULL"} + ${input.category !== null ? "AND category = ?" : ""} + `, + [input.projectId, ...(input.category !== null ? [input.category] : [])], + ), + }); + + const listByProject: MemoryRepositoryShape["listByProject"] = (input) => + Effect.gen(function* () { + const limit = input.limit ?? 50; + const offset = input.offset ?? 0; + const includeThread = input.includeThread ?? true; + const includeArchived = input.includeArchived ?? false; + const category = input.category ?? null; + + const [rows, countRows] = yield* Effect.all([ + listByProjectRows({ + projectId: input.projectId, + includeThread, + includeArchived, + category, + limit, + offset, + }), + countByProjectRows({ + projectId: input.projectId, + includeThread, + includeArchived, + category, + }), + ]); + + const total = (countRows[0]?.total ?? 
0) as typeof NonNegativeInt.Type; + return { + memories: rows.map(rowToMemory), + total, + }; + }).pipe( + Effect.mapError( + toPersistenceSqlOrDecodeError( + "MemoryRepository.listByProject:query", + "MemoryRepository.listByProject:decodeRows", + ), + ), + ); + + const searchRows = SqlSchema.findAll({ + Request: Schema.Struct({ + query: TrimmedNonEmptyString, + projectId: Schema.NullOr(TrimmedNonEmptyString), + category: Schema.NullOr(MemoryCategory), + limit: Schema.Number, + }), + Result: MemoryDbRowSchema, + execute: (input) => { + const ftsQuery = sanitizeFts5Query(input.query); + return sql.unsafe( + ` + SELECT ${MEMORY_SELECT_COLUMNS_PREFIXED} + FROM projection_memories m + JOIN projection_memories_fts fts ON fts.memory_id = m.memory_id + WHERE fts MATCH ? + AND m.archived_at IS NULL + ${input.projectId !== null ? "AND m.project_id = ?" : ""} + ${input.category !== null ? "AND m.category = ?" : ""} + ORDER BY bm25(fts) ASC + LIMIT ? + `, + [ + ftsQuery, + ...(input.projectId !== null ? [input.projectId] : []), + ...(input.category !== null ? [input.category] : []), + input.limit, + ], + ); + }, + }); + + const search: MemoryRepositoryShape["search"] = (input) => + searchRows({ + query: input.query, + projectId: (input.projectId ?? null) as typeof TrimmedNonEmptyString.Type | null, + category: input.category ?? null, + limit: input.limit ?? 20, + }).pipe( + Effect.mapError( + toPersistenceSqlOrDecodeError( + "MemoryRepository.search:query", + "MemoryRepository.search:decodeRows", + ), + ), + Effect.map((rows) => rows.map(rowToMemory)), + ); + + const getRelevantForThreadRows = SqlSchema.findAll({ + Request: Schema.Struct({ + projectId: TrimmedNonEmptyString, + threadId: TrimmedNonEmptyString, + limit: Schema.Number, + }), + Result: MemoryDbRowSchema, + execute: (input) => + sql.unsafe( + ` + SELECT ${MEMORY_SELECT_COLUMNS} + FROM projection_memories + WHERE (project_id = ? 
OR (scope = 'thread' AND thread_id = ?)) + AND archived_at IS NULL + ORDER BY + relevance_score DESC, + CASE WHEN last_accessed_at IS NOT NULL + THEN 1.0 / (1.0 + julianday('now') - julianday(last_accessed_at)) + ELSE 0.5 + END DESC, + updated_at DESC + LIMIT ? + `, + [input.projectId, input.threadId, input.limit], + ), + }); + + const getRelevantForThread: MemoryRepositoryShape["getRelevantForThread"] = (input) => { + // If a query is provided, use FTS5 search scoped to project + if (input.query !== undefined) { + return search({ + query: input.query, + projectId: input.projectId, + limit: input.limit ?? 10, + }); + } + + // Otherwise, return top memories by relevance + recency + return getRelevantForThreadRows({ + projectId: input.projectId, + threadId: input.threadId, + limit: input.limit ?? 10, + }).pipe( + Effect.mapError( + toPersistenceSqlOrDecodeError( + "MemoryRepository.getRelevantForThread:query", + "MemoryRepository.getRelevantForThread:decodeRows", + ), + ), + Effect.map((rows) => rows.map(rowToMemory)), + ); + }; + + const recordAccess: MemoryRepositoryShape["recordAccess"] = (memoryId) => + Effect.gen(function* () { + const now = new Date().toISOString(); + yield* sql` + UPDATE projection_memories + SET access_count = access_count + 1, last_accessed_at = ${now} + WHERE memory_id = ${memoryId} + `; + }).pipe(Effect.mapError(toPersistenceSqlError("MemoryRepository.recordAccess:query"))); + + const listDailyByDateRows = SqlSchema.findAll({ + Request: Schema.Struct({ date: Schema.String }), + Result: MemoryDbRowSchema, + execute: (input) => + sql.unsafe( + `SELECT ${MEMORY_SELECT_COLUMNS} FROM projection_memories + WHERE scope = 'daily' AND date = ? 
AND archived_at IS NULL + ORDER BY created_at DESC`, + [input.date], + ), + }); + + const listDailyByDate: MemoryRepositoryShape["listDailyByDate"] = (input) => + listDailyByDateRows(input).pipe( + Effect.map((rows) => rows.map(rowToMemory)), + Effect.mapError( + toPersistenceSqlOrDecodeError( + "MemoryRepository.listDailyByDate:query", + "MemoryRepository.listDailyByDate:decode", + ), + ), + ); + + const deleteDailyByDate: MemoryRepositoryShape["deleteDailyByDate"] = (input) => + sql` + DELETE FROM projection_memories + WHERE scope = 'daily' AND date = ${input.date} + `.pipe(Effect.mapError(toPersistenceSqlError("MemoryRepository.deleteDailyByDate:query"))); + + const findThreadSummaryRows = SqlSchema.findAll({ + Request: Schema.Struct({ threadId: TrimmedNonEmptyString }), + Result: MemoryDbRowSchema, + execute: (input) => + sql.unsafe( + `SELECT ${MEMORY_SELECT_COLUMNS} FROM projection_memories + WHERE thread_id = ? AND scope = 'thread' AND archived_at IS NULL + LIMIT 1`, + [input.threadId], + ), + }); + + const findThreadSummary: MemoryRepositoryShape["findThreadSummary"] = (threadId) => + findThreadSummaryRows({ threadId: threadId as typeof TrimmedNonEmptyString.Type }).pipe( + Effect.map((rows) => (rows.length > 0 ? rowToMemory(rows[0]!) 
: null)), + Effect.mapError( + toPersistenceSqlOrDecodeError( + "MemoryRepository.findThreadSummary:query", + "MemoryRepository.findThreadSummary:decode", + ), + ), + ); + + const upsertThreadSummary: MemoryRepositoryShape["upsertThreadSummary"] = (input) => + Effect.gen(function* () { + // Delete any existing thread summary for this thread + yield* sql` + DELETE FROM projection_memories + WHERE thread_id = ${input.threadId} AND scope = 'thread' + `; + + // Insert the new thread summary + return yield* create({ + threadId: input.threadId as ThreadId, + projectId: input.projectId as ProjectId, + scope: "thread", + category: "fact", + source: "auto", + title: input.title as typeof TrimmedNonEmptyString.Type, + content: input.content as typeof TrimmedNonEmptyString.Type, + }); + }).pipe( + Effect.mapError( + toPersistenceSqlOrDecodeError( + "MemoryRepository.upsertThreadSummary:query", + "MemoryRepository.upsertThreadSummary:encodeRequest", + ), + ), + ); + + return { + create, + update, + archive, + delete: del, + listByProject, + search, + getRelevantForThread, + recordAccess, + listDailyByDate, + deleteDailyByDate, + findThreadSummary, + upsertThreadSummary, + } satisfies MemoryRepositoryShape; +}); + +export const MemoryRepositoryLive = Layer.effect(MemoryRepository, makeMemoryRepository); diff --git a/apps/server/src/persistence/Layers/ProjectionThreadActivities.ts b/apps/server/src/persistence/Layers/ProjectionThreadActivities.ts index 8e88cfa785..227e30d3ad 100644 --- a/apps/server/src/persistence/Layers/ProjectionThreadActivities.ts +++ b/apps/server/src/persistence/Layers/ProjectionThreadActivities.ts @@ -43,6 +43,8 @@ const makeProjectionThreadActivityRepository = Effect.gen(function* () { summary, payload_json, sequence, + parent_tool_use_id, + item_id, created_at ) VALUES ( @@ -54,6 +56,8 @@ const makeProjectionThreadActivityRepository = Effect.gen(function* () { ${row.summary}, ${JSON.stringify(row.payload)}, ${row.sequence ?? 
null}, + ${row.parentToolUseId}, + ${row.itemId}, ${row.createdAt} ) ON CONFLICT (activity_id) @@ -65,6 +69,8 @@ const makeProjectionThreadActivityRepository = Effect.gen(function* () { summary = excluded.summary, payload_json = excluded.payload_json, sequence = excluded.sequence, + parent_tool_use_id = excluded.parent_tool_use_id, + item_id = excluded.item_id, created_at = excluded.created_at `, }); @@ -83,6 +89,8 @@ const makeProjectionThreadActivityRepository = Effect.gen(function* () { summary, payload_json AS "payload", sequence, + parent_tool_use_id AS "parentToolUseId", + item_id AS "itemId", created_at AS "createdAt" FROM projection_thread_activities WHERE thread_id = ${threadId} @@ -131,6 +139,8 @@ const makeProjectionThreadActivityRepository = Effect.gen(function* () { summary: row.summary, payload: row.payload, ...(row.sequence !== null ? { sequence: row.sequence } : {}), + parentToolUseId: row.parentToolUseId, + itemId: row.itemId, createdAt: row.createdAt, })), ), diff --git a/apps/server/src/persistence/Layers/ProjectionThreadSessions.ts b/apps/server/src/persistence/Layers/ProjectionThreadSessions.ts index 2499eba196..d45cd5ecd7 100644 --- a/apps/server/src/persistence/Layers/ProjectionThreadSessions.ts +++ b/apps/server/src/persistence/Layers/ProjectionThreadSessions.ts @@ -23,6 +23,7 @@ const makeProjectionThreadSessionRepository = Effect.gen(function* () { thread_id, status, provider_name, + provider_session_id, runtime_mode, active_turn_id, last_error, @@ -32,6 +33,7 @@ const makeProjectionThreadSessionRepository = Effect.gen(function* () { ${row.threadId}, ${row.status}, ${row.providerName}, + ${row.providerSessionId}, ${row.runtimeMode}, ${row.activeTurnId}, ${row.lastError}, @@ -41,6 +43,7 @@ const makeProjectionThreadSessionRepository = Effect.gen(function* () { DO UPDATE SET status = excluded.status, provider_name = excluded.provider_name, + provider_session_id = excluded.provider_session_id, runtime_mode = excluded.runtime_mode, 
active_turn_id = excluded.active_turn_id, last_error = excluded.last_error, @@ -57,6 +60,7 @@ const makeProjectionThreadSessionRepository = Effect.gen(function* () { thread_id AS "threadId", status, provider_name AS "providerName", + provider_session_id AS "providerSessionId", runtime_mode AS "runtimeMode", active_turn_id AS "activeTurnId", last_error AS "lastError", diff --git a/apps/server/src/persistence/Layers/ProjectionThreads.ts b/apps/server/src/persistence/Layers/ProjectionThreads.ts index 10192697d0..7042380490 100644 --- a/apps/server/src/persistence/Layers/ProjectionThreads.ts +++ b/apps/server/src/persistence/Layers/ProjectionThreads.ts @@ -28,6 +28,7 @@ const makeProjectionThreadRepository = Effect.gen(function* () { interaction_mode, branch, worktree_path, + jira_ticket_json, latest_turn_id, created_at, updated_at, @@ -42,6 +43,7 @@ const makeProjectionThreadRepository = Effect.gen(function* () { ${row.interactionMode}, ${row.branch}, ${row.worktreePath}, + ${row.jiraTicketJson}, ${row.latestTurnId}, ${row.createdAt}, ${row.updatedAt}, @@ -56,6 +58,7 @@ const makeProjectionThreadRepository = Effect.gen(function* () { interaction_mode = excluded.interaction_mode, branch = excluded.branch, worktree_path = excluded.worktree_path, + jira_ticket_json = excluded.jira_ticket_json, latest_turn_id = excluded.latest_turn_id, created_at = excluded.created_at, updated_at = excluded.updated_at, @@ -77,6 +80,7 @@ const makeProjectionThreadRepository = Effect.gen(function* () { interaction_mode AS "interactionMode", branch, worktree_path AS "worktreePath", + jira_ticket_json AS "jiraTicketJson", latest_turn_id AS "latestTurnId", created_at AS "createdAt", updated_at AS "updatedAt", @@ -100,6 +104,7 @@ const makeProjectionThreadRepository = Effect.gen(function* () { interaction_mode AS "interactionMode", branch, worktree_path AS "worktreePath", + jira_ticket_json AS "jiraTicketJson", latest_turn_id AS "latestTurnId", created_at AS "createdAt", updated_at AS 
"updatedAt", diff --git a/apps/server/src/persistence/Layers/ReviewCommentRepository.ts b/apps/server/src/persistence/Layers/ReviewCommentRepository.ts new file mode 100644 index 0000000000..5d871cfb63 --- /dev/null +++ b/apps/server/src/persistence/Layers/ReviewCommentRepository.ts @@ -0,0 +1,218 @@ +import * as SqlClient from "effect/unstable/sql/SqlClient"; +import * as SqlSchema from "effect/unstable/sql/SqlSchema"; +import { + ReviewComment, + ReviewCommentDeleteInput, + ReviewCommentListInput, + ReviewCommentUpdateInput, + PositiveInt, + ThreadId, + TrimmedNonEmptyString, + ReviewCommentSeverity, +} from "@t3tools/contracts"; +import { Effect, Layer, Schema } from "effect"; + +import { toPersistenceDecodeError, toPersistenceSqlError } from "../Errors.ts"; + +import { + ReviewCommentRepository, + type ReviewCommentRepositoryShape, + type ReviewCommentRepositoryError, +} from "../Services/ReviewCommentRepository.ts"; + +/** + * DB row schema: end_line comes back as number | null from SQLite, + * so we map the optional field to NullOr for the database representation. + */ +const ReviewCommentDbRowSchema = Schema.Struct({ + id: TrimmedNonEmptyString, + threadId: ThreadId, + file: TrimmedNonEmptyString, + startLine: PositiveInt, + endLine: Schema.NullOr(PositiveInt), + body: TrimmedNonEmptyString, + severity: ReviewCommentSeverity, + createdAt: Schema.String, + updatedAt: Schema.String, + publishedAt: Schema.NullOr(Schema.String), + publishedUrl: Schema.NullOr(Schema.String), +}); + +function toPersistenceSqlOrDecodeError(sqlOperation: string, decodeOperation: string) { + return (cause: unknown): ReviewCommentRepositoryError => + Schema.isSchemaError(cause) + ? 
toPersistenceDecodeError(decodeOperation)(cause) + : toPersistenceSqlError(sqlOperation)(cause); +} + +const makeReviewCommentRepository = Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + const insertReviewCommentRow = SqlSchema.void({ + Request: ReviewComment, + execute: (row) => + sql` + INSERT INTO review_comments ( + id, + thread_id, + file, + start_line, + end_line, + body, + severity, + created_at, + updated_at + ) + VALUES ( + ${row.id}, + ${row.threadId}, + ${row.file}, + ${row.startLine}, + ${row.endLine ?? null}, + ${row.body}, + ${row.severity}, + ${row.createdAt}, + ${row.updatedAt} + ) + `, + }); + + const updateReviewCommentRow = SqlSchema.void({ + Request: ReviewCommentUpdateInput, + execute: (input) => + sql` + UPDATE review_comments + SET + body = COALESCE(${input.body ?? null}, body), + severity = COALESCE(${input.severity ?? null}, severity), + published_at = COALESCE(${input.publishedAt ?? null}, published_at), + published_url = COALESCE(${input.publishedUrl ?? 
null}, published_url), + updated_at = ${new Date().toISOString()} + WHERE id = ${input.id} + `, + }); + + const deleteReviewCommentRow = SqlSchema.void({ + Request: ReviewCommentDeleteInput, + execute: ({ id }) => + sql` + DELETE FROM review_comments + WHERE id = ${id} + `, + }); + + const listReviewCommentRows = SqlSchema.findAll({ + Request: ReviewCommentListInput, + Result: ReviewCommentDbRowSchema, + execute: ({ threadId }) => + sql` + SELECT + id, + thread_id AS "threadId", + file, + start_line AS "startLine", + end_line AS "endLine", + body, + severity, + created_at AS "createdAt", + updated_at AS "updatedAt", + published_at AS "publishedAt", + published_url AS "publishedUrl" + FROM review_comments + WHERE thread_id = ${threadId} + ORDER BY file ASC, start_line ASC + `, + }); + + const deleteByThreadIdRows = SqlSchema.void({ + Request: ReviewCommentListInput, + execute: ({ threadId }) => + sql` + DELETE FROM review_comments + WHERE thread_id = ${threadId} + `, + }); + + const add: ReviewCommentRepositoryShape["add"] = (input) => { + const now = new Date().toISOString(); + const row: typeof ReviewComment.Type = { + id: crypto.randomUUID() as typeof TrimmedNonEmptyString.Type, + threadId: input.threadId, + file: input.file, + startLine: input.startLine, + ...(input.endLine !== undefined ? 
{ endLine: input.endLine } : {}), + body: input.body, + severity: input.severity, + createdAt: now, + updatedAt: now, + }; + + return insertReviewCommentRow(row).pipe( + Effect.mapError( + toPersistenceSqlOrDecodeError( + "ReviewCommentRepository.add:query", + "ReviewCommentRepository.add:encodeRequest", + ), + ), + Effect.map(() => row), + ); + }; + + const update: ReviewCommentRepositoryShape["update"] = (input) => + updateReviewCommentRow(input).pipe( + Effect.mapError( + toPersistenceSqlOrDecodeError( + "ReviewCommentRepository.update:query", + "ReviewCommentRepository.update:encodeRequest", + ), + ), + ); + + const del: ReviewCommentRepositoryShape["delete"] = (input) => + deleteReviewCommentRow(input).pipe( + Effect.mapError(toPersistenceSqlError("ReviewCommentRepository.delete:query")), + ); + + const listByThreadId: ReviewCommentRepositoryShape["listByThreadId"] = (input) => + listReviewCommentRows(input).pipe( + Effect.mapError( + toPersistenceSqlOrDecodeError( + "ReviewCommentRepository.listByThreadId:query", + "ReviewCommentRepository.listByThreadId:decodeRows", + ), + ), + Effect.map((rows) => + rows.map((row) => ({ + id: row.id, + threadId: row.threadId, + file: row.file, + startLine: row.startLine, + ...(row.endLine !== null ? { endLine: row.endLine } : {}), + body: row.body, + severity: row.severity, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + ...(row.publishedAt !== null ? { publishedAt: row.publishedAt } : {}), + ...(row.publishedUrl !== null ? 
{ publishedUrl: row.publishedUrl } : {}), + })), + ), + ); + + const deleteByThreadId: ReviewCommentRepositoryShape["deleteByThreadId"] = (input) => + deleteByThreadIdRows(input).pipe( + Effect.mapError(toPersistenceSqlError("ReviewCommentRepository.deleteByThreadId:query")), + ); + + return { + add, + update, + delete: del, + listByThreadId, + deleteByThreadId, + } satisfies ReviewCommentRepositoryShape; +}); + +export const ReviewCommentRepositoryLive = Layer.effect( + ReviewCommentRepository, + makeReviewCommentRepository, +); diff --git a/apps/server/src/persistence/Layers/ReviewRequestRepository.ts b/apps/server/src/persistence/Layers/ReviewRequestRepository.ts new file mode 100644 index 0000000000..2781a27ccf --- /dev/null +++ b/apps/server/src/persistence/Layers/ReviewRequestRepository.ts @@ -0,0 +1,333 @@ +import * as SqlClient from "effect/unstable/sql/SqlClient"; +import * as SqlSchema from "effect/unstable/sql/SqlSchema"; +import { + ReviewRequest, + ReviewRequestStatus, + PositiveInt, + ThreadId, + TrimmedNonEmptyString, +} from "@t3tools/contracts"; +import { Effect, Layer, Schema } from "effect"; + +import { toPersistenceDecodeError, toPersistenceSqlError } from "../Errors.ts"; + +import { + ReviewRequestRepository, + type ReviewRequestRepositoryShape, + type ReviewRequestRepositoryError, +} from "../Services/ReviewRequestRepository.ts"; + +/** + * DB row schema: thread_id comes back as string | null from SQLite, + * so we map the optional field to NullOr for the database representation. 
+ */ +const ReviewRequestDbRowSchema = Schema.Struct({ + id: TrimmedNonEmptyString, + prUrl: Schema.String, + prNumber: PositiveInt, + prTitle: TrimmedNonEmptyString, + repoNameWithOwner: TrimmedNonEmptyString, + authorLogin: TrimmedNonEmptyString, + isBot: Schema.Number, + status: ReviewRequestStatus, + threadId: Schema.NullOr(ThreadId), + prBody: Schema.NullOr(Schema.String), + prLabels: Schema.String, // JSON string in DB + createdAt: Schema.String, + updatedAt: Schema.String, +}); + +function toPersistenceSqlOrDecodeError(sqlOperation: string, decodeOperation: string) { + return (cause: unknown): ReviewRequestRepositoryError => + Schema.isSchemaError(cause) + ? toPersistenceDecodeError(decodeOperation)(cause) + : toPersistenceSqlError(sqlOperation)(cause); +} + +const dbRowToReviewRequest = (row: typeof ReviewRequestDbRowSchema.Type): ReviewRequest => { + let parsedLabels: string[] = []; + try { + const parsed: unknown = JSON.parse(row.prLabels); + if (Array.isArray(parsed)) { + parsedLabels = parsed.filter((v): v is string => typeof v === "string"); + } + } catch { + // Fallback to empty array on invalid JSON + } + + return { + id: row.id, + prUrl: row.prUrl, + prNumber: row.prNumber, + prTitle: row.prTitle, + repoNameWithOwner: row.repoNameWithOwner, + authorLogin: row.authorLogin, + isBot: row.isBot !== 0, + status: row.status, + ...(row.threadId !== null ? { threadId: row.threadId } : {}), + ...(row.prBody !== null ? { prBody: row.prBody } : {}), + ...(parsedLabels.length > 0 ? 
{ prLabels: parsedLabels } : {}), + createdAt: row.createdAt, + updatedAt: row.updatedAt, + }; +}; + +const makeReviewRequestRepository = Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + const InsertRequestSchema = Schema.Struct({ + id: TrimmedNonEmptyString, + prUrl: Schema.String, + prNumber: PositiveInt, + prTitle: TrimmedNonEmptyString, + repoNameWithOwner: TrimmedNonEmptyString, + authorLogin: TrimmedNonEmptyString, + isBot: Schema.Number, + status: ReviewRequestStatus, + prBody: Schema.NullOr(Schema.String), + prLabels: Schema.String, + createdAt: Schema.String, + updatedAt: Schema.String, + }); + + const insertOrReplaceRow = SqlSchema.void({ + Request: InsertRequestSchema, + execute: (row) => + sql` + INSERT INTO review_requests ( + id, + pr_url, + pr_number, + pr_title, + repo_name_with_owner, + author_login, + is_bot, + status, + pr_body, + pr_labels, + created_at, + updated_at + ) + VALUES ( + ${row.id}, + ${row.prUrl}, + ${row.prNumber}, + ${row.prTitle}, + ${row.repoNameWithOwner}, + ${row.authorLogin}, + ${row.isBot}, + ${row.status}, + ${row.prBody}, + ${row.prLabels}, + ${row.createdAt}, + ${row.updatedAt} + ) + ON CONFLICT(pr_url) DO UPDATE SET + pr_number = ${row.prNumber}, + pr_title = ${row.prTitle}, + author_login = ${row.authorLogin}, + is_bot = ${row.isBot}, + status = CASE WHEN review_requests.status IN ('dismissed', 'approved', 'changes_requested') THEN 'pending' ELSE review_requests.status END, + pr_body = ${row.prBody}, + pr_labels = ${row.prLabels}, + updated_at = ${row.updatedAt} + `, + }); + + const UpdateStatusRequestSchema = Schema.Struct({ + id: TrimmedNonEmptyString, + status: ReviewRequestStatus, + threadId: Schema.NullOr(Schema.String), + updatedAt: Schema.String, + }); + + const updateStatusRow = SqlSchema.void({ + Request: UpdateStatusRequestSchema, + execute: (input) => + sql` + UPDATE review_requests + SET + status = ${input.status}, + thread_id = COALESCE(${input.threadId}, thread_id), + updated_at = 
${input.updatedAt} + WHERE id = ${input.id} + `, + }); + + const listActiveRows = SqlSchema.findAll({ + Request: Schema.Struct({}), + Result: ReviewRequestDbRowSchema, + execute: () => + sql` + SELECT + id, + pr_url AS "prUrl", + pr_number AS "prNumber", + pr_title AS "prTitle", + repo_name_with_owner AS "repoNameWithOwner", + author_login AS "authorLogin", + is_bot AS "isBot", + status, + thread_id AS "threadId", + pr_body AS "prBody", + pr_labels AS "prLabels", + created_at AS "createdAt", + updated_at AS "updatedAt" + FROM review_requests + ORDER BY + CASE status + WHEN 'in_review' THEN 0 + WHEN 'pending' THEN 1 + WHEN 'approved' THEN 2 + WHEN 'changes_requested' THEN 3 + WHEN 'dismissed' THEN 4 + END, + updated_at DESC + `, + }); + + const findByPrUrl = SqlSchema.findAll({ + Request: Schema.Struct({ prUrl: Schema.String }), + Result: ReviewRequestDbRowSchema, + execute: ({ prUrl }) => + sql` + SELECT + id, + pr_url AS "prUrl", + pr_number AS "prNumber", + pr_title AS "prTitle", + repo_name_with_owner AS "repoNameWithOwner", + author_login AS "authorLogin", + is_bot AS "isBot", + status, + thread_id AS "threadId", + pr_body AS "prBody", + pr_labels AS "prLabels", + created_at AS "createdAt", + updated_at AS "updatedAt" + FROM review_requests + WHERE pr_url = ${prUrl} + LIMIT 1 + `, + }); + + const upsert: ReviewRequestRepositoryShape["upsert"] = (input) => { + const now = new Date().toISOString(); + const id = crypto.randomUUID() as typeof TrimmedNonEmptyString.Type; + + const row = { + id, + prUrl: input.prUrl, + prNumber: input.prNumber as typeof PositiveInt.Type, + prTitle: input.prTitle as typeof TrimmedNonEmptyString.Type, + repoNameWithOwner: input.repoNameWithOwner as typeof TrimmedNonEmptyString.Type, + authorLogin: input.authorLogin as typeof TrimmedNonEmptyString.Type, + isBot: input.isBot ? 1 : 0, + status: "pending" as typeof ReviewRequestStatus.Type, + prBody: input.prBody ?? null, + prLabels: JSON.stringify(input.prLabels ?? 
[]), + createdAt: now, + updatedAt: now, + }; + + return insertOrReplaceRow(row).pipe( + Effect.mapError( + toPersistenceSqlOrDecodeError( + "ReviewRequestRepository.upsert:query", + "ReviewRequestRepository.upsert:encodeRequest", + ), + ), + Effect.flatMap(() => + findByPrUrl({ prUrl: input.prUrl }).pipe( + Effect.mapError( + toPersistenceSqlOrDecodeError( + "ReviewRequestRepository.upsert:findBack", + "ReviewRequestRepository.upsert:decodeFindBack", + ), + ), + Effect.map((rows) => dbRowToReviewRequest(rows[0]!)), + ), + ), + ); + }; + + const updateStatus: ReviewRequestRepositoryShape["updateStatus"] = (input) => + updateStatusRow({ + id: input.id as typeof TrimmedNonEmptyString.Type, + status: input.status, + threadId: input.threadId ?? null, + updatedAt: new Date().toISOString(), + }).pipe( + Effect.mapError( + toPersistenceSqlOrDecodeError( + "ReviewRequestRepository.updateStatus:query", + "ReviewRequestRepository.updateStatus:encodeRequest", + ), + ), + ); + + const listActive: ReviewRequestRepositoryShape["listActive"] = () => + listActiveRows({}).pipe( + Effect.mapError( + toPersistenceSqlOrDecodeError( + "ReviewRequestRepository.listActive:query", + "ReviewRequestRepository.listActive:decodeRows", + ), + ), + Effect.map((rows) => rows.map(dbRowToReviewRequest)), + ); + + const dismissStale: ReviewRequestRepositoryShape["dismissStale"] = (activeUrls) => { + if (activeUrls.length === 0) { + // No active URLs means dismiss all pending requests (PRs closed/merged) + return sql` + UPDATE review_requests + SET status = 'dismissed', updated_at = ${new Date().toISOString()} + WHERE status = 'pending' + `.pipe( + Effect.mapError(toPersistenceSqlError("ReviewRequestRepository.dismissStale:query")), + Effect.asVoid, + ); + } + + const now = new Date().toISOString(); + // Dismiss pending requests no longer in GitHub results (PR merged/closed/review removed) + return sql` + UPDATE review_requests + SET status = 'dismissed', updated_at = ${now} + WHERE status = 
'pending' + AND pr_url NOT IN ${sql.in(activeUrls)} + `.pipe( + Effect.mapError(toPersistenceSqlError("ReviewRequestRepository.dismissStale:query")), + Effect.asVoid, + ); + }; + + const unlinkDeletedThreads: ReviewRequestRepositoryShape["unlinkDeletedThreads"] = () => + sql` + UPDATE review_requests + SET thread_id = NULL, + status = CASE WHEN status = 'in_review' THEN 'pending' ELSE status END, + updated_at = ${new Date().toISOString()} + WHERE thread_id IS NOT NULL + AND thread_id IN ( + SELECT thread_id FROM projection_threads WHERE deleted_at IS NOT NULL + ) + `.pipe( + Effect.mapError(toPersistenceSqlError("ReviewRequestRepository.unlinkDeletedThreads:query")), + Effect.asVoid, + ); + + return { + upsert, + updateStatus, + listActive, + dismissStale, + unlinkDeletedThreads, + } satisfies ReviewRequestRepositoryShape; +}); + +export const ReviewRequestRepositoryLive = Layer.effect( + ReviewRequestRepository, + makeReviewRequestRepository, +); diff --git a/apps/server/src/persistence/Migrations.ts b/apps/server/src/persistence/Migrations.ts index 7deb890dd8..1f1a0d80b2 100644 --- a/apps/server/src/persistence/Migrations.ts +++ b/apps/server/src/persistence/Migrations.ts @@ -25,6 +25,14 @@ import Migration0010 from "./Migrations/010_ProjectionThreadsRuntimeMode.ts"; import Migration0011 from "./Migrations/011_OrchestrationThreadCreatedRuntimeMode.ts"; import Migration0012 from "./Migrations/012_ProjectionThreadsInteractionMode.ts"; import Migration0013 from "./Migrations/013_ProjectionThreadProposedPlans.ts"; +import Migration0014 from "./Migrations/014_ProjectionThreadActivityParentAndItemId.ts"; +import Migration0015 from "./Migrations/015_ProjectionThreadsJiraTicket.ts"; +import Migration0016 from "./Migrations/016_ReviewComments.ts"; +import Migration0017 from "./Migrations/017_ReviewRequests.ts"; +import Migration0018 from "./Migrations/018_ReviewRequestsPrMeta.ts"; +import Migration0019 from "./Migrations/019_Memories.ts"; +import Migration0020 from 
"./Migrations/020_MemoriesDateColumn.ts"; +import Migration0021 from "./Migrations/021_MemoryScopeThread.ts"; import { Effect } from "effect"; /** @@ -51,6 +59,14 @@ const loader = Migrator.fromRecord({ "11_OrchestrationThreadCreatedRuntimeMode": Migration0011, "12_ProjectionThreadsInteractionMode": Migration0012, "13_ProjectionThreadProposedPlans": Migration0013, + "14_ProjectionThreadActivityParentAndItemId": Migration0014, + "15_ProjectionThreadsJiraTicket": Migration0015, + "16_ReviewComments": Migration0016, + "17_ReviewRequests": Migration0017, + "18_ReviewRequestsPrMeta": Migration0018, + "19_Memories": Migration0019, + "20_MemoriesDateColumn": Migration0020, + "21_MemoryScopeThread": Migration0021, }); /** diff --git a/apps/server/src/persistence/Migrations/014_ProjectionThreadActivityParentAndItemId.ts b/apps/server/src/persistence/Migrations/014_ProjectionThreadActivityParentAndItemId.ts new file mode 100644 index 0000000000..7b76bbdd12 --- /dev/null +++ b/apps/server/src/persistence/Migrations/014_ProjectionThreadActivityParentAndItemId.ts @@ -0,0 +1,16 @@ +import * as Effect from "effect/Effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +export default Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + yield* sql` + ALTER TABLE projection_thread_activities + ADD COLUMN parent_tool_use_id TEXT + `; + + yield* sql` + ALTER TABLE projection_thread_activities + ADD COLUMN item_id TEXT + `; +}); diff --git a/apps/server/src/persistence/Migrations/015_ProjectionThreadsJiraTicket.ts b/apps/server/src/persistence/Migrations/015_ProjectionThreadsJiraTicket.ts new file mode 100644 index 0000000000..f34371a110 --- /dev/null +++ b/apps/server/src/persistence/Migrations/015_ProjectionThreadsJiraTicket.ts @@ -0,0 +1,11 @@ +import * as Effect from "effect/Effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +export default Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + yield* sql` + 
ALTER TABLE projection_threads + ADD COLUMN jira_ticket_json TEXT DEFAULT NULL + `; +}); diff --git a/apps/server/src/persistence/Migrations/016_ReviewComments.ts b/apps/server/src/persistence/Migrations/016_ReviewComments.ts new file mode 100644 index 0000000000..c1f82a1677 --- /dev/null +++ b/apps/server/src/persistence/Migrations/016_ReviewComments.ts @@ -0,0 +1,27 @@ +import * as Effect from "effect/Effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +export default Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + yield* sql` + CREATE TABLE IF NOT EXISTS review_comments ( + id TEXT PRIMARY KEY, + thread_id TEXT NOT NULL, + file TEXT NOT NULL, + start_line INTEGER NOT NULL, + end_line INTEGER, + body TEXT NOT NULL, + severity TEXT NOT NULL DEFAULT 'info', + created_at TEXT NOT NULL, + updated_at TEXT NOT NULL, + published_at TEXT DEFAULT NULL, + published_url TEXT DEFAULT NULL + ) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_review_comments_thread + ON review_comments(thread_id) + `; +}); diff --git a/apps/server/src/persistence/Migrations/017_ReviewRequests.ts b/apps/server/src/persistence/Migrations/017_ReviewRequests.ts new file mode 100644 index 0000000000..d9263b72fa --- /dev/null +++ b/apps/server/src/persistence/Migrations/017_ReviewRequests.ts @@ -0,0 +1,27 @@ +import * as Effect from "effect/Effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +export default Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + yield* sql` + CREATE TABLE IF NOT EXISTS review_requests ( + id TEXT PRIMARY KEY, + pr_url TEXT NOT NULL UNIQUE, + pr_number INTEGER NOT NULL, + pr_title TEXT NOT NULL, + repo_name_with_owner TEXT NOT NULL, + author_login TEXT NOT NULL, + is_bot INTEGER NOT NULL DEFAULT 0, + status TEXT NOT NULL DEFAULT 'pending', + thread_id TEXT DEFAULT NULL, + created_at TEXT NOT NULL, + updated_at TEXT NOT NULL + ) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS 
idx_review_requests_status + ON review_requests(status) + `; +}); diff --git a/apps/server/src/persistence/Migrations/018_ReviewRequestsPrMeta.ts b/apps/server/src/persistence/Migrations/018_ReviewRequestsPrMeta.ts new file mode 100644 index 0000000000..6b6b01a2f0 --- /dev/null +++ b/apps/server/src/persistence/Migrations/018_ReviewRequestsPrMeta.ts @@ -0,0 +1,9 @@ +import * as Effect from "effect/Effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +export default Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + yield* sql`ALTER TABLE review_requests ADD COLUMN pr_body TEXT DEFAULT NULL`; + yield* sql`ALTER TABLE review_requests ADD COLUMN pr_labels TEXT NOT NULL DEFAULT '[]'`; +}); diff --git a/apps/server/src/persistence/Migrations/019_Memories.ts b/apps/server/src/persistence/Migrations/019_Memories.ts new file mode 100644 index 0000000000..a45e938502 --- /dev/null +++ b/apps/server/src/persistence/Migrations/019_Memories.ts @@ -0,0 +1,91 @@ +import * as Effect from "effect/Effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +export default Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + yield* sql` + CREATE TABLE IF NOT EXISTS projection_memories ( + memory_id TEXT PRIMARY KEY, + project_id TEXT, + scope TEXT NOT NULL CHECK (scope IN ('project', 'global', 'daily')), + category TEXT NOT NULL CHECK (category IN ('preference', 'pattern', 'decision', 'fact', 'convention')), + source TEXT NOT NULL CHECK (source IN ('auto', 'manual')), + content TEXT NOT NULL, + title TEXT NOT NULL, + date TEXT, + source_thread_id TEXT, + source_turn_id TEXT, + relevance_score REAL NOT NULL DEFAULT 1.0, + access_count INTEGER NOT NULL DEFAULT 0, + last_accessed_at TEXT, + created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')), + updated_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')), + archived_at TEXT, + FOREIGN KEY (project_id) REFERENCES 
projection_projects(project_id) + ) + `; + + yield* sql` + CREATE VIRTUAL TABLE IF NOT EXISTS projection_memories_fts USING fts5( + memory_id UNINDEXED, + title, + content, + category, + content=projection_memories, + content_rowid=rowid + ) + `; + + yield* sql` + CREATE TRIGGER IF NOT EXISTS memories_ai AFTER INSERT ON projection_memories BEGIN + INSERT INTO projection_memories_fts(rowid, memory_id, title, content, category) + VALUES (new.rowid, new.memory_id, new.title, new.content, new.category); + END + `; + + yield* sql` + CREATE TRIGGER IF NOT EXISTS memories_ad AFTER DELETE ON projection_memories BEGIN + INSERT INTO projection_memories_fts(projection_memories_fts, rowid, memory_id, title, content, category) + VALUES ('delete', old.rowid, old.memory_id, old.title, old.content, old.category); + END + `; + + yield* sql` + CREATE TRIGGER IF NOT EXISTS memories_au AFTER UPDATE ON projection_memories BEGIN + INSERT INTO projection_memories_fts(projection_memories_fts, rowid, memory_id, title, content, category) + VALUES ('delete', old.rowid, old.memory_id, old.title, old.content, old.category); + INSERT INTO projection_memories_fts(rowid, memory_id, title, content, category) + VALUES (new.rowid, new.memory_id, new.title, new.content, new.category); + END + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memories_project + ON projection_memories(project_id) WHERE project_id IS NOT NULL + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memories_scope + ON projection_memories(scope) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memories_archived + ON projection_memories(archived_at) + `; + + /* Composite covering index for the most common query pattern: + list active memories for a project, ordered by recency. */ + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memories_project_active + ON projection_memories(project_id, archived_at, updated_at DESC) + `; + + /* Index for daily-scoped memories filtered by date. 
*/ + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memories_daily_date + ON projection_memories(project_id, date DESC) + WHERE scope = 'daily' + `; +}); diff --git a/apps/server/src/persistence/Migrations/020_MemoriesDateColumn.ts b/apps/server/src/persistence/Migrations/020_MemoriesDateColumn.ts new file mode 100644 index 0000000000..384f0256bb --- /dev/null +++ b/apps/server/src/persistence/Migrations/020_MemoriesDateColumn.ts @@ -0,0 +1,117 @@ +import * as Effect from "effect/Effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +export default Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + // Add the date column for daily-scoped memories + yield* sql`ALTER TABLE projection_memories ADD COLUMN date TEXT`; + + // The table created in migration 019 declared + // CHECK (scope IN ('project', 'global')), which rejects the new 'daily' + // scope: SQLite enforces CHECK constraints on every INSERT/UPDATE. + // SQLite has no ALTER TABLE ... DROP CONSTRAINT, so the constraint + // cannot be relaxed in place. The standard workaround is a full table + // rebuild: CREATE a new table with the widened constraint, copy all + // rows across, DROP the old table, and RENAME the new one into place. + // The indexes, the FTS5 shadow table, and its triggers reference the + // old table, so they are recreated below as well. + // (The ADD COLUMN above is strictly redundant — the rebuild recreates + // the column and the copy selects NULL for it — but it is harmless.) 
+ yield* sql`CREATE TABLE IF NOT EXISTS projection_memories_new ( + memory_id TEXT PRIMARY KEY, + project_id TEXT, + scope TEXT NOT NULL CHECK (scope IN ('project', 'global', 'daily')), + category TEXT NOT NULL CHECK (category IN ('preference', 'pattern', 'decision', 'fact', 'convention')), + source TEXT NOT NULL CHECK (source IN ('auto', 'manual')), + content TEXT NOT NULL, + title TEXT NOT NULL, + date TEXT, + source_thread_id TEXT, + source_turn_id TEXT, + relevance_score REAL NOT NULL DEFAULT 1.0, + access_count INTEGER NOT NULL DEFAULT 0, + last_accessed_at TEXT, + created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')), + updated_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')), + archived_at TEXT, + FOREIGN KEY (project_id) REFERENCES projection_projects(project_id) + )`; + + yield* sql`INSERT INTO projection_memories_new SELECT + memory_id, project_id, scope, category, source, content, title, + NULL as date, + source_thread_id, source_turn_id, relevance_score, access_count, + last_accessed_at, created_at, updated_at, archived_at + FROM projection_memories`; + + yield* sql`DROP TABLE projection_memories`; + yield* sql`ALTER TABLE projection_memories_new RENAME TO projection_memories`; + + // Recreate indexes + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memories_project + ON projection_memories(project_id) WHERE project_id IS NOT NULL + `; + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memories_scope + ON projection_memories(scope) + `; + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memories_archived + ON projection_memories(archived_at) + `; + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memories_project_active + ON projection_memories(project_id, archived_at, updated_at DESC) + `; + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memories_daily_date + ON projection_memories(project_id, date DESC) + WHERE scope = 'daily' + `; + + // Recreate FTS5 virtual table and triggers + yield* sql`DROP TABLE IF EXISTS 
projection_memories_fts`; + yield* sql` + CREATE VIRTUAL TABLE IF NOT EXISTS projection_memories_fts USING fts5( + memory_id UNINDEXED, + title, + content, + category, + content=projection_memories, + content_rowid=rowid + ) + `; + + // Re-populate FTS + yield* sql`INSERT INTO projection_memories_fts(rowid, memory_id, title, content, category) + SELECT rowid, memory_id, title, content, category FROM projection_memories`; + + // Recreate triggers + yield* sql`DROP TRIGGER IF EXISTS memories_ai`; + yield* sql`DROP TRIGGER IF EXISTS memories_ad`; + yield* sql`DROP TRIGGER IF EXISTS memories_au`; + + yield* sql` + CREATE TRIGGER IF NOT EXISTS memories_ai AFTER INSERT ON projection_memories BEGIN + INSERT INTO projection_memories_fts(rowid, memory_id, title, content, category) + VALUES (new.rowid, new.memory_id, new.title, new.content, new.category); + END + `; + yield* sql` + CREATE TRIGGER IF NOT EXISTS memories_ad AFTER DELETE ON projection_memories BEGIN + INSERT INTO projection_memories_fts(projection_memories_fts, rowid, memory_id, title, content, category) + VALUES ('delete', old.rowid, old.memory_id, old.title, old.content, old.category); + END + `; + yield* sql` + CREATE TRIGGER IF NOT EXISTS memories_au AFTER UPDATE ON projection_memories BEGIN + INSERT INTO projection_memories_fts(projection_memories_fts, rowid, memory_id, title, content, category) + VALUES ('delete', old.rowid, old.memory_id, old.title, old.content, old.category); + INSERT INTO projection_memories_fts(rowid, memory_id, title, content, category) + VALUES (new.rowid, new.memory_id, new.title, new.content, new.category); + END + `; +}); diff --git a/apps/server/src/persistence/Migrations/021_MemoryScopeThread.ts b/apps/server/src/persistence/Migrations/021_MemoryScopeThread.ts new file mode 100644 index 0000000000..1fb2b53eca --- /dev/null +++ b/apps/server/src/persistence/Migrations/021_MemoryScopeThread.ts @@ -0,0 +1,116 @@ +import * as Effect from "effect/Effect"; +import * as SqlClient from 
"effect/unstable/sql/SqlClient"; + +export default Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + // Recreate the table with updated scope CHECK constraint and new thread_id column + yield* sql`CREATE TABLE IF NOT EXISTS projection_memories_new ( + memory_id TEXT PRIMARY KEY, + project_id TEXT, + thread_id TEXT, + scope TEXT NOT NULL CHECK (scope IN ('project', 'thread', 'daily')), + category TEXT NOT NULL CHECK (category IN ('preference', 'pattern', 'decision', 'fact', 'convention')), + source TEXT NOT NULL CHECK (source IN ('auto', 'manual')), + content TEXT NOT NULL, + title TEXT NOT NULL, + date TEXT, + source_thread_id TEXT, + source_turn_id TEXT, + relevance_score REAL NOT NULL DEFAULT 1.0, + access_count INTEGER NOT NULL DEFAULT 0, + last_accessed_at TEXT, + created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')), + updated_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')), + archived_at TEXT, + FOREIGN KEY (project_id) REFERENCES projection_projects(project_id) + )`; + + // Migrate data — convert 'global' scope to 'project' + yield* sql`INSERT INTO projection_memories_new SELECT + memory_id, project_id, + NULL as thread_id, + CASE WHEN scope = 'global' THEN 'project' ELSE scope END as scope, + category, source, content, title, date, + source_thread_id, source_turn_id, relevance_score, access_count, + last_accessed_at, created_at, updated_at, archived_at + FROM projection_memories`; + + yield* sql`DROP TABLE projection_memories`; + yield* sql`ALTER TABLE projection_memories_new RENAME TO projection_memories`; + + // Recreate indexes + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memories_project + ON projection_memories(project_id) WHERE project_id IS NOT NULL + `; + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memories_scope + ON projection_memories(scope) + `; + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memories_archived + ON projection_memories(archived_at) + `; + yield* sql` + CREATE INDEX IF 
NOT EXISTS idx_memories_project_active + ON projection_memories(project_id, archived_at, updated_at DESC) + `; + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memories_daily_date + ON projection_memories(project_id, date DESC) + WHERE scope = 'daily' + `; + // Thread-specific indexes + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memories_thread_id + ON projection_memories(thread_id) WHERE thread_id IS NOT NULL + `; + yield* sql` + CREATE UNIQUE INDEX IF NOT EXISTS idx_memories_thread_unique + ON projection_memories(thread_id) WHERE scope = 'thread' + `; + + // Recreate FTS5 virtual table and triggers + yield* sql`DROP TABLE IF EXISTS projection_memories_fts`; + yield* sql` + CREATE VIRTUAL TABLE IF NOT EXISTS projection_memories_fts USING fts5( + memory_id UNINDEXED, + title, + content, + category, + content=projection_memories, + content_rowid=rowid + ) + `; + + // Re-populate FTS + yield* sql`INSERT INTO projection_memories_fts(rowid, memory_id, title, content, category) + SELECT rowid, memory_id, title, content, category FROM projection_memories`; + + // Recreate triggers + yield* sql`DROP TRIGGER IF EXISTS memories_ai`; + yield* sql`DROP TRIGGER IF EXISTS memories_ad`; + yield* sql`DROP TRIGGER IF EXISTS memories_au`; + + yield* sql` + CREATE TRIGGER IF NOT EXISTS memories_ai AFTER INSERT ON projection_memories BEGIN + INSERT INTO projection_memories_fts(rowid, memory_id, title, content, category) + VALUES (new.rowid, new.memory_id, new.title, new.content, new.category); + END + `; + yield* sql` + CREATE TRIGGER IF NOT EXISTS memories_ad AFTER DELETE ON projection_memories BEGIN + INSERT INTO projection_memories_fts(projection_memories_fts, rowid, memory_id, title, content, category) + VALUES ('delete', old.rowid, old.memory_id, old.title, old.content, old.category); + END + `; + yield* sql` + CREATE TRIGGER IF NOT EXISTS memories_au AFTER UPDATE ON projection_memories BEGIN + INSERT INTO projection_memories_fts(projection_memories_fts, rowid, memory_id, title, 
content, category) + VALUES ('delete', old.rowid, old.memory_id, old.title, old.content, old.category); + INSERT INTO projection_memories_fts(rowid, memory_id, title, content, category) + VALUES (new.rowid, new.memory_id, new.title, new.content, new.category); + END + `; +}); diff --git a/apps/server/src/persistence/Services/MemoryRepository.ts b/apps/server/src/persistence/Services/MemoryRepository.ts new file mode 100644 index 0000000000..a77f89693b --- /dev/null +++ b/apps/server/src/persistence/Services/MemoryRepository.ts @@ -0,0 +1,137 @@ +/** + * MemoryRepository - Repository interface for memory entries. + * + * Owns persistence operations for user/auto-extracted memory entries. + * Supports full-text search via FTS5 and scoped retrieval. + * + * @module MemoryRepository + */ +import type { + Memory, + MemoryArchiveInput, + MemoryCreateInput, + MemoryDeleteInput, + MemoryGetForThreadInput, + MemoryListInput, + MemoryListResult, + MemorySearchInput, + MemoryUpdateInput, +} from "@t3tools/contracts"; +import { ServiceMap } from "effect"; +import type { Effect } from "effect"; + +import type { PersistenceDecodeError, PersistenceSqlError } from "../Errors.ts"; + +export type MemoryRepositoryError = PersistenceSqlError | PersistenceDecodeError; + +/** + * MemoryRepositoryShape - Service API for memory persistence. + */ +export interface MemoryRepositoryShape { + /** + * Insert a new memory entry. + * + * Generates `memoryId`, `createdAt`, and `updatedAt` server-side. + * Sets `source: "manual"` and `relevanceScore: 1.0` by default. + * Returns the full persisted Memory. + */ + readonly create: ( + input: typeof MemoryCreateInput.Type, + ) => Effect.Effect; + + /** + * Update an existing memory's content, title, category, or relevance. + * + * Automatically bumps `updatedAt`. + */ + readonly update: ( + input: typeof MemoryUpdateInput.Type, + ) => Effect.Effect; + + /** + * Soft-archive a memory by setting `archivedAt`. 
+ */ + readonly archive: ( + input: typeof MemoryArchiveInput.Type, + ) => Effect.Effect; + + /** + * Hard-delete a single memory by id. + */ + readonly delete: ( + input: typeof MemoryDeleteInput.Type, + ) => Effect.Effect; + + /** + * List all memories for a project (optionally including thread-scope memories). + * + * Returned ordered by updated_at descending. + */ + readonly listByProject: ( + input: typeof MemoryListInput.Type, + ) => Effect.Effect; + + /** + * Full-text search across memory title and content via FTS5. + * + * Returns results ranked by BM25 relevance. + */ + readonly search: ( + input: typeof MemorySearchInput.Type, + ) => Effect.Effect, MemoryRepositoryError>; + + /** + * Get memories relevant to a thread's project context. + * + * Combines project-scoped + thread-scoped memories, optionally filtered by query. + */ + readonly getRelevantForThread: ( + input: typeof MemoryGetForThreadInput.Type, + ) => Effect.Effect, MemoryRepositoryError>; + + /** + * Record an access to a memory (increment counter + update timestamp). + */ + readonly recordAccess: (memoryId: string) => Effect.Effect; + + /** + * List all daily-scope memories for a given date. + */ + readonly listDailyByDate: (input: { + date: string; + }) => Effect.Effect, MemoryRepositoryError>; + + /** + * Delete all daily-scope memories for a given date. + * Used to replace daily summaries idempotently on re-extraction. + */ + readonly deleteDailyByDate: (input: { + date: string; + }) => Effect.Effect; + + /** + * Find the thread-scope summary memory for a given thread, if it exists. + */ + readonly findThreadSummary: ( + threadId: string, + ) => Effect.Effect; + + /** + * Create or replace the thread summary memory for a given thread. + * + * Uses DELETE + INSERT to upsert based on the unique thread_id index (scope='thread'). 
+ */ + readonly upsertThreadSummary: (input: { + threadId: string; + projectId: string; + title: string; + content: string; + }) => Effect.Effect; +} + +/** + * MemoryRepository - Service tag for memory persistence. + */ +export class MemoryRepository extends ServiceMap.Service()( + "t3/persistence/Services/MemoryRepository", +) {} diff --git a/apps/server/src/persistence/Services/ProjectionThreadActivities.ts b/apps/server/src/persistence/Services/ProjectionThreadActivities.ts index 586ae3eb4a..4d9f4e7050 100644 --- a/apps/server/src/persistence/Services/ProjectionThreadActivities.ts +++ b/apps/server/src/persistence/Services/ProjectionThreadActivities.ts @@ -28,6 +28,8 @@ export const ProjectionThreadActivity = Schema.Struct({ summary: Schema.String, payload: Schema.Unknown, sequence: Schema.optional(NonNegativeInt), + parentToolUseId: Schema.NullOr(Schema.String), + itemId: Schema.NullOr(Schema.String), createdAt: IsoDateTime, }); export type ProjectionThreadActivity = typeof ProjectionThreadActivity.Type; diff --git a/apps/server/src/persistence/Services/ProjectionThreadSessions.ts b/apps/server/src/persistence/Services/ProjectionThreadSessions.ts index 537ee10bee..de1279bbb4 100644 --- a/apps/server/src/persistence/Services/ProjectionThreadSessions.ts +++ b/apps/server/src/persistence/Services/ProjectionThreadSessions.ts @@ -22,6 +22,7 @@ export const ProjectionThreadSession = Schema.Struct({ threadId: ThreadId, status: OrchestrationSessionStatus, providerName: Schema.NullOr(Schema.String), + providerSessionId: Schema.NullOr(Schema.String).pipe(Schema.withDecodingDefault(() => null)), runtimeMode: RuntimeMode, activeTurnId: Schema.NullOr(TurnId), lastError: Schema.NullOr(Schema.String), diff --git a/apps/server/src/persistence/Services/ProjectionThreads.ts b/apps/server/src/persistence/Services/ProjectionThreads.ts index 7a30870f2d..a2a712286f 100644 --- a/apps/server/src/persistence/Services/ProjectionThreads.ts +++ 
b/apps/server/src/persistence/Services/ProjectionThreads.ts @@ -28,6 +28,7 @@ export const ProjectionThread = Schema.Struct({ interactionMode: ProviderInteractionMode, branch: Schema.NullOr(Schema.String), worktreePath: Schema.NullOr(Schema.String), + jiraTicketJson: Schema.NullOr(Schema.String).pipe(Schema.withDecodingDefault(() => null)), latestTurnId: Schema.NullOr(TurnId), createdAt: IsoDateTime, updatedAt: IsoDateTime, diff --git a/apps/server/src/persistence/Services/ReviewCommentRepository.ts b/apps/server/src/persistence/Services/ReviewCommentRepository.ts new file mode 100644 index 0000000000..9b0a3fdb55 --- /dev/null +++ b/apps/server/src/persistence/Services/ReviewCommentRepository.ts @@ -0,0 +1,76 @@ +/** + * ReviewCommentRepository - Repository interface for code review comments. + * + * Owns persistence operations for review comments attached to threads. + * Comments are anchored to file locations and carry a severity level. + * + * @module ReviewCommentRepository + */ +import { + type ReviewComment, + ReviewCommentAddInput, + ReviewCommentDeleteInput, + ReviewCommentListInput, + ReviewCommentUpdateInput, +} from "@t3tools/contracts"; +import { ServiceMap } from "effect"; +import type { Effect } from "effect"; + +import type { PersistenceSqlError, PersistenceDecodeError } from "../Errors.ts"; + +export type ReviewCommentRepositoryError = PersistenceSqlError | PersistenceDecodeError; + +/** + * ReviewCommentRepositoryShape - Service API for review comment persistence. + */ +export interface ReviewCommentRepositoryShape { + /** + * Insert a new review comment. + * + * Generates `id`, `createdAt`, and `updatedAt` server-side. + * Returns the full persisted ReviewComment. + */ + readonly add: ( + input: typeof ReviewCommentAddInput.Type, + ) => Effect.Effect; + + /** + * Update an existing review comment's body and/or severity. + * + * Automatically bumps `updatedAt`. 
+ */ + readonly update: ( + input: typeof ReviewCommentUpdateInput.Type, + ) => Effect.Effect; + + /** + * Delete a single review comment by id. + */ + readonly delete: ( + input: typeof ReviewCommentDeleteInput.Type, + ) => Effect.Effect; + + /** + * List all review comments for a thread. + * + * Returned ordered by file path, then start_line ascending. + */ + readonly listByThreadId: ( + input: typeof ReviewCommentListInput.Type, + ) => Effect.Effect, ReviewCommentRepositoryError>; + + /** + * Bulk delete all review comments for a thread. + */ + readonly deleteByThreadId: ( + input: typeof ReviewCommentListInput.Type, + ) => Effect.Effect; +} + +/** + * ReviewCommentRepository - Service tag for review comment persistence. + */ +export class ReviewCommentRepository extends ServiceMap.Service< + ReviewCommentRepository, + ReviewCommentRepositoryShape +>()("t3/persistence/Services/ReviewCommentRepository") {} diff --git a/apps/server/src/persistence/Services/ReviewRequestRepository.ts b/apps/server/src/persistence/Services/ReviewRequestRepository.ts new file mode 100644 index 0000000000..8fb7643ae5 --- /dev/null +++ b/apps/server/src/persistence/Services/ReviewRequestRepository.ts @@ -0,0 +1,95 @@ +/** + * ReviewRequestRepository - Repository interface for PR review requests. + * + * Owns persistence operations for incoming review request notifications. + * Tracks pending, in-review, and dismissed states with optional thread linking. + * + * @module ReviewRequestRepository + */ +import type { ReviewRequest, ReviewRequestStatus } from "@t3tools/contracts"; +import { ServiceMap } from "effect"; +import type { Effect } from "effect"; + +import type { PersistenceSqlError, PersistenceDecodeError } from "../Errors.ts"; + +export type ReviewRequestRepositoryError = PersistenceSqlError | PersistenceDecodeError; + +/** + * UpsertInput - Data needed to insert or update a review request by pr_url. 
+ */ +export interface ReviewRequestUpsertInput { + readonly prUrl: string; + readonly prNumber: number; + readonly prTitle: string; + readonly repoNameWithOwner: string; + readonly authorLogin: string; + readonly isBot: boolean; + readonly prBody?: string; + readonly prLabels?: readonly string[]; +} + +/** + * UpdateStatusInput - Data needed to change a review request's status. + */ +export interface ReviewRequestUpdateStatusInput { + readonly id: string; + readonly status: ReviewRequestStatus; + readonly threadId?: string; +} + +/** + * ReviewRequestRepositoryShape - Service API for review request persistence. + */ +export interface ReviewRequestRepositoryShape { + /** + * Insert a new review request or update an existing one by pr_url. + * + * Generates `id`, `createdAt`, and `updatedAt` server-side for new rows. + * For existing rows, updates pr_title, pr_number, author_login, and bumps `updatedAt`. + */ + readonly upsert: ( + input: ReviewRequestUpsertInput, + ) => Effect.Effect; + + /** + * Update the status (and optionally thread_id) of a review request. + * + * Automatically bumps `updatedAt`. + */ + readonly updateStatus: ( + input: ReviewRequestUpdateStatusInput, + ) => Effect.Effect; + + /** + * List all non-dismissed review requests plus recently dismissed ones. + * + * Returns ordered by updatedAt descending. + */ + readonly listActive: () => Effect.Effect< + ReadonlyArray, + ReviewRequestRepositoryError + >; + + /** + * Mark as dismissed any review requests whose pr_url is NOT in the provided + * list of active URLs and whose status is currently 'pending'. + * + * Used to auto-dismiss stale requests when PRs are closed or merged. + */ + readonly dismissStale: ( + activeUrls: ReadonlyArray, + ) => Effect.Effect; + + /** + * Clear thread_id on review requests whose linked thread has been deleted. + */ + readonly unlinkDeletedThreads: () => Effect.Effect; +} + +/** + * ReviewRequestRepository - Service tag for review request persistence. 
+ */ +export class ReviewRequestRepository extends ServiceMap.Service< + ReviewRequestRepository, + ReviewRequestRepositoryShape +>()("t3/persistence/Services/ReviewRequestRepository") {} diff --git a/apps/server/src/provider/Layers/ClaudeCodeAdapter.test.ts b/apps/server/src/provider/Layers/ClaudeCodeAdapter.test.ts new file mode 100644 index 0000000000..c22c903804 --- /dev/null +++ b/apps/server/src/provider/Layers/ClaudeCodeAdapter.test.ts @@ -0,0 +1,976 @@ +import type { + Options as ClaudeQueryOptions, + PermissionMode, + PermissionResult, + SDKMessage, + SDKUserMessage, +} from "@anthropic-ai/claude-agent-sdk"; +import { ApprovalRequestId, ThreadId } from "@t3tools/contracts"; +import { assert, describe, it } from "@effect/vitest"; +import { Effect, Fiber, Random, Stream } from "effect"; + +import { ProviderAdapterValidationError } from "../Errors.ts"; +import { ClaudeCodeAdapter } from "../Services/ClaudeCodeAdapter.ts"; +import { + makeClaudeCodeAdapterLive, + type ClaudeCodeAdapterLiveOptions, +} from "./ClaudeCodeAdapter.ts"; + +class FakeClaudeQuery implements AsyncIterable { + private readonly queue: Array = []; + private readonly resolvers: Array<(value: IteratorResult) => void> = []; + private done = false; + + public readonly interruptCalls: Array = []; + public readonly setModelCalls: Array = []; + public readonly setPermissionModeCalls: Array = []; + public readonly setMaxThinkingTokensCalls: Array = []; + public closeCalls = 0; + + emit(message: SDKMessage): void { + if (this.done) { + return; + } + const resolver = this.resolvers.shift(); + if (resolver) { + resolver({ done: false, value: message }); + return; + } + this.queue.push(message); + } + + finish(): void { + if (this.done) { + return; + } + this.done = true; + for (const resolver of this.resolvers.splice(0)) { + resolver({ done: true, value: undefined }); + } + } + + readonly interrupt = async (): Promise => { + this.interruptCalls.push(undefined); + }; + + readonly setModel = async 
(model?: string): Promise => { + this.setModelCalls.push(model); + }; + + readonly setPermissionMode = async (mode: PermissionMode): Promise => { + this.setPermissionModeCalls.push(mode); + }; + + readonly setMaxThinkingTokens = async (maxThinkingTokens: number | null): Promise => { + this.setMaxThinkingTokensCalls.push(maxThinkingTokens); + }; + + readonly close = (): void => { + this.closeCalls += 1; + this.finish(); + }; + + [Symbol.asyncIterator](): AsyncIterator { + return { + next: () => { + if (this.queue.length > 0) { + const value = this.queue.shift(); + if (value) { + return Promise.resolve({ + done: false, + value, + }); + } + } + if (this.done) { + return Promise.resolve({ + done: true, + value: undefined, + }); + } + return new Promise((resolve) => { + this.resolvers.push(resolve); + }); + }, + }; + } +} + +interface Harness { + readonly layer: ReturnType; + readonly query: FakeClaudeQuery; + readonly getLastCreateQueryInput: () => + | { + readonly prompt: AsyncIterable; + readonly options: ClaudeQueryOptions; + } + | undefined; +} + +function makeHarness(config?: { + readonly nativeEventLogPath?: string; + readonly nativeEventLogger?: ClaudeCodeAdapterLiveOptions["nativeEventLogger"]; +}): Harness { + const query = new FakeClaudeQuery(); + let createInput: + | { + readonly prompt: AsyncIterable; + readonly options: ClaudeQueryOptions; + } + | undefined; + + const adapterOptions: ClaudeCodeAdapterLiveOptions = { + createQuery: (input) => { + createInput = input; + return query; + }, + reviewCommentRepository: { + add: () => Effect.fail(new Error("stub") as never), + update: () => Effect.fail(new Error("stub") as never), + delete: () => Effect.fail(new Error("stub") as never), + listByThreadId: () => Effect.succeed([]), + deleteByThreadId: () => Effect.fail(new Error("stub") as never), + }, + ...(config?.nativeEventLogger + ? { + nativeEventLogger: config.nativeEventLogger, + } + : {}), + ...(config?.nativeEventLogPath + ? 
{ + nativeEventLogPath: config.nativeEventLogPath, + } + : {}), + }; + + return { + layer: makeClaudeCodeAdapterLive(adapterOptions), + query, + getLastCreateQueryInput: () => createInput, + }; +} + +function makeDeterministicRandomService(seed = 0x1234_5678): { + nextIntUnsafe: () => number; + nextDoubleUnsafe: () => number; +} { + let state = seed >>> 0; + const nextIntUnsafe = (): number => { + state = (Math.imul(1_664_525, state) + 1_013_904_223) >>> 0; + return state; + }; + + return { + nextIntUnsafe, + nextDoubleUnsafe: () => nextIntUnsafe() / 0x1_0000_0000, + }; +} + +const THREAD_ID = ThreadId.makeUnsafe("thread-claude-1"); +const RESUME_THREAD_ID = ThreadId.makeUnsafe("thread-claude-resume"); + +describe("ClaudeCodeAdapterLive", () => { + it.effect("returns validation error for non-claudeCode provider on startSession", () => { + const harness = makeHarness(); + return Effect.gen(function* () { + const adapter = yield* ClaudeCodeAdapter; + const result = yield* adapter + .startSession({ threadId: THREAD_ID, provider: "codex", runtimeMode: "full-access" }) + .pipe(Effect.result); + + assert.equal(result._tag, "Failure"); + if (result._tag !== "Failure") { + return; + } + assert.deepEqual( + result.failure, + new ProviderAdapterValidationError({ + provider: "claudeCode", + operation: "startSession", + issue: "Expected provider 'claudeCode' but received 'codex'.", + }), + ); + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }); + + it.effect("derives bypass permission mode from full-access runtime policy", () => { + const harness = makeHarness(); + return Effect.gen(function* () { + const adapter = yield* ClaudeCodeAdapter; + yield* adapter.startSession({ + threadId: THREAD_ID, + provider: "claudeCode", + runtimeMode: "full-access", + }); + + const createInput = harness.getLastCreateQueryInput(); + assert.equal(createInput?.options.permissionMode, "bypassPermissions"); + 
assert.equal(createInput?.options.allowDangerouslySkipPermissions, true); + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }); + + it.effect("keeps explicit claude permission mode over runtime-derived defaults", () => { + const harness = makeHarness(); + return Effect.gen(function* () { + const adapter = yield* ClaudeCodeAdapter; + yield* adapter.startSession({ + threadId: THREAD_ID, + provider: "claudeCode", + runtimeMode: "full-access", + providerOptions: { + claudeCode: { + permissionMode: "plan", + }, + }, + }); + + const createInput = harness.getLastCreateQueryInput(); + assert.equal(createInput?.options.permissionMode, "plan"); + assert.equal(createInput?.options.allowDangerouslySkipPermissions, undefined); + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }); + + it.effect("maps Claude stream/runtime messages to canonical provider runtime events", () => { + const harness = makeHarness(); + return Effect.gen(function* () { + const adapter = yield* ClaudeCodeAdapter; + + const runtimeEventsFiber = yield* Stream.take(adapter.streamEvents, 11).pipe( + Stream.runCollect, + Effect.forkChild, + ); + + const session = yield* adapter.startSession({ + threadId: THREAD_ID, + provider: "claudeCode", + model: "claude-sonnet-4-5", + runtimeMode: "full-access", + }); + + const turn = yield* adapter.sendTurn({ + threadId: session.threadId, + input: "hello", + attachments: [], + }); + + harness.query.emit({ + type: "stream_event", + session_id: "sdk-session-1", + uuid: "stream-1", + parent_tool_use_id: null, + event: { + type: "content_block_delta", + index: 0, + delta: { + type: "text_delta", + text: "Hi", + }, + }, + } as unknown as SDKMessage); + + harness.query.emit({ + type: "stream_event", + session_id: "sdk-session-1", + uuid: "stream-2", + parent_tool_use_id: null, + event: { + type: "content_block_start", + index: 1, 
+ content_block: { + type: "tool_use", + id: "tool-1", + name: "Bash", + input: { + command: "ls", + }, + }, + }, + } as unknown as SDKMessage); + + harness.query.emit({ + type: "stream_event", + session_id: "sdk-session-1", + uuid: "stream-3", + parent_tool_use_id: null, + event: { + type: "content_block_stop", + index: 1, + }, + } as unknown as SDKMessage); + + harness.query.emit({ + type: "assistant", + session_id: "sdk-session-1", + uuid: "assistant-1", + parent_tool_use_id: null, + message: { + id: "assistant-message-1", + content: [{ type: "text", text: "Hi" }], + }, + } as unknown as SDKMessage); + + harness.query.emit({ + type: "result", + subtype: "success", + is_error: false, + errors: [], + session_id: "sdk-session-1", + uuid: "result-1", + } as unknown as SDKMessage); + + const runtimeEvents = Array.from(yield* Fiber.join(runtimeEventsFiber)); + assert.deepEqual( + runtimeEvents.map((event) => event.type), + [ + "session.started", + "session.configured", + "session.state.changed", + "turn.started", + "thread.started", + "content.delta", + "item.started", + "item.completed", + "item.updated", + "item.completed", + "turn.completed", + ], + ); + + const turnStarted = runtimeEvents[3]; + assert.equal(turnStarted?.type, "turn.started"); + if (turnStarted?.type === "turn.started") { + assert.equal(String(turnStarted.turnId), String(turn.turnId)); + } + + const deltaEvent = runtimeEvents.find((event) => event.type === "content.delta"); + assert.equal(deltaEvent?.type, "content.delta"); + if (deltaEvent?.type === "content.delta") { + assert.equal(deltaEvent.payload.delta, "Hi"); + assert.equal(String(deltaEvent.turnId), String(turn.turnId)); + } + + const toolStarted = runtimeEvents.find((event) => event.type === "item.started"); + assert.equal(toolStarted?.type, "item.started"); + if (toolStarted?.type === "item.started") { + assert.equal(toolStarted.payload.itemType, "command_execution"); + } + + const turnCompleted = runtimeEvents[runtimeEvents.length - 1]; 
+ assert.equal(turnCompleted?.type, "turn.completed"); + if (turnCompleted?.type === "turn.completed") { + assert.equal(String(turnCompleted.turnId), String(turn.turnId)); + assert.equal(turnCompleted.payload.state, "completed"); + } + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }); + + it.effect( + "emits completion only after turn result when assistant frames arrive before deltas", + () => { + const harness = makeHarness(); + return Effect.gen(function* () { + const adapter = yield* ClaudeCodeAdapter; + + const runtimeEventsFiber = yield* Stream.take(adapter.streamEvents, 9).pipe( + Stream.runCollect, + Effect.forkChild, + ); + + const session = yield* adapter.startSession({ + threadId: THREAD_ID, + provider: "claudeCode", + runtimeMode: "full-access", + }); + + const turn = yield* adapter.sendTurn({ + threadId: session.threadId, + input: "hello", + attachments: [], + }); + + harness.query.emit({ + type: "assistant", + session_id: "sdk-session-early-assistant", + uuid: "assistant-early", + parent_tool_use_id: null, + message: { + id: "assistant-message-early", + content: [ + { type: "tool_use", id: "tool-early", name: "Read", input: { path: "a.ts" } }, + ], + }, + } as unknown as SDKMessage); + + harness.query.emit({ + type: "stream_event", + session_id: "sdk-session-early-assistant", + uuid: "stream-early", + parent_tool_use_id: null, + event: { + type: "content_block_delta", + index: 0, + delta: { + type: "text_delta", + text: "Late text", + }, + }, + } as unknown as SDKMessage); + + harness.query.emit({ + type: "result", + subtype: "success", + is_error: false, + errors: [], + session_id: "sdk-session-early-assistant", + uuid: "result-early", + } as unknown as SDKMessage); + + const runtimeEvents = Array.from(yield* Fiber.join(runtimeEventsFiber)); + assert.deepEqual( + runtimeEvents.map((event) => event.type), + [ + "session.started", + "session.configured", + 
"session.state.changed", + "turn.started", + "thread.started", + "item.updated", + "content.delta", + "item.completed", + "turn.completed", + ], + ); + + const deltaIndex = runtimeEvents.findIndex((event) => event.type === "content.delta"); + const completedIndex = runtimeEvents.findIndex((event) => event.type === "item.completed"); + assert.equal(deltaIndex >= 0 && completedIndex >= 0 && deltaIndex < completedIndex, true); + + const deltaEvent = runtimeEvents[deltaIndex]; + assert.equal(deltaEvent?.type, "content.delta"); + if (deltaEvent?.type === "content.delta") { + assert.equal(deltaEvent.payload.delta, "Late text"); + assert.equal(String(deltaEvent.turnId), String(turn.turnId)); + } + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }, + ); + + it.effect("falls back to assistant payload text when stream deltas are absent", () => { + const harness = makeHarness(); + return Effect.gen(function* () { + const adapter = yield* ClaudeCodeAdapter; + + const runtimeEventsFiber = yield* Stream.take(adapter.streamEvents, 9).pipe( + Stream.runCollect, + Effect.forkChild, + ); + + const session = yield* adapter.startSession({ + threadId: THREAD_ID, + provider: "claudeCode", + runtimeMode: "full-access", + }); + + const turn = yield* adapter.sendTurn({ + threadId: session.threadId, + input: "hello", + attachments: [], + }); + + harness.query.emit({ + type: "assistant", + session_id: "sdk-session-fallback-text", + uuid: "assistant-fallback", + parent_tool_use_id: null, + message: { + id: "assistant-message-fallback", + content: [{ type: "text", text: "Fallback hello" }], + }, + } as unknown as SDKMessage); + + harness.query.emit({ + type: "result", + subtype: "success", + is_error: false, + errors: [], + session_id: "sdk-session-fallback-text", + uuid: "result-fallback", + } as unknown as SDKMessage); + + const runtimeEvents = Array.from(yield* Fiber.join(runtimeEventsFiber)); + assert.deepEqual( + 
runtimeEvents.map((event) => event.type), + [ + "session.started", + "session.configured", + "session.state.changed", + "turn.started", + "thread.started", + "item.updated", + "content.delta", + "item.completed", + "turn.completed", + ], + ); + + const deltaEvent = runtimeEvents.find((event) => event.type === "content.delta"); + assert.equal(deltaEvent?.type, "content.delta"); + if (deltaEvent?.type === "content.delta") { + assert.equal(deltaEvent.payload.delta, "Fallback hello"); + assert.equal(String(deltaEvent.turnId), String(turn.turnId)); + } + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }); + + it.effect("does not fabricate provider thread ids before first SDK session_id", () => { + const harness = makeHarness(); + return Effect.gen(function* () { + const adapter = yield* ClaudeCodeAdapter; + + const runtimeEventsFiber = yield* Stream.take(adapter.streamEvents, 5).pipe( + Stream.runCollect, + Effect.forkChild, + ); + + const session = yield* adapter.startSession({ + threadId: THREAD_ID, + provider: "claudeCode", + runtimeMode: "full-access", + }); + assert.equal(session.threadId, undefined); + + const turn = yield* adapter.sendTurn({ + threadId: session.threadId, + input: "hello", + attachments: [], + }); + assert.equal(turn.threadId, undefined); + + harness.query.emit({ + type: "stream_event", + session_id: "sdk-thread-real", + uuid: "stream-thread-real", + parent_tool_use_id: null, + event: { + type: "message_start", + message: { + id: "msg-thread-real", + }, + }, + } as unknown as SDKMessage); + + harness.query.emit({ + type: "result", + subtype: "success", + is_error: false, + errors: [], + session_id: "sdk-thread-real", + uuid: "result-thread-real", + } as unknown as SDKMessage); + + const runtimeEvents = Array.from(yield* Fiber.join(runtimeEventsFiber)); + assert.deepEqual( + runtimeEvents.map((event) => event.type), + [ + "session.started", + "session.configured", + 
"session.state.changed", + "turn.started", + "thread.started", + ], + ); + + const sessionStarted = runtimeEvents[0]; + assert.equal(sessionStarted?.type, "session.started"); + if (sessionStarted?.type === "session.started") { + assert.equal("threadId" in sessionStarted, false); + } + + const threadStarted = runtimeEvents[4]; + assert.equal(threadStarted?.type, "thread.started"); + if (threadStarted?.type === "thread.started") { + assert.equal(threadStarted.threadId, "sdk-thread-real"); + } + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }); + + it.effect("bridges approval request/response lifecycle through canUseTool", () => { + const harness = makeHarness(); + return Effect.gen(function* () { + const adapter = yield* ClaudeCodeAdapter; + + const session = yield* adapter.startSession({ + threadId: THREAD_ID, + provider: "claudeCode", + runtimeMode: "approval-required", + }); + + yield* Stream.take(adapter.streamEvents, 3).pipe(Stream.runDrain); + + const createInput = harness.getLastCreateQueryInput(); + const canUseTool = createInput?.options.canUseTool; + assert.equal(typeof canUseTool, "function"); + if (!canUseTool) { + return; + } + + const permissionPromise = canUseTool( + "Bash", + { command: "pwd" }, + { + signal: new AbortController().signal, + suggestions: [ + { + type: "setMode", + mode: "default", + destination: "session", + }, + ], + toolUseID: "tool-use-1", + }, + ); + + const requested = yield* Stream.runHead(adapter.streamEvents); + assert.equal(requested._tag, "Some"); + if (requested._tag !== "Some") { + return; + } + assert.equal(requested.value.type, "request.opened"); + if (requested.value.type !== "request.opened") { + return; + } + const runtimeRequestId = requested.value.requestId; + assert.equal(typeof runtimeRequestId, "string"); + if (runtimeRequestId === undefined) { + return; + } + + yield* adapter.respondToRequest( + session.threadId, + 
ApprovalRequestId.makeUnsafe(runtimeRequestId), + "accept", + ); + + const resolved = yield* Stream.runHead(adapter.streamEvents); + assert.equal(resolved._tag, "Some"); + if (resolved._tag !== "Some") { + return; + } + assert.equal(resolved.value.type, "request.resolved"); + if (resolved.value.type !== "request.resolved") { + return; + } + assert.equal(resolved.value.requestId, requested.value.requestId); + assert.equal(resolved.value.payload.decision, "accept"); + + const permissionResult = yield* Effect.promise(() => permissionPromise); + assert.equal((permissionResult as PermissionResult).behavior, "allow"); + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }); + + it.effect("passes parsed resume cursor values to Claude query options", () => { + const harness = makeHarness(); + return Effect.gen(function* () { + const adapter = yield* ClaudeCodeAdapter; + + const session = yield* adapter.startSession({ + threadId: RESUME_THREAD_ID, + provider: "claudeCode", + resumeCursor: { + threadId: "resume-thread-1", + resume: "550e8400-e29b-41d4-a716-446655440000", + resumeSessionAt: "assistant-99", + turnCount: 3, + }, + runtimeMode: "full-access", + }); + + assert.equal(session.threadId, "resume-thread-1"); + assert.deepEqual(session.resumeCursor, { + threadId: "resume-thread-1", + resume: "550e8400-e29b-41d4-a716-446655440000", + resumeSessionAt: "assistant-99", + turnCount: 3, + }); + + const createInput = harness.getLastCreateQueryInput(); + assert.equal(createInput?.options.resume, "550e8400-e29b-41d4-a716-446655440000"); + assert.equal(createInput?.options.resumeSessionAt, "assistant-99"); + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }); + + it.effect("does not synthesize resume session id from generated thread ids", () => { + const harness = makeHarness(); + return Effect.gen(function* () { + const adapter = 
yield* ClaudeCodeAdapter; + + const session = yield* adapter.startSession({ + threadId: THREAD_ID, + provider: "claudeCode", + runtimeMode: "full-access", + }); + + assert.equal("resume" in (session.resumeCursor as Record), false); + + const createInput = harness.getLastCreateQueryInput(); + assert.equal(createInput?.options.resume, undefined); + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }); + + it.effect("ends the prompt async iterable cleanly when stopping a session", () => { + const harness = makeHarness(); + return Effect.gen(function* () { + const adapter = yield* ClaudeCodeAdapter; + + const session = yield* adapter.startSession({ + threadId: THREAD_ID, + provider: "claudeCode", + runtimeMode: "full-access", + }); + + const prompt = harness.getLastCreateQueryInput()?.prompt; + assert.equal(prompt !== undefined, true); + if (!prompt) { + return; + } + + const iterator = prompt[Symbol.asyncIterator](); + const nextPrompt = iterator.next(); + + yield* adapter.stopSession(session.threadId); + + const nextResult = yield* Effect.promise(() => nextPrompt); + assert.equal(nextResult.done, true); + assert.equal(nextResult.value, undefined); + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }); + + it.effect( + "supports rollbackThread by trimming in-memory turns and preserving earlier turns", + () => { + const harness = makeHarness(); + return Effect.gen(function* () { + const adapter = yield* ClaudeCodeAdapter; + + const session = yield* adapter.startSession({ + threadId: THREAD_ID, + provider: "claudeCode", + runtimeMode: "full-access", + }); + + const firstTurn = yield* adapter.sendTurn({ + threadId: session.threadId, + input: "first", + attachments: [], + }); + + const firstCompletedFiber = yield* Stream.filter( + adapter.streamEvents, + (event) => event.type === "turn.completed", + ).pipe(Stream.runHead, 
Effect.forkChild); + + harness.query.emit({ + type: "result", + subtype: "success", + is_error: false, + errors: [], + session_id: "sdk-session-rollback", + uuid: "result-first", + } as unknown as SDKMessage); + + const firstCompleted = yield* Fiber.join(firstCompletedFiber); + assert.equal(firstCompleted._tag, "Some"); + if (firstCompleted._tag === "Some" && firstCompleted.value.type === "turn.completed") { + assert.equal(String(firstCompleted.value.turnId), String(firstTurn.turnId)); + } + + const secondTurn = yield* adapter.sendTurn({ + threadId: session.threadId, + input: "second", + attachments: [], + }); + + const secondCompletedFiber = yield* Stream.filter( + adapter.streamEvents, + (event) => event.type === "turn.completed", + ).pipe(Stream.runHead, Effect.forkChild); + + harness.query.emit({ + type: "result", + subtype: "success", + is_error: false, + errors: [], + session_id: "sdk-session-rollback", + uuid: "result-second", + } as unknown as SDKMessage); + + const secondCompleted = yield* Fiber.join(secondCompletedFiber); + assert.equal(secondCompleted._tag, "Some"); + if (secondCompleted._tag === "Some" && secondCompleted.value.type === "turn.completed") { + assert.equal(String(secondCompleted.value.turnId), String(secondTurn.turnId)); + } + + const threadBeforeRollback = yield* adapter.readThread(session.threadId); + assert.equal(threadBeforeRollback.turns.length, 2); + + const rolledBack = yield* adapter.rollbackThread(session.threadId, 1); + assert.equal(rolledBack.turns.length, 1); + assert.equal(rolledBack.turns[0]?.id, firstTurn.turnId); + + const threadAfterRollback = yield* adapter.readThread(session.threadId); + assert.equal(threadAfterRollback.turns.length, 1); + assert.equal(threadAfterRollback.turns[0]?.id, firstTurn.turnId); + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }, + ); + + it.effect("updates model on sendTurn when model override is provided", () => { + 
const harness = makeHarness(); + return Effect.gen(function* () { + const adapter = yield* ClaudeCodeAdapter; + + const session = yield* adapter.startSession({ + threadId: THREAD_ID, + provider: "claudeCode", + runtimeMode: "full-access", + }); + yield* adapter.sendTurn({ + threadId: session.threadId, + input: "hello", + model: "claude-opus-4-6", + attachments: [], + }); + + assert.deepEqual(harness.query.setModelCalls, ["claude-opus-4-6"]); + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }); + + it.effect("writes provider-native observability records when enabled", () => { + const nativeEvents: Array<{ + event?: { + provider?: string; + method?: string; + threadId?: string; + turnId?: string; + }; + }> = []; + const harness = makeHarness({ + nativeEventLogger: { + filePath: "memory://claude-native-events", + write: (event) => { + nativeEvents.push(event as (typeof nativeEvents)[number]); + return Effect.void; + }, + close: () => Effect.void, + }, + }); + return Effect.gen(function* () { + const adapter = yield* ClaudeCodeAdapter; + + const session = yield* adapter.startSession({ + threadId: THREAD_ID, + provider: "claudeCode", + runtimeMode: "full-access", + }); + const turn = yield* adapter.sendTurn({ + threadId: session.threadId, + input: "hello", + attachments: [], + }); + + const turnCompletedFiber = yield* Stream.filter( + adapter.streamEvents, + (event) => event.type === "turn.completed", + ).pipe(Stream.runHead, Effect.forkChild); + + harness.query.emit({ + type: "stream_event", + session_id: "sdk-session-native-log", + uuid: "stream-native-log", + parent_tool_use_id: null, + event: { + type: "content_block_delta", + index: 0, + delta: { + type: "text_delta", + text: "hi", + }, + }, + } as unknown as SDKMessage); + + harness.query.emit({ + type: "result", + subtype: "success", + is_error: false, + errors: [], + session_id: "sdk-session-native-log", + uuid: "result-native-log", + } 
as unknown as SDKMessage); + + const turnCompleted = yield* Fiber.join(turnCompletedFiber); + assert.equal(turnCompleted._tag, "Some"); + + assert.equal(nativeEvents.length > 0, true); + assert.equal( + nativeEvents.some((record) => record.event?.provider === "claudeCode"), + true, + ); + assert.equal( + nativeEvents.some((record) => String(record.event?.threadId) === String(session.threadId)), + true, + ); + assert.equal( + nativeEvents.some((record) => String(record.event?.turnId) === String(turn.turnId)), + true, + ); + assert.equal( + nativeEvents.some( + (record) => record.event?.method === "claude/stream_event/content_block_delta/text_delta", + ), + true, + ); + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }); +}); diff --git a/apps/server/src/provider/Layers/ClaudeCodeAdapter.ts b/apps/server/src/provider/Layers/ClaudeCodeAdapter.ts new file mode 100644 index 0000000000..d3be2ecc99 --- /dev/null +++ b/apps/server/src/provider/Layers/ClaudeCodeAdapter.ts @@ -0,0 +1,2215 @@ +/** + * ClaudeCodeAdapterLive - Scoped live implementation for the Claude Code provider adapter. + * + * Wraps `@anthropic-ai/claude-agent-sdk` query sessions behind the generic + * provider adapter contract and emits canonical runtime events. 
+ * + * @module ClaudeCodeAdapterLive + */ +import { + type CanUseTool, + query, + type Options as ClaudeQueryOptions, + type PermissionMode, + type PermissionResult, + type PermissionUpdate, + type SDKMessage, + type SDKResultMessage, + type SDKUserMessage, +} from "@anthropic-ai/claude-agent-sdk"; +import { + ApprovalRequestId, + type CanonicalItemType, + type CanonicalRequestType, + EventId, + type ProviderApprovalDecision, + ProviderItemId, + type ProviderRuntimeEvent, + type ProviderRuntimeTurnStatus, + type ProviderSendTurnInput, + type ProviderSession, + type ProviderUserInputAnswers, + RuntimeItemId, + RuntimeRequestId, + RuntimeTaskId, + ThreadId, + TurnId, +} from "@t3tools/contracts"; +import { Cause, DateTime, Deferred, Effect, Layer, Queue, Random, Ref, Stream } from "effect"; + +import { + ProviderAdapterProcessError, + ProviderAdapterRequestError, + ProviderAdapterSessionClosedError, + ProviderAdapterSessionNotFoundError, + ProviderAdapterValidationError, + type ProviderAdapterError, +} from "../Errors.ts"; +import { resolveEnabledPlugins } from "@t3tools/shared/claude-plugins"; +import { getClaudeContextWindowMode } from "@t3tools/shared/model"; +import { type ReviewCommentRepositoryShape } from "../../persistence/Services/ReviewCommentRepository.ts"; +import { createReviewCommentMcpServer } from "../reviewCommentTools.ts"; +import { ClaudeCodeAdapter, type ClaudeCodeAdapterShape } from "../Services/ClaudeCodeAdapter.ts"; +import { type EventNdjsonLogger, makeEventNdjsonLogger } from "./EventNdjsonLogger.ts"; + +const PROVIDER = "claudeCode" as const; + +type PromptQueueItem = + | { + readonly type: "message"; + readonly message: SDKUserMessage; + } + | { + readonly type: "terminate"; + }; + +interface ClaudeResumeState { + readonly threadId?: ThreadId; + readonly resume?: string; + readonly resumeSessionAt?: string; + readonly turnCount?: number; +} + +interface ClaudeTurnState { + readonly turnId: TurnId; + readonly assistantItemId: string; + 
readonly startedAt: string; + readonly items: Array; + readonly messageCompleted: boolean; + readonly emittedTextDelta: boolean; + readonly fallbackAssistantText: string; +} + +interface ApprovalResponse { + readonly decision: ProviderApprovalDecision; + readonly feedback?: string; +} + +interface PendingApproval { + readonly requestType: CanonicalRequestType; + readonly detail?: string; + readonly suggestions?: ReadonlyArray; + readonly decision: Deferred.Deferred; +} + +interface ToolInFlight { + readonly itemId: string; + readonly itemType: CanonicalItemType; + readonly toolName: string; + readonly title: string; + detail?: string; + inputJsonChunks: string[]; +} + +interface PendingUserInput { + readonly questions: Array<{ + question: string; + header: string; + options: Array<{ label: string; description: string }>; + multiSelect?: boolean; + }>; + readonly answers: Deferred.Deferred; +} + +interface ClaudeSessionContext { + session: ProviderSession; + readonly promptQueue: Queue.Queue; + readonly query: ClaudeQueryRuntime; + readonly startedAt: string; + resumeSessionId: string | undefined; + readonly pendingApprovals: Map; + readonly pendingUserInputs: Map; + readonly turns: Array<{ + id: TurnId; + items: Array; + }>; + readonly inFlightTools: Map; + turnState: ClaudeTurnState | undefined; + lastAssistantUuid: string | undefined; + lastThreadStartedId: string | undefined; + stopped: boolean; + interactionMode: "default" | "plan" | undefined; + /** Maps task_id (from subagent tasks) to the Agent tool_use block ID */ + readonly taskIdToParentToolUseId: Map; + /** FIFO queue of Agent tool_use IDs awaiting their task_started event (handles parallel agents) */ + readonly pendingAgentToolUseIds: string[]; +} + +interface ClaudeQueryRuntime extends AsyncIterable { + readonly interrupt: () => Promise; + readonly setModel: (model?: string) => Promise; + readonly setPermissionMode: (mode: PermissionMode) => Promise; + readonly setMaxThinkingTokens: (maxThinkingTokens: 
number | null) => Promise; + readonly close: () => void; +} + +export interface ClaudeCodeAdapterLiveOptions { + readonly createQuery?: (input: { + readonly prompt: AsyncIterable; + readonly options: ClaudeQueryOptions; + }) => ClaudeQueryRuntime; + readonly nativeEventLogPath?: string; + readonly nativeEventLogger?: EventNdjsonLogger; + readonly reviewCommentRepository: ReviewCommentRepositoryShape; +} + +function isUuid(value: string): boolean { + return /^[0-9a-f]{8}-[0-9a-f]{4}-[1-8][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(value); +} + +function isSyntheticClaudeThreadId(value: string): boolean { + return value.startsWith("claude-thread-"); +} + +function toMessage(cause: unknown, fallback: string): string { + if (cause instanceof Error && cause.message.length > 0) { + return cause.message; + } + return fallback; +} + +function asRuntimeItemId(value: string): RuntimeItemId { + return RuntimeItemId.makeUnsafe(value); +} + +function asCanonicalTurnId(value: TurnId): TurnId { + return value; +} + +function asRuntimeRequestId(value: ApprovalRequestId): RuntimeRequestId { + return RuntimeRequestId.makeUnsafe(value); +} + +/** Tools allowed in plan mode — read-only exploration + plan lifecycle. */ +const PLAN_MODE_ALLOWED_TOOLS = new Set([ + "Read", + "Glob", + "Grep", + "Bash", // plan mode needs Bash for read-only commands (git log, ls, etc.) 
+ "ExitPlanMode", + "EnterPlanMode", + "EnterWorktree", + "ExitWorktree", + "Agent", + "TaskCreate", + "TaskGet", + "TaskList", + "TaskUpdate", + "WebFetch", + "WebSearch", + "LSP", +]); + +function isPlanModeAllowedTool(toolName: string): boolean { + // Allow built-in plan-safe tools + if (PLAN_MODE_ALLOWED_TOOLS.has(toolName)) return true; + // Allow all MCP tools (they're user-provided and may be read-only) + if (toolName.startsWith("mcp__")) return true; + return false; +} + +function toPermissionMode(value: unknown): PermissionMode | undefined { + switch (value) { + case "default": + case "acceptEdits": + case "bypassPermissions": + case "plan": + case "dontAsk": + return value; + default: + return undefined; + } +} + +function readClaudeResumeState(resumeCursor: unknown): ClaudeResumeState | undefined { + if (!resumeCursor || typeof resumeCursor !== "object") { + return undefined; + } + const cursor = resumeCursor as { + threadId?: unknown; + resume?: unknown; + sessionId?: unknown; + resumeSessionAt?: unknown; + turnCount?: unknown; + }; + + const threadIdCandidate = typeof cursor.threadId === "string" ? cursor.threadId : undefined; + const threadId = + threadIdCandidate && !isSyntheticClaudeThreadId(threadIdCandidate) + ? ThreadId.makeUnsafe(threadIdCandidate) + : undefined; + const resumeCandidate = + typeof cursor.resume === "string" + ? cursor.resume + : typeof cursor.sessionId === "string" + ? cursor.sessionId + : undefined; + const resume = resumeCandidate && isUuid(resumeCandidate) ? resumeCandidate : undefined; + const resumeSessionAt = + typeof cursor.resumeSessionAt === "string" ? cursor.resumeSessionAt : undefined; + const turnCountValue = typeof cursor.turnCount === "number" ? cursor.turnCount : undefined; + + return { + ...(threadId ? { threadId } : {}), + ...(resume ? { resume } : {}), + ...(resumeSessionAt ? { resumeSessionAt } : {}), + ...(turnCountValue !== undefined && Number.isInteger(turnCountValue) && turnCountValue >= 0 + ? 
{ turnCount: turnCountValue } + : {}), + }; +} + +function classifyToolItemType(toolName: string): CanonicalItemType { + const normalized = toolName.toLowerCase(); + if ( + normalized.includes("bash") || + normalized.includes("command") || + normalized.includes("shell") || + normalized.includes("terminal") + ) { + return "command_execution"; + } + if ( + normalized.includes("edit") || + normalized.includes("write") || + normalized.includes("file") || + normalized.includes("patch") || + normalized.includes("replace") || + normalized.includes("create") || + normalized.includes("delete") + ) { + return "file_change"; + } + if (normalized.includes("mcp")) { + return "mcp_tool_call"; + } + return "dynamic_tool_call"; +} + +function classifyRequestType(toolName: string): CanonicalRequestType { + const normalized = toolName.toLowerCase(); + if (normalized === "read" || normalized.includes("read file") || normalized.includes("view")) { + return "file_read_approval"; + } + return classifyToolItemType(toolName) === "command_execution" + ? "command_execution_approval" + : "file_change_approval"; +} + +function summarizeToolRequest(toolName: string, input: Record): string { + const commandValue = input.command ?? input.cmd; + const command = typeof commandValue === "string" ? 
commandValue : undefined; + if (command && command.trim().length > 0) { + return `${toolName}: ${command.trim().slice(0, 400)}`; + } + + const serialized = JSON.stringify(input); + if (serialized.length <= 400) { + return `${toolName}: ${serialized}`; + } + return `${toolName}: ${serialized.slice(0, 397)}...`; +} + +function titleForTool(itemType: CanonicalItemType, toolName?: string): string { + if (toolName) { + return toolName; + } + switch (itemType) { + case "command_execution": + return "Command run"; + case "file_change": + return "File change"; + case "mcp_tool_call": + return "MCP tool call"; + case "dynamic_tool_call": + return "Tool call"; + default: + return "Item"; + } +} + +function buildUserMessage(input: ProviderSendTurnInput): SDKUserMessage { + const fragments: string[] = []; + + if (input.input && input.input.trim().length > 0) { + fragments.push(input.input.trim()); + } + + for (const attachment of input.attachments ?? []) { + if (attachment.type === "image") { + fragments.push( + `Attached image: ${attachment.name} (${attachment.mimeType}, ${attachment.sizeBytes} bytes).`, + ); + } + } + + const text = fragments.join("\n\n"); + + return { + type: "user", + session_id: "", + parent_tool_use_id: null, + message: { + role: "user", + content: [{ type: "text", text }], + }, + } as SDKUserMessage; +} + +function turnStatusFromResult(result: SDKResultMessage): ProviderRuntimeTurnStatus { + if (result.subtype === "success") { + return "completed"; + } + + const errors = result.errors.join(" ").toLowerCase(); + if (errors.includes("interrupt")) { + return "interrupted"; + } + if (errors.includes("cancel")) { + return "cancelled"; + } + return "failed"; +} + +function streamKindFromDeltaType(deltaType: string): "assistant_text" | "reasoning_text" { + return deltaType.includes("thinking") ? 
"reasoning_text" : "assistant_text"; +} + +function providerThreadRef( + context: ClaudeSessionContext, +): { readonly providerThreadId: string } | {} { + return context.resumeSessionId ? { providerThreadId: context.resumeSessionId } : {}; +} + +function extractAssistantText(message: SDKMessage): string { + if (message.type !== "assistant") { + return ""; + } + + const content = (message.message as { content?: unknown } | undefined)?.content; + if (!Array.isArray(content)) { + return ""; + } + + const fragments: string[] = []; + for (const block of content) { + if (!block || typeof block !== "object") { + continue; + } + const candidate = block as { type?: unknown; text?: unknown }; + if ( + candidate.type === "text" && + typeof candidate.text === "string" && + candidate.text.length > 0 + ) { + fragments.push(candidate.text); + } + } + + return fragments.join(""); +} + +function toSessionError( + threadId: ThreadId, + cause: unknown, +): ProviderAdapterSessionNotFoundError | ProviderAdapterSessionClosedError | undefined { + const normalized = toMessage(cause, "").toLowerCase(); + if (normalized.includes("unknown session") || normalized.includes("not found")) { + return new ProviderAdapterSessionNotFoundError({ + provider: PROVIDER, + threadId, + cause, + }); + } + if (normalized.includes("closed")) { + return new ProviderAdapterSessionClosedError({ + provider: PROVIDER, + threadId, + cause, + }); + } + return undefined; +} + +function toRequestError(threadId: ThreadId, method: string, cause: unknown): ProviderAdapterError { + const sessionError = toSessionError(threadId, cause); + if (sessionError) { + return sessionError; + } + return new ProviderAdapterRequestError({ + provider: PROVIDER, + method, + detail: toMessage(cause, `${method} failed`), + cause, + }); +} + +function sdkMessageType(value: unknown): string | undefined { + if (!value || typeof value !== "object") { + return undefined; + } + const record = value as { type?: unknown }; + return typeof 
record.type === "string" ? record.type : undefined; +} + +function sdkMessageSubtype(value: unknown): string | undefined { + if (!value || typeof value !== "object") { + return undefined; + } + const record = value as { subtype?: unknown }; + return typeof record.subtype === "string" ? record.subtype : undefined; +} + +function sdkNativeMethod(message: SDKMessage): string { + const subtype = sdkMessageSubtype(message); + if (subtype) { + return `claude/${message.type}/${subtype}`; + } + + if (message.type === "stream_event") { + const streamType = sdkMessageType(message.event); + if (streamType) { + const deltaType = + streamType === "content_block_delta" + ? sdkMessageType((message.event as { delta?: unknown }).delta) + : undefined; + if (deltaType) { + return `claude/${message.type}/${streamType}/${deltaType}`; + } + return `claude/${message.type}/${streamType}`; + } + } + + return `claude/${message.type}`; +} + +function sdkNativeItemId(message: SDKMessage): string | undefined { + if (message.type === "assistant") { + const maybeId = (message.message as { id?: unknown }).id; + if (typeof maybeId === "string") { + return maybeId; + } + return undefined; + } + + if (message.type === "stream_event") { + const event = message.event as { + type?: unknown; + content_block?: { id?: unknown }; + }; + if (event.type === "content_block_start" && typeof event.content_block?.id === "string") { + return event.content_block.id; + } + } + + return undefined; +} + +function makeClaudeCodeAdapter(options: ClaudeCodeAdapterLiveOptions) { + return Effect.gen(function* () { + const reviewCommentRepo = options.reviewCommentRepository; + + const nativeEventLogger = + options?.nativeEventLogger ?? + (options?.nativeEventLogPath !== undefined + ? yield* makeEventNdjsonLogger(options.nativeEventLogPath, { + stream: "native", + }) + : undefined); + + const createQuery = + options?.createQuery ?? 
+ ((input: { + readonly prompt: AsyncIterable; + readonly options: ClaudeQueryOptions; + }) => query({ prompt: input.prompt, options: input.options }) as ClaudeQueryRuntime); + + const sessions = new Map(); + const runtimeEventQueue = yield* Queue.unbounded(); + + const nowIso = Effect.map(DateTime.now, DateTime.formatIso); + const nextEventId = Effect.map(Random.nextUUIDv4, (id) => EventId.makeUnsafe(id)); + const makeEventStamp = () => Effect.all({ eventId: nextEventId, createdAt: nowIso }); + + const offerRuntimeEvent = (event: ProviderRuntimeEvent): Effect.Effect => + Queue.offer(runtimeEventQueue, event).pipe(Effect.asVoid); + + const logNativeSdkMessage = ( + context: ClaudeSessionContext, + message: SDKMessage, + ): Effect.Effect => + Effect.gen(function* () { + if (!nativeEventLogger) { + return; + } + + const observedAt = new Date().toISOString(); + const itemId = sdkNativeItemId(message); + + yield* nativeEventLogger.write( + { + observedAt, + event: { + id: + "uuid" in message && typeof message.uuid === "string" + ? message.uuid + : crypto.randomUUID(), + kind: "notification", + provider: PROVIDER, + createdAt: observedAt, + method: sdkNativeMethod(message), + ...(typeof message.session_id === "string" + ? { providerThreadId: message.session_id } + : {}), + ...(context.turnState ? { turnId: asCanonicalTurnId(context.turnState.turnId) } : {}), + ...(itemId ? 
{ itemId: ProviderItemId.makeUnsafe(itemId) } : {}), + payload: message, + }, + }, + null, + ); + }); + + const snapshotThread = ( + context: ClaudeSessionContext, + ): Effect.Effect< + { + threadId: ThreadId; + turns: ReadonlyArray<{ + id: TurnId; + items: ReadonlyArray; + }>; + }, + ProviderAdapterValidationError + > => + Effect.gen(function* () { + const threadId = context.session.threadId; + if (!threadId) { + return yield* new ProviderAdapterValidationError({ + provider: PROVIDER, + operation: "readThread", + issue: "Session thread id is not initialized yet.", + }); + } + return { + threadId, + turns: context.turns.map((turn) => ({ + id: turn.id, + items: [...turn.items], + })), + }; + }); + + const updateResumeCursor = (context: ClaudeSessionContext): Effect.Effect => + Effect.gen(function* () { + const threadId = context.session.threadId; + if (!threadId) return; + + const resumeCursor = { + threadId, + ...(context.resumeSessionId ? { resume: context.resumeSessionId } : {}), + ...(context.lastAssistantUuid ? 
{ resumeSessionAt: context.lastAssistantUuid } : {}), + turnCount: context.turns.length, + }; + + context.session = { + ...context.session, + resumeCursor, + updatedAt: yield* nowIso, + }; + }); + + const ensureThreadId = ( + context: ClaudeSessionContext, + message: SDKMessage, + ): Effect.Effect => + Effect.gen(function* () { + if (typeof message.session_id !== "string" || message.session_id.length === 0) { + return; + } + const nextThreadId = message.session_id; + context.resumeSessionId = message.session_id; + yield* updateResumeCursor(context); + + if (context.lastThreadStartedId !== nextThreadId) { + context.lastThreadStartedId = nextThreadId; + const stamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "thread.started", + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + threadId: context.session.threadId, + payload: { + providerThreadId: nextThreadId, + }, + providerRefs: {}, + raw: { + source: "claude.sdk.message", + method: "claude/thread/started", + payload: { + session_id: message.session_id, + }, + }, + }); + } + }); + + const emitRuntimeError = ( + context: ClaudeSessionContext, + message: string, + cause?: unknown, + ): Effect.Effect => + Effect.gen(function* () { + if (cause !== undefined) { + void cause; + } + const turnState = context.turnState; + const stamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "runtime.error", + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + threadId: context.session.threadId, + ...(turnState ? { turnId: asCanonicalTurnId(turnState.turnId) } : {}), + payload: { + message, + class: "provider_error", + ...(cause !== undefined ? { detail: cause } : {}), + }, + providerRefs: { + ...providerThreadRef(context), + ...(turnState ? 
{ providerTurnId: String(turnState.turnId) } : {}), + }, + }); + }); + + const emitRuntimeWarning = ( + context: ClaudeSessionContext, + message: string, + detail?: unknown, + ): Effect.Effect => + Effect.gen(function* () { + const turnState = context.turnState; + const stamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "runtime.warning", + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + threadId: context.session.threadId, + ...(turnState ? { turnId: asCanonicalTurnId(turnState.turnId) } : {}), + payload: { + message, + ...(detail !== undefined ? { detail } : {}), + }, + providerRefs: { + ...providerThreadRef(context), + ...(turnState ? { providerTurnId: String(turnState.turnId) } : {}), + }, + }); + }); + + const completeTurn = ( + context: ClaudeSessionContext, + status: ProviderRuntimeTurnStatus, + errorMessage?: string, + result?: SDKResultMessage, + ): Effect.Effect => + Effect.gen(function* () { + const turnState = context.turnState; + if (!turnState) { + const stamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "turn.completed", + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + threadId: context.session.threadId, + payload: { + state: status, + ...(result?.stop_reason !== undefined ? { stopReason: result.stop_reason } : {}), + ...(result?.usage ? { usage: result.usage } : {}), + ...(result?.modelUsage ? { modelUsage: result.modelUsage } : {}), + ...(typeof result?.total_cost_usd === "number" + ? { totalCostUsd: result.total_cost_usd } + : {}), + ...(errorMessage ? 
{ errorMessage } : {}), + }, + providerRefs: {}, + }); + return; + } + + if (!turnState.messageCompleted) { + if (!turnState.emittedTextDelta && turnState.fallbackAssistantText.length > 0) { + const deltaStamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "content.delta", + eventId: deltaStamp.eventId, + provider: PROVIDER, + createdAt: deltaStamp.createdAt, + threadId: context.session.threadId, + turnId: turnState.turnId, + itemId: asRuntimeItemId(turnState.assistantItemId), + payload: { + streamKind: "assistant_text", + delta: turnState.fallbackAssistantText, + }, + providerRefs: { + ...providerThreadRef(context), + providerTurnId: String(turnState.turnId), + providerItemId: ProviderItemId.makeUnsafe(turnState.assistantItemId), + }, + }); + } + + const stamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "item.completed", + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + itemId: asRuntimeItemId(turnState.assistantItemId), + threadId: context.session.threadId, + turnId: turnState.turnId, + payload: { + itemType: "assistant_message", + status: "completed", + title: "Assistant message", + }, + providerRefs: { + ...providerThreadRef(context), + providerTurnId: turnState.turnId, + providerItemId: ProviderItemId.makeUnsafe(turnState.assistantItemId), + }, + }); + } + + context.turns.push({ + id: turnState.turnId, + items: [...turnState.items], + }); + + const stamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "turn.completed", + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + threadId: context.session.threadId, + turnId: turnState.turnId, + payload: { + state: status, + ...(result?.stop_reason !== undefined ? { stopReason: result.stop_reason } : {}), + ...(result?.usage ? { usage: result.usage } : {}), + ...(result?.modelUsage ? { modelUsage: result.modelUsage } : {}), + ...(typeof result?.total_cost_usd === "number" + ? 
{ totalCostUsd: result.total_cost_usd } + : {}), + ...(errorMessage ? { errorMessage } : {}), + }, + providerRefs: { + ...providerThreadRef(context), + providerTurnId: turnState.turnId, + }, + }); + + const updatedAt = yield* nowIso; + context.turnState = undefined; + context.session = { + ...context.session, + status: "ready", + activeTurnId: undefined, + updatedAt, + ...(status === "failed" && errorMessage ? { lastError: errorMessage } : {}), + }; + yield* updateResumeCursor(context); + }); + + const handleStreamEvent = ( + context: ClaudeSessionContext, + message: SDKMessage, + ): Effect.Effect => + Effect.gen(function* () { + if (message.type !== "stream_event") { + return; + } + + const parentToolUseId = + (message as { parent_tool_use_id?: string | null }).parent_tool_use_id ?? null; + + const { event } = message; + + if (event.type === "content_block_delta") { + // Extract text and stream kind from both text_delta and thinking_delta events + const deltaText = + event.delta.type === "text_delta" + ? event.delta.text + : event.delta.type === "thinking_delta" + ? ((event.delta as { thinking?: string }).thinking ?? 
"") + : undefined; + + if (deltaText !== undefined && deltaText.length > 0 && context.turnState) { + if (!context.turnState.emittedTextDelta) { + context.turnState = { + ...context.turnState, + emittedTextDelta: true, + }; + } + const stamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "content.delta", + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + threadId: context.session.threadId, + turnId: context.turnState.turnId, + itemId: asRuntimeItemId(context.turnState.assistantItemId), + parentToolUseId, + payload: { + streamKind: streamKindFromDeltaType(event.delta.type), + delta: deltaText, + }, + providerRefs: { + ...providerThreadRef(context), + providerTurnId: context.turnState.turnId, + providerItemId: ProviderItemId.makeUnsafe(context.turnState.assistantItemId), + }, + raw: { + source: "claude.sdk.message", + method: "claude/stream_event/content_block_delta", + payload: message, + }, + }); + } + + // Accumulate tool input JSON from streaming deltas + if (event.delta.type === "input_json_delta") { + const tool = context.inFlightTools.get(event.index); + if (tool) { + tool.inputJsonChunks.push( + (event.delta as { partial_json?: string }).partial_json ?? "", + ); + } + } + return; + } + + if (event.type === "content_block_start") { + const { index, content_block: block } = event; + if ( + block.type !== "tool_use" && + block.type !== "server_tool_use" && + block.type !== "mcp_tool_use" + ) { + return; + } + + const toolName = block.name; + const itemType = classifyToolItemType(toolName); + const toolInput = + typeof block.input === "object" && block.input !== null + ? 
(block.input as Record) + : {}; + const itemId = block.id; + const detail = summarizeToolRequest(toolName, toolInput); + + const tool: ToolInFlight = { + itemId, + itemType, + toolName, + title: titleForTool(itemType, toolName), + detail, + inputJsonChunks: [], + }; + context.inFlightTools.set(index, tool); + + // Queue Agent tool_use ID so we can link the next task_started event (FIFO for parallel agents) + if (toolName === "Agent") { + context.pendingAgentToolUseIds.push(itemId); + } + + const stamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "item.started", + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + threadId: context.session.threadId, + ...(context.turnState ? { turnId: asCanonicalTurnId(context.turnState.turnId) } : {}), + itemId: asRuntimeItemId(tool.itemId), + parentToolUseId, + payload: { + itemType: tool.itemType, + status: "inProgress", + title: tool.title, + ...(tool.detail ? { detail: tool.detail } : {}), + data: { + toolName: tool.toolName, + input: toolInput, + }, + }, + providerRefs: { + ...providerThreadRef(context), + ...(context.turnState ? 
{ providerTurnId: String(context.turnState.turnId) } : {}), + providerItemId: ProviderItemId.makeUnsafe(tool.itemId), + }, + raw: { + source: "claude.sdk.message", + method: "claude/stream_event/content_block_start", + payload: message, + }, + }); + return; + } + + if (event.type === "content_block_stop") { + const { index } = event; + const tool = context.inFlightTools.get(index); + if (!tool) { + return; + } + context.inFlightTools.delete(index); + + // Rebuild detail and structured input from accumulated input JSON if available + let parsedToolInput: Record | undefined; + if (tool.inputJsonChunks.length > 0) { + const rawJson = tool.inputJsonChunks.join(""); + // eslint-disable-next-line -- Effect.sync callback, not in generator scope + parsedToolInput = yield* Effect.sync((): Record | undefined => { + try { + return JSON.parse(rawJson) as Record; + } catch { + return undefined; + } + }); + if (parsedToolInput) { + tool.detail = summarizeToolRequest(tool.toolName, parsedToolInput); + } + } + + // Track when the agent changes plan mode via tools + if (tool.toolName === "ExitPlanMode") { + context.interactionMode = "default"; + } else if (tool.toolName === "EnterPlanMode") { + context.interactionMode = "plan"; + } + + const stamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "item.completed", + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + threadId: context.session.threadId, + ...(context.turnState ? { turnId: asCanonicalTurnId(context.turnState.turnId) } : {}), + itemId: asRuntimeItemId(tool.itemId), + parentToolUseId, + payload: { + itemType: tool.itemType, + status: "completed", + title: tool.title, + ...(tool.detail ? { detail: tool.detail } : {}), + data: { + toolName: tool.toolName, + ...(parsedToolInput ? { item: { input: parsedToolInput } } : {}), + }, + }, + providerRefs: { + ...providerThreadRef(context), + ...(context.turnState ? 
{ providerTurnId: String(context.turnState.turnId) } : {}), + providerItemId: ProviderItemId.makeUnsafe(tool.itemId), + }, + raw: { + source: "claude.sdk.message", + method: "claude/stream_event/content_block_stop", + payload: message, + }, + }); + } + }); + + const handleAssistantMessage = ( + context: ClaudeSessionContext, + message: SDKMessage, + ): Effect.Effect => + Effect.gen(function* () { + if (message.type !== "assistant") { + return; + } + + const parentToolUseId = + (message as { parent_tool_use_id?: string | null }).parent_tool_use_id ?? null; + + if (context.turnState) { + context.turnState.items.push(message.message); + const fallbackAssistantText = extractAssistantText(message); + if ( + fallbackAssistantText.length > 0 && + fallbackAssistantText !== context.turnState.fallbackAssistantText + ) { + context.turnState = { + ...context.turnState, + fallbackAssistantText, + }; + } + + const stamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "item.updated", + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + threadId: context.session.threadId, + turnId: context.turnState.turnId, + itemId: asRuntimeItemId(context.turnState.assistantItemId), + parentToolUseId, + payload: { + itemType: "assistant_message", + status: "inProgress", + title: "Assistant message", + data: message.message, + }, + providerRefs: { + ...providerThreadRef(context), + providerTurnId: context.turnState.turnId, + providerItemId: ProviderItemId.makeUnsafe(context.turnState.assistantItemId), + }, + raw: { + source: "claude.sdk.message", + method: "claude/assistant", + payload: message, + }, + }); + } + + context.lastAssistantUuid = message.uuid; + yield* updateResumeCursor(context); + }); + + const handleResultMessage = ( + context: ClaudeSessionContext, + message: SDKMessage, + ): Effect.Effect => + Effect.gen(function* () { + if (message.type !== "result") { + return; + } + + const status = turnStatusFromResult(message); + const errorMessage 
= message.subtype === "success" ? undefined : message.errors[0]; + + if (status === "failed") { + yield* emitRuntimeError(context, errorMessage ?? "Claude turn failed."); + } + + yield* completeTurn(context, status, errorMessage, message); + }); + + const handleSystemMessage = ( + context: ClaudeSessionContext, + message: SDKMessage, + ): Effect.Effect => + Effect.gen(function* () { + if (message.type !== "system") { + return; + } + + const stamp = yield* makeEventStamp(); + const base = { + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + threadId: context.session.threadId, + ...(context.turnState ? { turnId: asCanonicalTurnId(context.turnState.turnId) } : {}), + providerRefs: { + ...providerThreadRef(context), + ...(context.turnState ? { providerTurnId: context.turnState.turnId } : {}), + }, + raw: { + source: "claude.sdk.message" as const, + method: sdkNativeMethod(message), + messageType: `${message.type}:${message.subtype}`, + payload: message, + }, + }; + + switch (message.subtype) { + case "init": + yield* offerRuntimeEvent({ + ...base, + type: "session.configured", + payload: { + config: message as Record, + }, + }); + return; + case "status": + yield* offerRuntimeEvent({ + ...base, + type: "session.state.changed", + payload: { + state: message.status === "compacting" ? "waiting" : "running", + reason: `status:${message.status ?? 
"active"}`, + detail: message, + }, + }); + return; + case "compact_boundary": + yield* offerRuntimeEvent({ + ...base, + type: "thread.state.changed", + payload: { + state: "compacted", + detail: message, + }, + }); + return; + case "hook_started": + yield* offerRuntimeEvent({ + ...base, + type: "hook.started", + payload: { + hookId: message.hook_id, + hookName: message.hook_name, + hookEvent: message.hook_event, + }, + }); + return; + case "hook_progress": + yield* offerRuntimeEvent({ + ...base, + type: "hook.progress", + payload: { + hookId: message.hook_id, + output: message.output, + stdout: message.stdout, + stderr: message.stderr, + }, + }); + return; + case "hook_response": + yield* offerRuntimeEvent({ + ...base, + type: "hook.completed", + payload: { + hookId: message.hook_id, + outcome: message.outcome, + output: message.output, + stdout: message.stdout, + stderr: message.stderr, + ...(typeof message.exit_code === "number" ? { exitCode: message.exit_code } : {}), + }, + }); + return; + case "task_started": { + // Pop the next pending Agent tool_use ID from the FIFO queue + const nextAgentToolUseId = context.pendingAgentToolUseIds.shift(); + if (nextAgentToolUseId) { + context.taskIdToParentToolUseId.set(message.task_id, nextAgentToolUseId); + } + const taskStartedParent = context.taskIdToParentToolUseId.get(message.task_id) ?? null; + yield* offerRuntimeEvent({ + ...base, + type: "task.started", + parentToolUseId: taskStartedParent, + payload: { + taskId: RuntimeTaskId.makeUnsafe(message.task_id), + description: message.description, + ...(message.task_type ? { taskType: message.task_type } : {}), + }, + }); + return; + } + case "task_progress": { + const taskProgressParent = context.taskIdToParentToolUseId.get(message.task_id) ?? 
null; + yield* offerRuntimeEvent({ + ...base, + type: "task.progress", + parentToolUseId: taskProgressParent, + payload: { + taskId: RuntimeTaskId.makeUnsafe(message.task_id), + description: message.description, + ...(message.usage ? { usage: message.usage } : {}), + ...(message.last_tool_name ? { lastToolName: message.last_tool_name } : {}), + }, + }); + return; + } + case "task_notification": { + const taskNotificationParent = + context.taskIdToParentToolUseId.get(message.task_id) ?? null; + yield* offerRuntimeEvent({ + ...base, + type: "task.completed", + parentToolUseId: taskNotificationParent, + payload: { + taskId: RuntimeTaskId.makeUnsafe(message.task_id), + status: message.status, + ...(message.summary ? { summary: message.summary } : {}), + ...(message.usage ? { usage: message.usage } : {}), + }, + }); + // Clean up the mapping for this completed task + context.taskIdToParentToolUseId.delete(message.task_id); + return; + } + case "files_persisted": + yield* offerRuntimeEvent({ + ...base, + type: "files.persisted", + payload: { + files: Array.isArray(message.files) + ? message.files.map((file: { filename: string; file_id: string }) => ({ + filename: file.filename, + fileId: file.file_id, + })) + : [], + ...(Array.isArray(message.failed) + ? { + failed: message.failed.map((entry: { filename: string; error: string }) => ({ + filename: entry.filename, + error: entry.error, + })), + } + : {}), + }, + }); + return; + default: + yield* emitRuntimeWarning( + context, + `Unhandled Claude system message subtype '${message.subtype}'.`, + message, + ); + return; + } + }); + + const handleSdkTelemetryMessage = ( + context: ClaudeSessionContext, + message: SDKMessage, + ): Effect.Effect => + Effect.gen(function* () { + const stamp = yield* makeEventStamp(); + const base = { + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + threadId: context.session.threadId, + ...(context.turnState ? 
{ turnId: asCanonicalTurnId(context.turnState.turnId) } : {}), + providerRefs: { + ...providerThreadRef(context), + ...(context.turnState ? { providerTurnId: context.turnState.turnId } : {}), + }, + raw: { + source: "claude.sdk.message" as const, + method: sdkNativeMethod(message), + messageType: message.type, + payload: message, + }, + }; + + if (message.type === "tool_progress") { + yield* offerRuntimeEvent({ + ...base, + type: "tool.progress", + payload: { + toolUseId: message.tool_use_id, + toolName: message.tool_name, + elapsedSeconds: message.elapsed_time_seconds, + ...(message.task_id ? { summary: `task:${message.task_id}` } : {}), + }, + }); + return; + } + + if (message.type === "tool_use_summary") { + yield* offerRuntimeEvent({ + ...base, + type: "tool.summary", + payload: { + summary: message.summary, + ...(message.preceding_tool_use_ids.length > 0 + ? { precedingToolUseIds: message.preceding_tool_use_ids } + : {}), + }, + }); + return; + } + + if (message.type === "auth_status") { + yield* offerRuntimeEvent({ + ...base, + type: "auth.status", + payload: { + isAuthenticating: message.isAuthenticating, + output: message.output, + ...(message.error ? 
{ error: message.error } : {}), + }, + }); + return; + } + + if (message.type === "rate_limit_event") { + yield* offerRuntimeEvent({ + ...base, + type: "account.rate-limits.updated", + payload: { + rateLimits: message, + }, + }); + return; + } + }); + + const handleSdkMessage = ( + context: ClaudeSessionContext, + message: SDKMessage, + ): Effect.Effect => + Effect.gen(function* () { + yield* logNativeSdkMessage(context, message); + yield* ensureThreadId(context, message); + + switch (message.type) { + case "stream_event": + yield* handleStreamEvent(context, message); + return; + case "user": + return; + case "assistant": + yield* handleAssistantMessage(context, message); + return; + case "result": + yield* handleResultMessage(context, message); + return; + case "system": + yield* handleSystemMessage(context, message); + return; + case "tool_progress": + case "tool_use_summary": + case "auth_status": + case "rate_limit_event": + yield* handleSdkTelemetryMessage(context, message); + return; + default: + yield* emitRuntimeWarning( + context, + `Unhandled Claude SDK message type '${message.type}'.`, + message, + ); + return; + } + }); + + const runSdkStream = (context: ClaudeSessionContext): Effect.Effect => + Stream.fromAsyncIterable(context.query, (cause) => cause).pipe( + Stream.takeWhile(() => !context.stopped), + Stream.runForEach((message) => handleSdkMessage(context, message)), + Effect.catchCause((cause) => + Effect.gen(function* () { + if (Cause.hasInterruptsOnly(cause) || context.stopped) { + return; + } + const rawMessage = toMessage(Cause.squash(cause), "Claude runtime stream failed."); + // Provide a human-readable message when a resumed session fails + const message = + context.resumeSessionId && /exit(ed)?\s*(with\s*)?(code\s*)?1/i.test(rawMessage) + ? "Session expired — the previous Claude Code process is no longer available. Send a new message to start a fresh session." 
+ : rawMessage; + yield* emitRuntimeError(context, message, cause); + yield* completeTurn(context, "failed", message); + }), + ), + Effect.ensuring( + Effect.gen(function* () { + if (!context.stopped) { + yield* stopSessionInternal(context, { emitExitEvent: true }); + } + }), + ), + ); + + const stopSessionInternal = ( + context: ClaudeSessionContext, + options?: { readonly emitExitEvent?: boolean }, + ): Effect.Effect => + Effect.gen(function* () { + if (context.stopped) return; + + context.stopped = true; + + for (const [requestId, pending] of context.pendingApprovals) { + yield* Deferred.succeed(pending.decision, { decision: "cancel" }); + const stamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "request.resolved", + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + threadId: context.session.threadId, + ...(context.turnState ? { turnId: asCanonicalTurnId(context.turnState.turnId) } : {}), + requestId: asRuntimeRequestId(requestId), + payload: { + requestType: pending.requestType, + decision: "cancel", + }, + providerRefs: { + ...providerThreadRef(context), + ...(context.turnState ? { providerTurnId: String(context.turnState.turnId) } : {}), + providerRequestId: requestId, + }, + }); + } + context.pendingApprovals.clear(); + + // Cancel any pending user-input requests (AskUserQuestion) + for (const [requestId, pending] of context.pendingUserInputs) { + yield* Deferred.succeed(pending.answers, {}); + const stamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "user-input.resolved", + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + threadId: context.session.threadId, + ...(context.turnState ? { turnId: asCanonicalTurnId(context.turnState.turnId) } : {}), + requestId: asRuntimeRequestId(requestId), + payload: { answers: {} }, + providerRefs: { + ...providerThreadRef(context), + ...(context.turnState ? 
{ providerTurnId: String(context.turnState.turnId) } : {}), + providerRequestId: requestId, + }, + }); + } + context.pendingUserInputs.clear(); + + if (context.turnState) { + yield* completeTurn(context, "interrupted", "Session stopped."); + } + + yield* Queue.offer(context.promptQueue, { + type: "terminate", + }); + + yield* Queue.shutdown(context.promptQueue); + + context.query.close(); + + const updatedAt = yield* nowIso; + context.session = { + ...context.session, + status: "closed", + activeTurnId: undefined, + updatedAt, + }; + + if (options?.emitExitEvent !== false) { + const stamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "session.exited", + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + threadId: context.session.threadId, + payload: { + reason: "Session stopped", + exitKind: "graceful", + }, + providerRefs: {}, + }); + } + + sessions.delete(context.session.threadId); + }); + + const requireSession = ( + threadId: ThreadId, + ): Effect.Effect => { + const context = sessions.get(threadId); + if (!context) { + return Effect.fail( + new ProviderAdapterSessionNotFoundError({ + provider: PROVIDER, + threadId, + }), + ); + } + if (context.stopped || context.session.status === "closed") { + return Effect.fail( + new ProviderAdapterSessionClosedError({ + provider: PROVIDER, + threadId, + }), + ); + } + return Effect.succeed(context); + }; + + const startSession: ClaudeCodeAdapterShape["startSession"] = (input) => + Effect.gen(function* () { + if (input.provider !== undefined && input.provider !== PROVIDER) { + return yield* new ProviderAdapterValidationError({ + provider: PROVIDER, + operation: "startSession", + issue: `Expected provider '${PROVIDER}' but received '${input.provider}'.`, + }); + } + + const startedAt = yield* nowIso; + const resumeState = readClaudeResumeState(input.resumeCursor); + const threadId = input.threadId; + + const promptQueue = yield* Queue.unbounded(); + const prompt = 
Stream.fromQueue(promptQueue).pipe( + Stream.takeWhile((item) => item.type !== "terminate"), + Stream.catchCause((cause) => + Cause.hasInterruptsOnly(cause) ? Stream.empty : Stream.failCause(cause), + ), + Stream.filter((item) => item.type === "message"), + Stream.map((item) => item.message), + Stream.toAsyncIterable, + ); + + const pendingApprovals = new Map(); + const pendingUserInputs = new Map(); + const inFlightTools = new Map(); + + const contextRef = yield* Ref.make(undefined); + + const canUseTool: CanUseTool = (toolName, toolInput, callbackOptions) => + Effect.runPromise( + Effect.gen(function* () { + const context = yield* Ref.get(contextRef); + if (!context) { + return { + behavior: "deny", + message: "Claude session context is unavailable.", + } satisfies PermissionResult; + } + + // Special-case AskUserQuestion: show a rich user-input dialog + // instead of the generic file-change approval panel. + if (toolName === "AskUserQuestion") { + const questions = (toolInput as Record).questions as + | Array<{ + question: string; + header: string; + options: Array<{ label: string; description: string }>; + multiSelect?: boolean; + }> + | undefined; + + if (!Array.isArray(questions) || questions.length === 0) { + return { + behavior: "allow", + updatedInput: toolInput, + } satisfies PermissionResult; + } + + const requestId = ApprovalRequestId.makeUnsafe(yield* Random.nextUUIDv4); + const answersDeferred = yield* Deferred.make(); + + const pendingInput: PendingUserInput = { + questions, + answers: answersDeferred, + }; + context.pendingUserInputs.set(requestId, pendingInput); + + // Emit user-input.requested event so the web UI shows the rich dialog + const stamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "user-input.requested", + eventId: stamp.eventId, + provider: PROVIDER, + createdAt: stamp.createdAt, + threadId: context.session.threadId, + ...(context.turnState + ? 
{ turnId: asCanonicalTurnId(context.turnState.turnId) } + : {}), + requestId: asRuntimeRequestId(requestId), + payload: { + questions: questions.map((q) => ({ + id: q.question, + header: q.header, + question: q.question, + options: q.options.map((o) => ({ + label: o.label, + description: o.description, + })), + })), + }, + providerRefs: { + ...(context.session.threadId + ? { providerThreadId: context.session.threadId } + : {}), + ...(context.turnState + ? { providerTurnId: String(context.turnState.turnId) } + : {}), + providerRequestId: requestId, + }, + raw: { + source: "claude.sdk.permission", + method: "canUseTool/askUserQuestion", + payload: { toolName, input: toolInput }, + }, + }); + + // Handle abort (e.g. user cancels the turn) + const onAbort = () => { + if (!context.pendingUserInputs.has(requestId)) return; + context.pendingUserInputs.delete(requestId); + Effect.runFork(Deferred.succeed(answersDeferred, {})); + }; + callbackOptions.signal.addEventListener("abort", onAbort, { once: true }); + + // Wait for user answers + const answers = yield* Deferred.await(answersDeferred); + context.pendingUserInputs.delete(requestId); + + // Emit user-input.resolved event + const resolvedStamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "user-input.resolved", + eventId: resolvedStamp.eventId, + provider: PROVIDER, + createdAt: resolvedStamp.createdAt, + threadId: context.session.threadId, + ...(context.turnState + ? { turnId: asCanonicalTurnId(context.turnState.turnId) } + : {}), + requestId: asRuntimeRequestId(requestId), + payload: { answers }, + providerRefs: { + ...(context.session.threadId + ? { providerThreadId: context.session.threadId } + : {}), + ...(context.turnState + ? 
{ providerTurnId: String(context.turnState.turnId) } + : {}), + providerRequestId: requestId, + }, + raw: { + source: "claude.sdk.permission", + method: "canUseTool/askUserQuestion/resolved", + payload: { answers }, + }, + }); + + return { + behavior: "allow", + updatedInput: { ...toolInput, answers }, + } satisfies PermissionResult; + } + + const runtimeMode = input.runtimeMode ?? "full-access"; + if (runtimeMode === "full-access") { + if (context.interactionMode === "plan") { + // In plan mode: allow read-only tools, deny write/execute tools. + // The model gets plan mode system reminders from the CLI, but we + // enforce tool restrictions here as a hard guardrail. + if (isPlanModeAllowedTool(toolName)) { + return { + behavior: "allow", + updatedInput: toolInput, + } satisfies PermissionResult; + } + return { + behavior: "deny", + message: + "Tool execution is not allowed in plan mode. Use ExitPlanMode to leave plan mode first.", + } satisfies PermissionResult; + } + return { + behavior: "allow", + updatedInput: toolInput, + } satisfies PermissionResult; + } + + const requestId = ApprovalRequestId.makeUnsafe(yield* Random.nextUUIDv4); + const requestType = classifyRequestType(toolName); + const detail = summarizeToolRequest(toolName, toolInput); + const decisionDeferred = yield* Deferred.make(); + const pendingApproval: PendingApproval = { + requestType, + detail, + decision: decisionDeferred, + ...(callbackOptions.suggestions + ? { suggestions: callbackOptions.suggestions } + : {}), + }; + + const requestedStamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "request.opened", + eventId: requestedStamp.eventId, + provider: PROVIDER, + createdAt: requestedStamp.createdAt, + threadId: context.session.threadId, + ...(context.turnState + ? 
{ turnId: asCanonicalTurnId(context.turnState.turnId) } + : {}), + requestId: asRuntimeRequestId(requestId), + payload: { + requestType, + detail, + args: { + toolName, + input: toolInput, + ...(callbackOptions.toolUseID ? { toolUseId: callbackOptions.toolUseID } : {}), + }, + }, + providerRefs: { + ...(context.session.threadId + ? { providerThreadId: context.session.threadId } + : {}), + ...(context.turnState + ? { providerTurnId: String(context.turnState.turnId) } + : {}), + providerRequestId: requestId, + }, + raw: { + source: "claude.sdk.permission", + method: "canUseTool/request", + payload: { + toolName, + input: toolInput, + }, + }, + }); + + pendingApprovals.set(requestId, pendingApproval); + + const onAbort = () => { + if (!pendingApprovals.has(requestId)) { + return; + } + pendingApprovals.delete(requestId); + Effect.runFork(Deferred.succeed(decisionDeferred, { decision: "cancel" })); + }; + + callbackOptions.signal.addEventListener("abort", onAbort, { + once: true, + }); + + const approvalResponse = yield* Deferred.await(decisionDeferred); + pendingApprovals.delete(requestId); + + const resolvedStamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "request.resolved", + eventId: resolvedStamp.eventId, + provider: PROVIDER, + createdAt: resolvedStamp.createdAt, + threadId: context.session.threadId, + ...(context.turnState + ? { turnId: asCanonicalTurnId(context.turnState.turnId) } + : {}), + requestId: asRuntimeRequestId(requestId), + payload: { + requestType, + decision: approvalResponse.decision, + }, + providerRefs: { + ...(context.session.threadId + ? { providerThreadId: context.session.threadId } + : {}), + ...(context.turnState + ? 
{ providerTurnId: String(context.turnState.turnId) } + : {}), + providerRequestId: requestId, + }, + raw: { + source: "claude.sdk.permission", + method: "canUseTool/decision", + payload: { + decision: approvalResponse.decision, + }, + }, + }); + + if ( + approvalResponse.decision === "accept" || + approvalResponse.decision === "acceptForSession" + ) { + return { + behavior: "allow", + updatedInput: toolInput, + ...(approvalResponse.decision === "acceptForSession" && + pendingApproval.suggestions + ? { updatedPermissions: [...pendingApproval.suggestions] } + : {}), + } satisfies PermissionResult; + } + + return { + behavior: "deny", + message: + approvalResponse.decision === "cancel" + ? "User cancelled tool execution." + : (approvalResponse.feedback ?? "User declined tool execution."), + } satisfies PermissionResult; + }), + ); + + const providerOptions = input.providerOptions?.claudeCode; + const modelOptions = input.modelOptions?.claudeCode; + // We intentionally do NOT set permissionMode to "bypassPermissions" here even for + // full-access mode. Instead, tool permissions are handled by the canUseTool callback + // which auto-allows in full-access mode. This ensures that setPermissionMode("plan") + // actually restricts tool execution (the CLI's isBypassPermissionsModeAvailable flag + // would otherwise override plan mode restrictions). + const permissionMode = toPermissionMode(providerOptions?.permissionMode); + + const resolvedPlugins = resolveEnabledPlugins(input.cwd ? { cwd: input.cwd } : undefined); + const sdkPlugins = resolvedPlugins.map((p) => ({ type: "local" as const, path: p.path })); + + // Enable the 1M-token context beta for models that require it (e.g. Sonnet 4/4.5). + // Native 1M models (Opus 4.6, Sonnet 4.6) don't need this header — they have 1M by default. 
+ const contextWindowMode = getClaudeContextWindowMode(input.model); + const use1MBeta = modelOptions?.largeContext === true && contextWindowMode === "1m-beta"; + + // Register review comment MCP tools for this session + const reviewCommentMcpServer = createReviewCommentMcpServer(threadId, reviewCommentRepo); + + const queryOptions: ClaudeQueryOptions = { + ...(input.cwd ? { cwd: input.cwd } : {}), + ...(input.model ? { model: input.model } : {}), + ...(providerOptions?.binaryPath + ? { pathToClaudeCodeExecutable: providerOptions.binaryPath } + : {}), + ...(permissionMode ? { permissionMode } : {}), + ...(permissionMode === "bypassPermissions" + ? { allowDangerouslySkipPermissions: true } + : {}), + ...(providerOptions?.maxThinkingTokens !== undefined + ? { maxThinkingTokens: providerOptions.maxThinkingTokens } + : {}), + ...(use1MBeta ? { betas: ["context-1m-2025-08-07" as const] } : {}), + ...(resumeState?.resume ? { resume: resumeState.resume } : {}), + ...(resumeState?.resumeSessionAt ? { resumeSessionAt: resumeState.resumeSessionAt } : {}), + settingSources: ["user", "project", "local"], + includePartialMessages: true, + canUseTool, + env: process.env, + ...(input.cwd ? { additionalDirectories: [input.cwd] } : {}), + ...(sdkPlugins.length > 0 ? { plugins: sdkPlugins } : {}), + mcpServers: { "review-comments": reviewCommentMcpServer }, + }; + + const queryRuntime = yield* Effect.try({ + try: () => + createQuery({ + prompt, + options: queryOptions, + }), + catch: (cause) => + new ProviderAdapterProcessError({ + provider: PROVIDER, + threadId, + detail: toMessage(cause, "Failed to start Claude runtime session."), + cause, + }), + }); + + const session: ProviderSession = { + threadId, + provider: PROVIDER, + status: "ready", + runtimeMode: input.runtimeMode, + ...(input.cwd ? { cwd: input.cwd } : {}), + ...(input.model ? { model: input.model } : {}), + ...(threadId ? { threadId } : {}), + resumeCursor: { + ...(threadId ? 
{ threadId } : {}), + ...(resumeState?.resume ? { resume: resumeState.resume } : {}), + ...(resumeState?.resumeSessionAt + ? { resumeSessionAt: resumeState.resumeSessionAt } + : {}), + turnCount: resumeState?.turnCount ?? 0, + }, + createdAt: startedAt, + updatedAt: startedAt, + }; + + const context: ClaudeSessionContext = { + session, + promptQueue, + query: queryRuntime, + startedAt, + resumeSessionId: resumeState?.resume, + pendingApprovals, + pendingUserInputs, + turns: [], + inFlightTools, + turnState: undefined, + lastAssistantUuid: resumeState?.resumeSessionAt, + lastThreadStartedId: undefined, + stopped: false, + interactionMode: undefined, + taskIdToParentToolUseId: new Map(), + pendingAgentToolUseIds: [], + }; + yield* Ref.set(contextRef, context); + sessions.set(threadId, context); + + const sessionStartedStamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "session.started", + eventId: sessionStartedStamp.eventId, + provider: PROVIDER, + createdAt: sessionStartedStamp.createdAt, + threadId, + payload: input.resumeCursor !== undefined ? { resume: input.resumeCursor } : {}, + providerRefs: {}, + }); + + const configuredStamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "session.configured", + eventId: configuredStamp.eventId, + provider: PROVIDER, + createdAt: configuredStamp.createdAt, + threadId, + payload: { + config: { + ...(input.model ? { model: input.model } : {}), + ...(input.cwd ? { cwd: input.cwd } : {}), + ...(permissionMode ? { permissionMode } : {}), + ...(providerOptions?.maxThinkingTokens !== undefined + ? 
{ maxThinkingTokens: providerOptions.maxThinkingTokens } + : {}), + }, + }, + providerRefs: {}, + }); + + const readyStamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "session.state.changed", + eventId: readyStamp.eventId, + provider: PROVIDER, + createdAt: readyStamp.createdAt, + threadId, + payload: { + state: "ready", + }, + providerRefs: {}, + }); + + Effect.runFork(runSdkStream(context)); + + return { + ...session, + }; + }); + + const sendTurn: ClaudeCodeAdapterShape["sendTurn"] = (input) => + Effect.gen(function* () { + const context = yield* requireSession(input.threadId); + + if (context.turnState) { + return yield* new ProviderAdapterValidationError({ + provider: PROVIDER, + operation: "sendTurn", + issue: `Thread '${input.threadId}' already has an active turn '${context.turnState.turnId}'.`, + }); + } + + if (input.model) { + yield* Effect.tryPromise({ + try: () => context.query.setModel(input.model), + catch: (cause) => toRequestError(input.threadId, "turn/setModel", cause), + }); + } + + if (input.interactionMode) { + context.interactionMode = input.interactionMode === "plan" ? 
"plan" : "default"; + const permissionMode = toPermissionMode(input.interactionMode); + if (permissionMode) { + yield* Effect.tryPromise({ + try: () => context.query.setPermissionMode(permissionMode), + catch: (cause) => toRequestError(input.threadId, "turn/setPermissionMode", cause), + }); + } + } + + const turnId = TurnId.makeUnsafe(yield* Random.nextUUIDv4); + const turnState: ClaudeTurnState = { + turnId, + assistantItemId: yield* Random.nextUUIDv4, + startedAt: yield* nowIso, + items: [], + messageCompleted: false, + emittedTextDelta: false, + fallbackAssistantText: "", + }; + + const updatedAt = yield* nowIso; + context.turnState = turnState; + context.session = { + ...context.session, + status: "running", + activeTurnId: turnId, + updatedAt, + }; + + const turnStartedStamp = yield* makeEventStamp(); + yield* offerRuntimeEvent({ + type: "turn.started", + eventId: turnStartedStamp.eventId, + provider: PROVIDER, + createdAt: turnStartedStamp.createdAt, + threadId: context.session.threadId, + turnId, + payload: input.model ? { model: input.model } : {}, + providerRefs: { + providerTurnId: String(turnId), + }, + }); + + const message = buildUserMessage(input); + + yield* Queue.offer(context.promptQueue, { + type: "message", + message, + }).pipe(Effect.mapError((cause) => toRequestError(input.threadId, "turn/start", cause))); + + return { + threadId: context.session.threadId, + turnId, + ...(context.session.resumeCursor !== undefined + ? 
{ resumeCursor: context.session.resumeCursor } + : {}), + }; + }); + + const interruptTurn: ClaudeCodeAdapterShape["interruptTurn"] = (threadId, _turnId) => + Effect.gen(function* () { + const context = yield* requireSession(threadId); + yield* Effect.tryPromise({ + try: () => context.query.interrupt(), + catch: (cause) => toRequestError(threadId, "turn/interrupt", cause), + }); + }); + + const readThread: ClaudeCodeAdapterShape["readThread"] = (threadId) => + Effect.gen(function* () { + const context = yield* requireSession(threadId); + return yield* snapshotThread(context); + }); + + const rollbackThread: ClaudeCodeAdapterShape["rollbackThread"] = (threadId, numTurns) => + Effect.gen(function* () { + const context = yield* requireSession(threadId); + const nextLength = Math.max(0, context.turns.length - numTurns); + context.turns.splice(nextLength); + yield* updateResumeCursor(context); + return yield* snapshotThread(context); + }); + + const respondToRequest: ClaudeCodeAdapterShape["respondToRequest"] = ( + threadId, + requestId, + decision, + feedback, + ) => + Effect.gen(function* () { + const context = yield* requireSession(threadId); + const pending = context.pendingApprovals.get(requestId); + if (!pending) { + return yield* new ProviderAdapterRequestError({ + provider: PROVIDER, + method: "item/requestApproval/decision", + detail: `Unknown pending approval request: ${requestId}`, + }); + } + + context.pendingApprovals.delete(requestId); + yield* Deferred.succeed(pending.decision, feedback ? 
{ decision, feedback } : { decision }); + }); + + const respondToUserInput: ClaudeCodeAdapterShape["respondToUserInput"] = ( + threadId, + requestId, + answers, + ) => + Effect.gen(function* () { + const context = yield* requireSession(threadId); + const pending = context.pendingUserInputs.get(requestId); + if (!pending) { + return yield* new ProviderAdapterRequestError({ + provider: PROVIDER, + method: "item/tool/requestUserInput", + detail: `Unknown pending user-input request: ${requestId}`, + }); + } + context.pendingUserInputs.delete(requestId); + yield* Deferred.succeed(pending.answers, answers); + }); + + const stopSession: ClaudeCodeAdapterShape["stopSession"] = (threadId) => + Effect.gen(function* () { + const context = yield* requireSession(threadId); + yield* stopSessionInternal(context, { + emitExitEvent: true, + }); + }); + + const listSessions: ClaudeCodeAdapterShape["listSessions"] = () => + Effect.sync(() => Array.from(sessions.values(), ({ session }) => ({ ...session }))); + + const hasSession: ClaudeCodeAdapterShape["hasSession"] = (threadId) => + Effect.sync(() => { + const context = sessions.get(threadId); + return context !== undefined && !context.stopped; + }); + + const stopAll: ClaudeCodeAdapterShape["stopAll"] = () => + Effect.forEach( + sessions, + ([, context]) => + stopSessionInternal(context, { + emitExitEvent: true, + }), + { discard: true }, + ); + + yield* Effect.addFinalizer(() => + Effect.forEach( + sessions, + ([, context]) => + stopSessionInternal(context, { + emitExitEvent: false, + }), + { discard: true }, + ).pipe(Effect.tap(() => Queue.shutdown(runtimeEventQueue))), + ); + + return { + provider: PROVIDER, + capabilities: { + sessionModelSwitch: "in-session", + }, + startSession, + sendTurn, + interruptTurn, + readThread, + rollbackThread, + respondToRequest, + respondToUserInput, + stopSession, + listSessions, + hasSession, + stopAll, + streamEvents: Stream.fromQueue(runtimeEventQueue), + } satisfies ClaudeCodeAdapterShape; + 
}); +} + +export function makeClaudeCodeAdapterLive(options: ClaudeCodeAdapterLiveOptions) { + return Layer.effect(ClaudeCodeAdapter, makeClaudeCodeAdapter(options)); +} diff --git a/apps/server/src/provider/Layers/CodexAdapter.test.ts b/apps/server/src/provider/Layers/CodexAdapter.test.ts index 3ad206d0be..b087fc8288 100644 --- a/apps/server/src/provider/Layers/CodexAdapter.test.ts +++ b/apps/server/src/provider/Layers/CodexAdapter.test.ts @@ -24,6 +24,7 @@ import { import { ServerConfig } from "../../config.ts"; import { CodexAdapter } from "../Services/CodexAdapter.ts"; import { ProviderSessionDirectory } from "../Services/ProviderSessionDirectory.ts"; +import { ProviderAdapterValidationError } from "../Errors.ts"; import { makeCodexAdapterLive } from "./CodexAdapter.ts"; const asThreadId = (value: string): ThreadId => ThreadId.makeUnsafe(value); @@ -156,6 +157,31 @@ const validationLayer = it.layer( ); validationLayer("CodexAdapterLive validation", (it) => { + it.effect("maps codex model options before starting a session", () => + Effect.gen(function* () { + validationManager.startSessionImpl.mockClear(); + const adapter = yield* CodexAdapter; + const result = yield* adapter + .startSession({ + provider: "claudeCode", + threadId: asThreadId("thread-1"), + runtimeMode: "full-access", + }) + .pipe(Effect.result); + + assert.equal(result._tag, "Failure"); + assert.deepStrictEqual( + result.failure, + new ProviderAdapterValidationError({ + provider: "codex", + operation: "startSession", + issue: "Expected provider 'codex' but received 'claudeCode'.", + }), + ); + assert.equal(validationManager.startSessionImpl.mock.calls.length, 0); + }), + ); + it.effect("maps codex model options before starting a session", () => Effect.gen(function* () { validationManager.startSessionImpl.mockClear(); @@ -479,78 +505,6 @@ lifecycleLayer("CodexAdapterLive lifecycle", (it) => { }), ); - it.effect("preserves file-read request type when mapping serverRequest/resolved", () => - 
Effect.gen(function* () { - const adapter = yield* CodexAdapter; - const firstEventFiber = yield* Stream.runHead(adapter.streamEvents).pipe(Effect.forkChild); - - const event: ProviderEvent = { - id: asEventId("evt-file-read-request-resolved"), - kind: "notification", - provider: "codex", - threadId: asThreadId("thread-1"), - createdAt: new Date().toISOString(), - method: "serverRequest/resolved", - requestId: ApprovalRequestId.makeUnsafe("req-file-read-1"), - payload: { - request: { - method: "item/fileRead/requestApproval", - }, - decision: "accept", - }, - }; - - lifecycleManager.emit("event", event); - const firstEvent = yield* Fiber.join(firstEventFiber); - - assert.equal(firstEvent._tag, "Some"); - if (firstEvent._tag !== "Some") { - return; - } - assert.equal(firstEvent.value.type, "request.resolved"); - if (firstEvent.value.type !== "request.resolved") { - return; - } - assert.equal(firstEvent.value.payload.requestType, "file_read_approval"); - }), - ); - - it.effect("preserves explicit empty multi-select user-input answers", () => - Effect.gen(function* () { - const adapter = yield* CodexAdapter; - const firstEventFiber = yield* Stream.runHead(adapter.streamEvents).pipe(Effect.forkChild); - - const event: ProviderEvent = { - id: asEventId("evt-user-input-empty"), - kind: "notification", - provider: "codex", - threadId: asThreadId("thread-1"), - createdAt: new Date().toISOString(), - method: "item/tool/requestUserInput/answered", - payload: { - answers: { - scope: [], - }, - }, - }; - - lifecycleManager.emit("event", event); - const firstEvent = yield* Fiber.join(firstEventFiber); - - assert.equal(firstEvent._tag, "Some"); - if (firstEvent._tag !== "Some") { - return; - } - assert.equal(firstEvent.value.type, "user-input.resolved"); - if (firstEvent.value.type !== "user-input.resolved") { - return; - } - assert.deepEqual(firstEvent.value.payload.answers, { - scope: [], - }); - }), - ); - it.effect("maps windowsSandbox/setupCompleted to session state and 
warning on failure", () => Effect.gen(function* () { const adapter = yield* CodexAdapter; diff --git a/apps/server/src/provider/Layers/CodexAdapter.ts b/apps/server/src/provider/Layers/CodexAdapter.ts index 1e4b80ae9c..ead30be6a4 100644 --- a/apps/server/src/provider/Layers/CodexAdapter.ts +++ b/apps/server/src/provider/Layers/CodexAdapter.ts @@ -274,6 +274,9 @@ function toCanonicalUserInputAnswers( if (Array.isArray(value)) { const normalized = value.filter((entry): entry is string => typeof entry === "string"); + if (normalized.length === 0) { + return []; + } return [[questionId, normalized.length === 1 ? normalized[0] : normalized] as const]; } @@ -281,7 +284,7 @@ function toCanonicalUserInputAnswers( const answerList = asArray(answerObject?.answers)?.filter( (entry): entry is string => typeof entry === "string", ); - if (!answerList) { + if (!answerList || answerList.length === 0) { return []; } return [[questionId, answerList.length === 1 ? answerList[0] : answerList] as const]; @@ -1426,6 +1429,7 @@ const makeCodexAdapter = (options?: CodexAdapterLiveOptions) => threadId, requestId, decision, + _feedback, ) => Effect.tryPromise({ try: () => manager.respondToRequest(threadId, requestId, decision), diff --git a/apps/server/src/provider/Layers/ProviderAdapterRegistry.test.ts b/apps/server/src/provider/Layers/ProviderAdapterRegistry.test.ts index c6f4a3c08c..795c106f05 100644 --- a/apps/server/src/provider/Layers/ProviderAdapterRegistry.test.ts +++ b/apps/server/src/provider/Layers/ProviderAdapterRegistry.test.ts @@ -4,6 +4,7 @@ import { assertFailure } from "@effect/vitest/utils"; import { Effect, Layer, Stream } from "effect"; +import { ClaudeCodeAdapter, ClaudeCodeAdapterShape } from "../Services/ClaudeCodeAdapter.ts"; import { CodexAdapter, CodexAdapterShape } from "../Services/CodexAdapter.ts"; import { ProviderAdapterRegistry } from "../Services/ProviderAdapterRegistry.ts"; import { ProviderAdapterRegistryLive } from "./ProviderAdapterRegistry.ts"; @@ 
-27,9 +28,32 @@ const fakeCodexAdapter: CodexAdapterShape = { streamEvents: Stream.empty, }; +const fakeClaudeAdapter: ClaudeCodeAdapterShape = { + provider: "claudeCode", + capabilities: { sessionModelSwitch: "in-session" }, + startSession: vi.fn(), + sendTurn: vi.fn(), + interruptTurn: vi.fn(), + respondToRequest: vi.fn(), + respondToUserInput: vi.fn(), + stopSession: vi.fn(), + listSessions: vi.fn(), + hasSession: vi.fn(), + readThread: vi.fn(), + rollbackThread: vi.fn(), + stopAll: vi.fn(), + streamEvents: Stream.empty, +}; + const layer = it.layer( Layer.mergeAll( - Layer.provide(ProviderAdapterRegistryLive, Layer.succeed(CodexAdapter, fakeCodexAdapter)), + Layer.provide( + ProviderAdapterRegistryLive, + Layer.mergeAll( + Layer.succeed(CodexAdapter, fakeCodexAdapter), + Layer.succeed(ClaudeCodeAdapter, fakeClaudeAdapter), + ), + ), NodeServices.layer, ), ); @@ -39,10 +63,12 @@ layer("ProviderAdapterRegistryLive", (it) => { Effect.gen(function* () { const registry = yield* ProviderAdapterRegistry; const codex = yield* registry.getByProvider("codex"); + const claude = yield* registry.getByProvider("claudeCode"); assert.equal(codex, fakeCodexAdapter); + assert.equal(claude, fakeClaudeAdapter); const providers = yield* registry.listProviders(); - assert.deepEqual(providers, ["codex"]); + assert.deepEqual(providers, ["codex", "claudeCode"]); }), ); diff --git a/apps/server/src/provider/Layers/ProviderAdapterRegistry.ts b/apps/server/src/provider/Layers/ProviderAdapterRegistry.ts index 3062ed7907..61fa2d18cd 100644 --- a/apps/server/src/provider/Layers/ProviderAdapterRegistry.ts +++ b/apps/server/src/provider/Layers/ProviderAdapterRegistry.ts @@ -15,6 +15,7 @@ import { ProviderAdapterRegistry, type ProviderAdapterRegistryShape, } from "../Services/ProviderAdapterRegistry.ts"; +import { ClaudeCodeAdapter } from "../Services/ClaudeCodeAdapter.ts"; import { CodexAdapter } from "../Services/CodexAdapter.ts"; export interface ProviderAdapterRegistryLiveOptions { @@ -23,7 
+24,10 @@ export interface ProviderAdapterRegistryLiveOptions { const makeProviderAdapterRegistry = (options?: ProviderAdapterRegistryLiveOptions) => Effect.gen(function* () { - const adapters = options?.adapters !== undefined ? options.adapters : [yield* CodexAdapter]; + const adapters = + options?.adapters !== undefined + ? options.adapters + : [yield* CodexAdapter, yield* ClaudeCodeAdapter]; const byProvider = new Map(adapters.map((adapter) => [adapter.provider, adapter])); const getByProvider: ProviderAdapterRegistryShape["getByProvider"] = (provider) => { diff --git a/apps/server/src/provider/Layers/ProviderService.test.ts b/apps/server/src/provider/Layers/ProviderService.test.ts index d5cf4424b1..561b1c3df8 100644 --- a/apps/server/src/provider/Layers/ProviderService.test.ts +++ b/apps/server/src/provider/Layers/ProviderService.test.ts @@ -52,7 +52,7 @@ const asTurnId = (value: string): TurnId => TurnId.makeUnsafe(value); type LegacyProviderRuntimeEvent = { readonly type: string; readonly eventId: EventId; - readonly provider: "codex"; + readonly provider: "codex" | "claudeCode" | "cursor"; readonly createdAt: string; readonly threadId: ThreadId; readonly turnId?: string | undefined; @@ -217,12 +217,15 @@ const sleep = (ms: number) => function makeProviderServiceLayer() { const codex = makeFakeCodexAdapter(); + const claude = makeFakeCodexAdapter("claudeCode"); const registry: typeof ProviderAdapterRegistry.Service = { getByProvider: (provider) => provider === "codex" ? Effect.succeed(codex.adapter) - : Effect.fail(new ProviderUnsupportedError({ provider })), - listProviders: () => Effect.succeed(["codex"]), + : provider === "claudeCode" + ? 
Effect.succeed(claude.adapter) + : Effect.fail(new ProviderUnsupportedError({ provider })), + listProviders: () => Effect.succeed(["codex", "claudeCode"]), }; const providerAdapterLayer = Layer.succeed(ProviderAdapterRegistry, registry); @@ -247,6 +250,7 @@ function makeProviderServiceLayer() { return { codex, + claude, layer, }; } @@ -533,6 +537,29 @@ routing.layer("ProviderServiceLive routing", (it) => { }), ); + it.effect("routes explicit claudeCode provider session starts to the claude adapter", () => + Effect.gen(function* () { + const provider = yield* ProviderService; + + const session = yield* provider.startSession(asThreadId("thread-claude"), { + provider: "claudeCode", + threadId: asThreadId("thread-claude"), + cwd: "/tmp/project-claude", + runtimeMode: "full-access", + }); + + assert.equal(session.provider, "claudeCode"); + assert.equal(routing.claude.startSession.mock.calls.length, 1); + const startInput = routing.claude.startSession.mock.calls[0]?.[0]; + assert.equal(typeof startInput === "object" && startInput !== null, true); + if (startInput && typeof startInput === "object") { + const startPayload = startInput as { provider?: string; cwd?: string }; + assert.equal(startPayload.provider, "claudeCode"); + assert.equal(startPayload.cwd, "/tmp/project-claude"); + } + }), + ); + it.effect("recovers stale sessions for sendTurn using persisted cwd", () => Effect.gen(function* () { const provider = yield* ProviderService; @@ -635,6 +662,15 @@ routing.layer("ProviderServiceLive routing", (it) => { assert.equal(runtimePayload.lastRuntimeEvent, "provider.sendTurn"); } } + + yield* provider.stopAll(); + const stoppedRuntime = yield* runtimeRepository.getByThreadId({ + threadId: session.threadId, + }); + assert.equal(Option.isSome(stoppedRuntime), true); + if (Option.isSome(stoppedRuntime)) { + assert.equal(stoppedRuntime.value.status, "stopped"); + } }), ); }); diff --git a/apps/server/src/provider/Layers/ProviderService.ts 
b/apps/server/src/provider/Layers/ProviderService.ts index 8e3bc72041..6e57f629bc 100644 --- a/apps/server/src/provider/Layers/ProviderService.ts +++ b/apps/server/src/provider/Layers/ProviderService.ts @@ -196,11 +196,6 @@ const makeProviderService = (options?: ProviderServiceLiveOptions) => ); if (existing) { yield* upsertSessionBinding(existing, input.binding.threadId); - yield* analytics.record("provider.session.recovered", { - provider: existing.provider, - strategy: "adopt-existing", - hasResumeCursor: existing.resumeCursor !== undefined, - }); return { adapter, session: existing } as const; } } @@ -231,11 +226,6 @@ const makeProviderService = (options?: ProviderServiceLiveOptions) => } yield* upsertSessionBinding(resumed, input.binding.threadId); - yield* analytics.record("provider.session.recovered", { - provider: resumed.provider, - strategy: "resume-thread", - hasResumeCursor: resumed.resumeCursor !== undefined, - }); return { adapter, session: resumed } as const; }); @@ -363,9 +353,6 @@ const makeProviderService = (options?: ProviderServiceLiveOptions) => allowRecovery: true, }); yield* routed.adapter.interruptTurn(routed.threadId, input.turnId); - yield* analytics.record("provider.turn.interrupted", { - provider: routed.adapter.provider, - }); }); const respondToRequest: ProviderServiceShape["respondToRequest"] = (rawInput) => @@ -380,11 +367,12 @@ const makeProviderService = (options?: ProviderServiceLiveOptions) => operation: "ProviderService.respondToRequest", allowRecovery: true, }); - yield* routed.adapter.respondToRequest(routed.threadId, input.requestId, input.decision); - yield* analytics.record("provider.request.responded", { - provider: routed.adapter.provider, - decision: input.decision, - }); + yield* routed.adapter.respondToRequest( + routed.threadId, + input.requestId, + input.decision, + input.feedback, + ); }); const respondToUserInput: ProviderServiceShape["respondToUserInput"] = (rawInput) => @@ -418,9 +406,6 @@ const 
makeProviderService = (options?: ProviderServiceLiveOptions) => yield* routed.adapter.stopSession(routed.threadId); } yield* directory.remove(input.threadId); - yield* analytics.record("provider.session.stopped", { - provider: routed.adapter.provider, - }); }); const listSessions: ProviderServiceShape["listSessions"] = () => @@ -456,17 +441,13 @@ const makeProviderService = (options?: ProviderServiceLiveOptions) => return session; } - const overrides: { - resumeCursor?: ProviderSession["resumeCursor"]; - runtimeMode?: ProviderSession["runtimeMode"]; - } = {}; - if (session.resumeCursor === undefined && binding.resumeCursor !== undefined) { - overrides.resumeCursor = binding.resumeCursor; - } - if (binding.runtimeMode !== undefined) { - overrides.runtimeMode = binding.runtimeMode; - } - return Object.assign({}, session, overrides); + return { + ...session, + ...(session.resumeCursor === undefined && binding.resumeCursor !== undefined + ? { resumeCursor: binding.resumeCursor } + : {}), + ...(binding.runtimeMode !== undefined ? 
{ runtimeMode: binding.runtimeMode } : {}), + }; }); }); @@ -489,10 +470,6 @@ const makeProviderService = (options?: ProviderServiceLiveOptions) => allowRecovery: true, }); yield* routed.adapter.rollbackThread(routed.threadId, input.numTurns); - yield* analytics.record("provider.conversation.rolled_back", { - provider: routed.adapter.provider, - turns: input.numTurns, - }); }); const runStopAll = () => @@ -515,10 +492,6 @@ const makeProviderService = (options?: ProviderServiceLiveOptions) => ), ), ).pipe(Effect.asVoid); - yield* analytics.record("provider.sessions.stopped_all", { - sessionCount: threadIds.length, - }); - yield* analytics.flush; }); yield* Effect.addFinalizer(() => @@ -537,6 +510,7 @@ const makeProviderService = (options?: ProviderServiceLiveOptions) => listSessions, getCapabilities, rollbackConversation, + stopAll: runStopAll, // Each access creates a fresh PubSub subscription so that multiple // consumers (ProviderRuntimeIngestion, CheckpointReactor, etc.) each // independently receive all runtime events. 
diff --git a/apps/server/src/provider/Layers/ProviderSessionDirectory.test.ts b/apps/server/src/provider/Layers/ProviderSessionDirectory.test.ts index 1882c1cc0e..22d4155622 100644 --- a/apps/server/src/provider/Layers/ProviderSessionDirectory.test.ts +++ b/apps/server/src/provider/Layers/ProviderSessionDirectory.test.ts @@ -133,36 +133,6 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL } })); - it("resets adapterKey to the new provider when provider changes without an explicit adapter key", () => - Effect.gen(function* () { - const directory = yield* ProviderSessionDirectory; - const runtimeRepository = yield* ProviderSessionRuntimeRepository; - const threadId = ThreadId.makeUnsafe("thread-provider-change"); - - yield* runtimeRepository.upsert({ - threadId, - providerName: "cursor", - adapterKey: "cursor", - runtimeMode: "full-access", - status: "running", - lastSeenAt: new Date().toISOString(), - resumeCursor: null, - runtimePayload: null, - }); - - yield* directory.upsert({ - provider: "codex", - threadId, - }); - - const runtime = yield* runtimeRepository.getByThreadId({ threadId }); - assert.equal(Option.isSome(runtime), true); - if (Option.isSome(runtime)) { - assert.equal(runtime.value.providerName, "codex"); - assert.equal(runtime.value.adapterKey, "codex"); - } - })); - it("rehydrates persisted mappings across layer restart", () => Effect.gen(function* () { const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "t3-provider-directory-")); @@ -204,4 +174,26 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL fs.rmSync(tempDir, { recursive: true, force: true }); })); + + it("accepts cursor provider bindings", () => + Effect.gen(function* () { + const directory = yield* ProviderSessionDirectory; + const threadId = ThreadId.makeUnsafe("thread-cursor"); + + yield* directory.upsert({ + provider: "cursor", + threadId, + }); + + const provider = yield* directory.getProvider(threadId); + 
assert.equal(provider, "cursor"); + const resolvedBinding = yield* directory.getBinding(threadId); + assertSome(resolvedBinding, { + threadId, + provider: "cursor", + }); + if (Option.isSome(resolvedBinding)) { + assert.equal(resolvedBinding.value.threadId, threadId); + } + })); }); diff --git a/apps/server/src/provider/Layers/ProviderSessionDirectory.ts b/apps/server/src/provider/Layers/ProviderSessionDirectory.ts index 38e097e1c9..168e36f47a 100644 --- a/apps/server/src/provider/Layers/ProviderSessionDirectory.ts +++ b/apps/server/src/provider/Layers/ProviderSessionDirectory.ts @@ -22,7 +22,7 @@ function decodeProviderKind( providerName: string, operation: string, ): Effect.Effect { - if (providerName === "codex") { + if (providerName === "codex" || providerName === "claudeCode" || providerName === "cursor") { return Effect.succeed(providerName); } return Effect.fail( @@ -92,15 +92,11 @@ const makeProviderSessionDirectory = Effect.gen(function* () { } const now = new Date().toISOString(); - const providerChanged = - existingRuntime !== undefined && existingRuntime.providerName !== binding.provider; yield* repository .upsert({ threadId: resolvedThreadId, providerName: binding.provider, - adapterKey: - binding.adapterKey ?? - (providerChanged ? binding.provider : (existingRuntime?.adapterKey ?? binding.provider)), + adapterKey: binding.adapterKey ?? existingRuntime?.adapterKey ?? binding.provider, runtimeMode: binding.runtimeMode ?? existingRuntime?.runtimeMode ?? "full-access", status: binding.status ?? existingRuntime?.status ?? "running", lastSeenAt: now, diff --git a/apps/server/src/provider/Services/ClaudeCodeAdapter.ts b/apps/server/src/provider/Services/ClaudeCodeAdapter.ts new file mode 100644 index 0000000000..6ef6876061 --- /dev/null +++ b/apps/server/src/provider/Services/ClaudeCodeAdapter.ts @@ -0,0 +1,31 @@ +/** + * ClaudeCodeAdapter - Claude Code implementation of the generic provider adapter contract. 
+ * + * This service owns Claude runtime/session semantics and emits canonical + * provider runtime events. It does not perform cross-provider routing, shared + * event fan-out, or checkpoint orchestration. + * + * Uses Effect `ServiceMap.Service` for dependency injection and returns the + * shared provider-adapter error channel with `provider: "claudeCode"` context. + * + * @module ClaudeCodeAdapter + */ +import { ServiceMap } from "effect"; + +import type { ProviderAdapterError } from "../Errors.ts"; +import type { ProviderAdapterShape } from "./ProviderAdapter.ts"; + +/** + * ClaudeCodeAdapterShape - Service API for the Claude Code provider adapter. + */ +export interface ClaudeCodeAdapterShape extends ProviderAdapterShape { + readonly provider: "claudeCode"; +} + +/** + * ClaudeCodeAdapter - Service tag for Claude Code provider adapter operations. + */ +export class ClaudeCodeAdapter extends ServiceMap.Service< + ClaudeCodeAdapter, + ClaudeCodeAdapterShape +>()("t3/provider/Services/ClaudeCodeAdapter") {} diff --git a/apps/server/src/provider/Services/ProviderAdapter.ts b/apps/server/src/provider/Services/ProviderAdapter.ts index 38a05f7574..2c72b916c6 100644 --- a/apps/server/src/provider/Services/ProviderAdapter.ts +++ b/apps/server/src/provider/Services/ProviderAdapter.ts @@ -75,6 +75,7 @@ export interface ProviderAdapterShape { threadId: ThreadId, requestId: ApprovalRequestId, decision: ProviderApprovalDecision, + feedback?: string, ) => Effect.Effect; /** diff --git a/apps/server/src/provider/Services/ProviderService.ts b/apps/server/src/provider/Services/ProviderService.ts index ebfe8c8ab1..600541e895 100644 --- a/apps/server/src/provider/Services/ProviderService.ts +++ b/apps/server/src/provider/Services/ProviderService.ts @@ -99,6 +99,16 @@ export interface ProviderServiceShape { readonly numTurns: number; }) => Effect.Effect; + /** + * Stop all active provider sessions. 
+ * + * Persisted session bindings remain so stale sessions can be resumed after + * process restart. + * + * Runtime alias mappings are cleared as part of shutdown. + */ + readonly stopAll: () => Effect.Effect; + /** * Canonical provider runtime event stream. * diff --git a/apps/server/src/provider/reviewCommentTools.ts b/apps/server/src/provider/reviewCommentTools.ts new file mode 100644 index 0000000000..65d7d92897 --- /dev/null +++ b/apps/server/src/provider/reviewCommentTools.ts @@ -0,0 +1,131 @@ +/** + * Review Comment MCP Tools — Programmatic MCP server providing review_comment, + * update_review_comment, and list_review_comments tools for Claude Code sessions. + */ +import { + createSdkMcpServer, + tool, + type McpSdkServerConfigWithInstance, +} from "@anthropic-ai/claude-agent-sdk"; +import { Effect } from "effect"; +import { z } from "zod"; + +import type { ThreadId } from "@t3tools/contracts"; +import type { ReviewCommentRepositoryShape } from "../persistence/Services/ReviewCommentRepository.ts"; + +export function createReviewCommentMcpServer( + threadId: ThreadId, + repository: ReviewCommentRepositoryShape, +): McpSdkServerConfigWithInstance { + return createSdkMcpServer({ + name: "review-comments", + version: "1.0.0", + tools: [ + tool( + "review_comment", + "Add a review comment on a specific file and line number in the PR diff. Use this to annotate issues, suggestions, or observations during code review.", + { + file: z.string().describe("Relative file path (e.g. 
src/utils.ts)"), + startLine: z + .number() + .int() + .positive() + .describe("Line number in the file where the comment applies"), + endLine: z + .number() + .int() + .positive() + .optional() + .describe("Optional end line for multi-line ranges"), + body: z.string().describe("Comment body (supports markdown)"), + severity: z + .enum(["info", "suggestion", "issue", "blocker"]) + .describe( + "info = observation, suggestion = improvement idea, issue = should fix, blocker = must fix", + ), + }, + async (args) => { + const comment = await Effect.runPromise( + repository.add({ + threadId, + file: args.file, + startLine: args.startLine, + ...(args.endLine !== undefined ? { endLine: args.endLine } : {}), + body: args.body, + severity: args.severity, + }), + ); + + return { + content: [ + { + type: "text" as const, + text: `Review comment added (id: ${comment.id}) on ${args.file}:${args.startLine} [${args.severity}]`, + }, + ], + }; + }, + ), + + tool( + "update_review_comment", + "Update an existing review comment by its ID. Use list_review_comments first to find the ID.", + { + id: z.string().describe("The comment ID to update"), + body: z.string().optional().describe("New comment body"), + severity: z + .enum(["info", "suggestion", "issue", "blocker"]) + .optional() + .describe("New severity level"), + }, + async (args) => { + await Effect.runPromise( + repository.update({ + id: args.id, + ...(args.body !== undefined ? { body: args.body } : {}), + ...(args.severity !== undefined ? { severity: args.severity } : {}), + }), + ); + + return { + content: [ + { + type: "text" as const, + text: `Review comment ${args.id} updated.`, + }, + ], + }; + }, + ), + + tool( + "list_review_comments", + "List all review comments made so far in this review session. 
Shows file, line, severity, and body for each comment.", + {}, + async () => { + const comments = await Effect.runPromise(repository.listByThreadId({ threadId })); + + if (comments.length === 0) { + return { + content: [{ type: "text" as const, text: "No review comments yet." }], + }; + } + + const lines = comments.map( + (c) => + `- [${c.id}] ${c.file}:${c.startLine}${c.endLine ? `-${c.endLine}` : ""} [${c.severity}]${c.publishedAt ? " (published)" : ""} ${c.body}`, + ); + + return { + content: [ + { + type: "text" as const, + text: `Review comments (${comments.length}):\n${lines.join("\n")}`, + }, + ], + }; + }, + ), + ], + }); +} diff --git a/apps/server/src/serverLayers.ts b/apps/server/src/serverLayers.ts index ff9b10d96f..b8f7de1ba2 100644 --- a/apps/server/src/serverLayers.ts +++ b/apps/server/src/serverLayers.ts @@ -19,6 +19,7 @@ import { OrchestrationProjectionSnapshotQueryLive } from "./orchestration/Layers import { ProviderRuntimeIngestionLive } from "./orchestration/Layers/ProviderRuntimeIngestion"; import { RuntimeReceiptBusLive } from "./orchestration/Layers/RuntimeReceiptBus"; import { ProviderUnsupportedError } from "./provider/Errors"; +import { makeClaudeCodeAdapterLive } from "./provider/Layers/ClaudeCodeAdapter"; import { makeCodexAdapterLive } from "./provider/Layers/CodexAdapter"; import { ProviderAdapterRegistryLive } from "./provider/Layers/ProviderAdapterRegistry"; import { makeProviderServiceLive } from "./provider/Layers/ProviderService"; @@ -33,6 +34,14 @@ import { GitCoreLive } from "./git/Layers/GitCore"; import { GitHubCliLive } from "./git/Layers/GitHubCli"; import { CodexTextGenerationLive } from "./git/Layers/CodexTextGeneration"; import { GitServiceLive } from "./git/Layers/GitService"; +import { JiraCliLive } from "./jira/Layers/JiraCli"; +import { JiraManagerLive } from "./jira/Layers/JiraManager"; +import { ReviewCommentRepositoryLive } from "./persistence/Layers/ReviewCommentRepository"; +import { ReviewCommentRepository } 
from "./persistence/Services/ReviewCommentRepository"; +import { ReviewRequestRepositoryLive } from "./persistence/Layers/ReviewRequestRepository"; +import { MemoryRepositoryLive } from "./persistence/Layers/MemoryRepository"; +import { MemoryExtractionLive } from "./memory/Layers/MemoryExtraction"; +import { MemoryReactorLive } from "./memory/Layers/MemoryReactor"; import { BunPtyAdapterLive } from "./terminal/Layers/BunPTY"; import { NodePtyAdapterLive } from "./terminal/Layers/NodePTY"; import { AnalyticsService } from "./telemetry/Services/AnalyticsService"; @@ -58,8 +67,18 @@ export function makeServerProviderLayer(): Layer.Layer< const codexAdapterLayer = makeCodexAdapterLive( nativeEventLogger ? { nativeEventLogger } : undefined, ); + // Build review comment repository and pass it to the Claude adapter + // so it can register MCP tools for structured review comments. + const reviewCommentRepo = yield* Effect.gen(function* () { + return yield* ReviewCommentRepository; + }).pipe(Effect.provide(ReviewCommentRepositoryLive)); + const claudeAdapterLayer = makeClaudeCodeAdapterLive({ + ...(nativeEventLogger ? 
{ nativeEventLogger } : {}), + reviewCommentRepository: reviewCommentRepo, + }); const adapterRegistryLayer = ProviderAdapterRegistryLive.pipe( Layer.provide(codexAdapterLayer), + Layer.provide(claudeAdapterLayer), Layer.provideMerge(providerSessionDirectoryLayer), ); return makeProviderServiceLive( @@ -121,11 +140,32 @@ export function makeServerRuntimeServicesLayer() { Layer.provideMerge(textGenerationLayer), ); + const jiraManagerLayer = JiraManagerLive.pipe( + Layer.provideMerge(JiraCliLive), + Layer.provideMerge(textGenerationLayer), + ); + + const memoryExtractionLayer = MemoryExtractionLive.pipe( + Layer.provideMerge(MemoryRepositoryLive), + Layer.provideMerge(orchestrationReactorLayer), + ); + + const memoryReactorLayer = MemoryReactorLive.pipe( + Layer.provideMerge(memoryExtractionLayer), + Layer.provideMerge(orchestrationReactorLayer), + ); + return Layer.mergeAll( orchestrationReactorLayer, gitCoreLayer, gitManagerLayer, + jiraManagerLayer, terminalLayer, KeybindingsLive, + ReviewCommentRepositoryLive, + ReviewRequestRepositoryLive, + MemoryRepositoryLive, + memoryExtractionLayer, + memoryReactorLayer, ).pipe(Layer.provideMerge(NodeServices.layer)); } diff --git a/apps/server/src/serverLogger.ts b/apps/server/src/serverLogger.ts index de6b27f429..0abf9f3826 100644 --- a/apps/server/src/serverLogger.ts +++ b/apps/server/src/serverLogger.ts @@ -1,15 +1,18 @@ import fs from "node:fs"; import path from "node:path"; -import { Effect, Logger } from "effect"; +import { Effect, Logger, Option } from "effect"; import * as Layer from "effect/Layer"; import { ServerConfig } from "./config"; export const ServerLoggerLive = Effect.gen(function* () { - const config = yield* ServerConfig; + const config = yield* Effect.serviceOption(ServerConfig); + if (Option.isNone(config)) { + return Logger.layer([Logger.defaultLogger]); + } - const logDir = path.join(config.stateDir, "logs"); + const logDir = path.join(config.value.stateDir, "logs"); const logPath = 
path.join(logDir, "server.log"); yield* Effect.sync(() => { diff --git a/apps/server/src/wsServer.test.ts b/apps/server/src/wsServer.test.ts index f12792a318..5b477e931e 100644 --- a/apps/server/src/wsServer.test.ts +++ b/apps/server/src/wsServer.test.ts @@ -62,6 +62,7 @@ const asTurnId = (value: string): TurnId => TurnId.makeUnsafe(value); const defaultOpenService: OpenShape = { openBrowser: () => Effect.void, openInEditor: () => Effect.void, + openInWarp: () => Effect.void, }; const defaultProviderStatuses: ReadonlyArray = [ @@ -831,6 +832,7 @@ describe("WebSocket Server", () => { issues: [], providers: defaultProviderStatuses, availableEditors: expect.any(Array), + availableTerminals: expect.any(Array), }); expectAvailableEditors((response.result as { availableEditors: unknown }).availableEditors); }); @@ -856,6 +858,7 @@ describe("WebSocket Server", () => { issues: [], providers: defaultProviderStatuses, availableEditors: expect.any(Array), + availableTerminals: expect.any(Array), }); expectAvailableEditors((response.result as { availableEditors: unknown }).availableEditors); @@ -891,6 +894,7 @@ describe("WebSocket Server", () => { ], providers: defaultProviderStatuses, availableEditors: expect.any(Array), + availableTerminals: expect.any(Array), }); expectAvailableEditors((response.result as { availableEditors: unknown }).availableEditors); expect(fs.readFileSync(keybindingsPath, "utf8")).toBe("{ not-json"); @@ -990,6 +994,7 @@ describe("WebSocket Server", () => { openCalls.push({ cwd: input.cwd, editor: input.editor }); return Effect.void; }, + openInWarp: () => Effect.void, }; server = await createTestServer({ cwd: "/my/workspace", open: openService }); @@ -1038,6 +1043,7 @@ describe("WebSocket Server", () => { issues: [], providers: defaultProviderStatuses, availableEditors: expect.any(Array), + availableTerminals: expect.any(Array), }); expectAvailableEditors((response.result as { availableEditors: unknown }).availableEditors); }); @@ -1085,6 +1091,7 @@ 
describe("WebSocket Server", () => { issues: [], providers: defaultProviderStatuses, availableEditors: expect.any(Array), + availableTerminals: expect.any(Array), }); expectAvailableEditors( (configResponse.result as { availableEditors: unknown }).availableEditors, @@ -1228,6 +1235,7 @@ describe("WebSocket Server", () => { listSessions: () => Effect.succeed([]), getCapabilities: () => Effect.succeed({ sessionModelSwitch: "in-session" }), rollbackConversation: () => unsupported(), + stopAll: () => unsupported(), streamEvents: Stream.fromPubSub(runtimeEventPubSub), }; const providerLayer = Layer.succeed(ProviderService, providerService); @@ -1467,6 +1475,7 @@ describe("WebSocket Server", () => { openBrowser: () => Effect.void, openInEditor: () => Effect.sync(() => BigInt(1)).pipe(Effect.map((result) => result as unknown as void)), + openInWarp: () => Effect.void, }; try { @@ -1683,6 +1692,7 @@ describe("WebSocket Server", () => { aheadCount: 0, behindCount: 0, pr: null, + originUrl: null, }; const status = vi.fn(() => Effect.succeed(statusResult)); diff --git a/apps/server/src/wsServer.ts b/apps/server/src/wsServer.ts index 2e6ac51b7f..679ad11d37 100644 --- a/apps/server/src/wsServer.ts +++ b/apps/server/src/wsServer.ts @@ -47,6 +47,7 @@ import { WebSocketServer, type WebSocket } from "ws"; import { createLogger } from "./logger"; import { GitManager } from "./git/Services/GitManager.ts"; +import { JiraManager } from "./jira/Services/JiraManager.ts"; import { TerminalManager } from "./terminal/Services/Manager.ts"; import { Keybindings } from "./keybindings"; import { searchWorkspaceEntries } from "./workspaceEntries"; @@ -57,9 +58,15 @@ import { ProviderService } from "./provider/Services/ProviderService"; import { ProviderHealth } from "./provider/Services/ProviderHealth"; import { CheckpointDiffQuery } from "./checkpointing/Services/CheckpointDiffQuery"; import { clamp } from "effect/Number"; -import { Open, resolveAvailableEditors } from "./open"; +import { Open, 
resolveAvailableEditors, resolveAvailableTerminals } from "./open"; import { ServerConfig } from "./config"; import { GitCore } from "./git/Services/GitCore.ts"; +import { ReviewCommentRepository } from "./persistence/Services/ReviewCommentRepository.ts"; +import { ReviewRequestRepository } from "./persistence/Services/ReviewRequestRepository.ts"; +import { MemoryRepository } from "./persistence/Services/MemoryRepository.ts"; +import { MemoryExtraction } from "./memory/Services/MemoryExtraction.ts"; +import { MemoryReactor } from "./memory/Services/MemoryReactor.ts"; +import { GitHubCli } from "./git/Services/GitHubCli.ts"; import { tryHandleProjectFaviconRequest } from "./projectFaviconRoute"; import { ATTACHMENTS_ROUTE_PREFIX, @@ -72,12 +79,29 @@ import { resolveAttachmentPath, resolveAttachmentPathById, } from "./attachmentStore.ts"; +import { runProcess } from "./processRunner"; import { parseBase64DataUrl } from "./imageMime.ts"; import { AnalyticsService } from "./telemetry/Services/AnalyticsService.ts"; import { expandHomePath } from "./os-jank.ts"; import { makeServerPushBus } from "./wsServer/pushBus.ts"; import { makeServerReadiness } from "./wsServer/readiness.ts"; import { decodeJsonResult, formatSchemaError } from "@t3tools/shared/schemaJson"; +import { createTtlCache } from "@t3tools/shared/cache"; + +// Cache PR head SHA for 2 minutes — avoids re-fetching on each comment publish. +const prHeadShaCache = createTtlCache(120_000); + +// Cache review request GitHub results for 60s — collapses duplicate polls from multiple tabs. +interface CachedReviewRequestPr { + url: string; + number: number; + title: string; + body: string; + repository: { nameWithOwner: string }; + author: { login: string }; + labels: readonly { name: string }[]; +} +const reviewRequestGhCache = createTtlCache(60_000); /** * ServerShape - Service API for server lifecycle control. 
@@ -153,6 +177,48 @@ function websocketRawToString(raw: unknown): string | null { return null; } +/** + * Format thread messages as a plain-text conversation context for AI generation. + * Takes the most recent messages up to `maxChars` total. + */ +function formatThreadContext( + messages: ReadonlyArray<{ role: string; text: string }>, + maxChars: number, +): string { + const lines: string[] = []; + let totalChars = 0; + // Walk backwards from most recent to stay within budget + for (let i = messages.length - 1; i >= 0 && totalChars < maxChars; i--) { + const msg = messages[i]!; + const line = `[${msg.role}]: ${msg.text}`; + lines.unshift(line); + totalChars += line.length; + } + return lines.join("\n\n"); +} + +/** + * Best-effort extraction of the request `id` from raw JSON text. + * Used so that schema validation errors can be sent back with the + * correct request id instead of "unknown". + */ +function extractRequestId(messageText: string): string { + try { + const parsed: unknown = JSON.parse(messageText); + if ( + parsed && + typeof parsed === "object" && + "id" in parsed && + typeof (parsed as { id: unknown }).id === "string" + ) { + return (parsed as { id: string }).id; + } + } catch { + // Fall through + } + return "unknown"; +} + function toPosixRelativePath(input: string): string { return input.replaceAll("\\", "/"); } @@ -213,11 +279,18 @@ export type ServerCoreRuntimeServices = export type ServerRuntimeServices = | ServerCoreRuntimeServices | GitManager + | JiraManager | GitCore + | GitHubCli | TerminalManager | Keybindings | Open - | AnalyticsService; + | AnalyticsService + | ReviewCommentRepository + | ReviewRequestRepository + | MemoryRepository + | MemoryExtraction + | MemoryReactor; export class ServerLifecycleError extends Schema.TaggedErrorClass()( "ServerLifecycleError", @@ -249,12 +322,19 @@ export const createServer = Effect.fn(function* (): Effect.fn.Return< autoBootstrapProjectFromCwd, } = serverConfig; const availableEditors = 
resolveAvailableEditors(); + const availableTerminals = resolveAvailableTerminals(); const gitManager = yield* GitManager; + const jiraManager = yield* JiraManager; const terminalManager = yield* TerminalManager; const keybindingsManager = yield* Keybindings; const providerHealth = yield* ProviderHealth; const git = yield* GitCore; + const gitHubCli = yield* GitHubCli; + const reviewCommentRepo = yield* ReviewCommentRepository; + const reviewRequestRepo = yield* ReviewRequestRepository; + const memoryRepo = yield* MemoryRepository; + const memoryExtraction = yield* MemoryExtraction; const fileSystem = yield* FileSystem.FileSystem; const path = yield* Path.Path; @@ -488,9 +568,21 @@ export const createServer = Effect.fn(function* (): Effect.fn.Return< return; } - // In dev mode, redirect to Vite dev server + // In dev mode, proxy to Vite dev server if (devUrl) { - respond(302, { Location: devUrl.href }); + const targetUrl = new URL(url.pathname + url.search, devUrl); + const proxyReq = http.request( + targetUrl, + { method: req.method, headers: { ...req.headers, host: targetUrl.host } }, + (proxyRes) => { + res.writeHead(proxyRes.statusCode ?? 
502, proxyRes.headers); + proxyRes.pipe(res); + }, + ); + proxyReq.on("error", () => { + respond(502, { "Content-Type": "text/plain" }, "Dev server unavailable"); + }); + req.pipe(proxyReq); return; } @@ -602,7 +694,7 @@ export const createServer = Effect.fn(function* (): Effect.fn.Return< const projectionReadModelQuery = yield* ProjectionSnapshotQuery; const checkpointDiffQuery = yield* CheckpointDiffQuery; const orchestrationReactor = yield* OrchestrationReactor; - const { openInEditor } = yield* Open; + const { openInEditor, openInWarp } = yield* Open; const subscriptionsScope = yield* Scope.make("sequential"); yield* Effect.addFinalizer(() => Scope.close(subscriptionsScope, Exit.void)); @@ -619,6 +711,11 @@ export const createServer = Effect.fn(function* (): Effect.fn.Return< ).pipe(Effect.forkIn(subscriptionsScope)); yield* Scope.provide(orchestrationReactor.start, subscriptionsScope); + + // Start the memory reactor (thread summaries + periodic extraction) + const memoryReactorService = yield* MemoryReactor; + yield* Scope.provide(memoryReactorService.start, subscriptionsScope); + yield* readiness.markOrchestrationSubscriptionsReady; let welcomeBootstrapProjectId: ProjectId | undefined; @@ -748,6 +845,60 @@ export const createServer = Effect.fn(function* (): Effect.fn.Return< }); } + case WS_METHODS.projectsReadFile: { + const body = stripRequestTag(request.body); + + // Build candidate roots: cwd + git repo root (if different). + const roots = [body.cwd]; + const gitRoot = yield* git.getRepoRoot(body.cwd); + if (gitRoot && gitRoot !== path.resolve(body.cwd)) { + roots.push(gitRoot); + } + + // Resolve each candidate and validate it stays within its root + // (prevents path traversal via "../../../etc/passwd"). 
+ const candidates: string[] = []; + for (const root of roots) { + const resolvedCandidate = yield* resolveWorkspaceWritePath({ + workspaceRoot: root, + relativePath: body.relativePath, + path, + }).pipe(Effect.catch(() => Effect.succeed(null))); + if (resolvedCandidate) { + candidates.push(resolvedCandidate.absolutePath); + } + } + + if (candidates.length === 0) { + return yield* new RouteRequestError({ + message: "File path must be relative and stay within the project root.", + }); + } + + let content: string | null = null; + for (const candidate of candidates) { + const result = yield* fileSystem.readFileString(candidate).pipe( + Effect.map((c) => ({ path: candidate, content: c })), + Effect.catch(() => Effect.succeed(null)), + ); + if (result) { + content = result.content; + break; + } + } + + if (content === null) { + return yield* new RouteRequestError({ + message: `Failed to read file: ${body.relativePath} (tried: ${candidates.join(", ")})`, + }); + } + const allLines = content.split("\n"); + const startLine = body.startLine ?? 1; + const endLine = body.endLine ?? 
allLines.length; + const slicedLines = allLines.slice(startLine - 1, endLine); + return { content: slicedLines.join("\n"), totalLines: allLines.length }; + } + case WS_METHODS.projectsWriteFile: { const body = stripRequestTag(request.body); const target = yield* resolveWorkspaceWritePath({ @@ -781,6 +932,11 @@ export const createServer = Effect.fn(function* (): Effect.fn.Return< return yield* openInEditor(body); } + case WS_METHODS.shellOpenInWarp: { + const body = stripRequestTag(request.body); + return yield* openInWarp(body); + } + case WS_METHODS.gitStatus: { const body = stripRequestTag(request.body); return yield* gitManager.status(body); @@ -811,6 +967,16 @@ export const createServer = Effect.fn(function* (): Effect.fn.Return< return yield* git.listBranches(body); } + case WS_METHODS.gitDiffBranch: { + const body = stripRequestTag(request.body); + return yield* git.diffBranch(body); + } + + case WS_METHODS.gitDiffWorkingTree: { + const body = stripRequestTag(request.body); + return yield* git.diffWorkingTree(body); + } + case WS_METHODS.gitCreateWorktree: { const body = stripRequestTag(request.body); return yield* git.createWorktree(body); @@ -831,11 +997,97 @@ export const createServer = Effect.fn(function* (): Effect.fn.Return< return yield* Effect.scoped(git.checkoutBranch(body)); } + case WS_METHODS.gitCloneRepo: { + const body = stripRequestTag(request.body); + return yield* git.cloneRepo(body); + } + + case WS_METHODS.gitSetBranchUpstream: { + const body = stripRequestTag(request.body); + yield* git.setBranchUpstream(body); + return {}; + } + case WS_METHODS.gitInit: { const body = stripRequestTag(request.body); return yield* git.initRepo(body); } + case WS_METHODS.gitFetchPrDetails: { + const body = stripRequestTag(request.body); + return yield* gitHubCli.fetchPrDetails(body); + } + + case WS_METHODS.gitListOpenPrs: { + const body = stripRequestTag(request.body); + const pullRequests = yield* gitHubCli.listOpenPullRequests({ + cwd: body.cwd, + 
...(body.repo ? { repo: body.repo } : {}), + limit: 30, + }); + return { pullRequests }; + } + + case WS_METHODS.jiraIsConfigured: { + const baseUrl = process.env.JIRA_BASE_URL; + const email = process.env.JIRA_USER_EMAIL; + const token = process.env.JIRA_API_TOKEN; + return { configured: !!(baseUrl && email && token) }; + } + + case WS_METHODS.jiraViewIssue: { + const body = stripRequestTag(request.body); + return yield* jiraManager.viewIssue(body); + } + + case WS_METHODS.jiraCreateIssue: { + const body = stripRequestTag(request.body); + return yield* jiraManager.createIssue(body); + } + + case WS_METHODS.jiraMoveIssue: { + const body = stripRequestTag(request.body); + return yield* jiraManager.moveIssue(body); + } + + case WS_METHODS.jiraAddComment: { + const body = stripRequestTag(request.body); + return yield* jiraManager.addComment(body); + } + + case WS_METHODS.jiraListIssues: { + const body = stripRequestTag(request.body); + return yield* jiraManager.listIssues(body); + } + + case WS_METHODS.jiraListTransitions: { + const body = stripRequestTag(request.body); + return yield* jiraManager.listTransitions(body); + } + + case WS_METHODS.jiraGenerateTicketContent: { + const body = stripRequestTag(request.body); + const snapshot = yield* projectionReadModelQuery.getSnapshot(); + const thread = snapshot.threads.find((t) => t.id === body.threadId); + const conversationContext = thread ? formatThreadContext(thread.messages, 20_000) : ""; + return yield* jiraManager.generateTicketContent({ + conversationContext, + projectKey: body.projectKey, + }); + } + + case WS_METHODS.jiraGenerateProgressComment: { + const body = stripRequestTag(request.body); + const snapshot = yield* projectionReadModelQuery.getSnapshot(); + const thread = snapshot.threads.find((t) => t.id === body.threadId); + const recentConversation = thread ? 
formatThreadContext(thread.messages, 20_000) : ""; + return yield* jiraManager.generateProgressComment({ + ticketKey: body.ticketKey, + ticketTitle: body.ticketTitle, + recentConversation, + }); + } + case WS_METHODS.terminalOpen: { const body = stripRequestTag(request.body); return yield* terminalManager.open(body); @@ -875,6 +1127,7 @@ export const createServer = Effect.fn(function* (): Effect.fn.Return< issues: keybindingsConfig.issues, providers: providerStatuses, availableEditors, + availableTerminals, }; case WS_METHODS.serverUpsertKeybinding: { @@ -883,6 +1136,402 @@ export const createServer = Effect.fn(function* (): Effect.fn.Return< return { keybindings: keybindingsConfig, issues: [] }; } + case WS_METHODS.reviewCommentAdd: { + const body = stripRequestTag(request.body); + const comment = yield* reviewCommentRepo.add(body); + return { comment }; + } + + case WS_METHODS.reviewCommentUpdate: { + const body = stripRequestTag(request.body); + yield* reviewCommentRepo.update(body); + return {}; + } + + case WS_METHODS.reviewCommentDelete: { + const body = stripRequestTag(request.body); + yield* reviewCommentRepo.delete(body); + return {}; + } + + case WS_METHODS.reviewCommentList: { + const body = stripRequestTag(request.body); + const comments = yield* reviewCommentRepo.listByThreadId(body); + return { comments }; + } + + case WS_METHODS.reviewCommentPublish: { + const body = stripRequestTag(request.body); + const allComments = yield* reviewCommentRepo.listByThreadId({ + threadId: body.threadId, + }); + const comments = body.commentId + ? allComments.filter((c) => c.id === body.commentId) + : [...allComments]; + + if (comments.length === 0) { + return { published: 0 }; + } + + // Parse owner/repo/number from PR URL + const prUrlMatch = body.prUrl.match(/github\.com\/([\w.-]+)\/([\w.-]+)\/pull\/(\d+)/); + if (!prUrlMatch) { + return yield* new RouteRequestError({ + message: "Invalid PR URL format. 
Expected: https://github.com/owner/repo/pull/123", + }); + } + const [, owner, repo, prNumber] = prUrlMatch; + + // Get the PR head SHA (cached for 2 minutes to avoid repeated API calls + // when publishing multiple comments in the same review session). + const prKey = `${owner}/${repo}#${prNumber}`; + const cachedSha = prHeadShaCache.get(prKey); + const headSha = cachedSha + ? cachedSha + : yield* gitHubCli + .execute({ + cwd: body.cwd, + args: ["api", `repos/${owner}/${repo}/pulls/${prNumber}`, "--jq", ".head.sha"], + timeoutMs: 15_000, + }) + .pipe( + Effect.map((r) => r.stdout.trim()), + Effect.tap((sha) => Effect.sync(() => prHeadShaCache.set(prKey, sha))), + Effect.catch(() => + // Fallback: try local git rev-parse + git.resolveRef(body.cwd, "HEAD").pipe(Effect.catch(() => Effect.succeed("HEAD"))), + ), + ); + + // Pre-flight: check which files are in the PR diff so we don't + // waste an API call that GitHub will reject with 422. + const prFiles = yield* gitHubCli + .execute({ + cwd: body.cwd, + args: ["api", `repos/${owner}/${repo}/pulls/${prNumber}/files`, "--jq", ".[].filename"], + timeoutMs: 15_000, + }) + .pipe( + Effect.map((r) => new Set(r.stdout.trim().split("\n").filter(Boolean))), + Effect.catch(() => Effect.succeed(null as Set | null)), + ); + + if (prFiles) { + const outsideDiff = comments.filter((c) => !prFiles.has(c.file)); + if (outsideDiff.length > 0) { + const fileNames = outsideDiff.map((c) => c.file.split("/").pop()).join(", "); + return { + published: 0, + failed: outsideDiff.length, + error: `Cannot publish to GitHub: ${fileNames} ${outsideDiff.length === 1 ? "is" : "are"} not part of the PR diff`, + }; + } + } + + // Batch-submit all comments as a single pending review (1 API call + // instead of N individual comment calls). + const reviewPayload = JSON.stringify({ + commit_id: headSha, + event: "COMMENT", + comments: comments.map((c) => ({ + path: c.file, + ...(c.endLine && c.endLine !== c.startLine + ? 
{ start_line: c.startLine, line: c.endLine, start_side: "RIGHT" } + : { line: c.startLine }), + side: "RIGHT", + body: c.body, + })), + }); + const prUrl = `https://github.com/${owner}/${repo}/pull/${prNumber}`; + + yield* Effect.logInfo("reviewComment.publish: submitting batch review", { + prUrl, + commentCount: comments.length, + commitId: headSha, + }); + + const batchResult = yield* Effect.tryPromise(() => + runProcess( + "gh", + [ + "api", + `repos/${owner}/${repo}/pulls/${prNumber}/reviews`, + "-X", + "POST", + "--input", + "-", + ], + { cwd: body.cwd, timeoutMs: 30_000, stdin: reviewPayload, allowNonZeroExit: true }, + ), + ).pipe( + Effect.map((r) => { + if (r.code !== 0) { + // Extract GitHub's error detail from the response body (stdout) + let ghDetail = ""; + try { + const respBody = JSON.parse(r.stdout) as { + message?: string; + errors?: { message?: string }[]; + }; + const messages = [ + respBody.message, + ...(respBody.errors?.map((e) => e.message).filter(Boolean) ?? []), + ].filter(Boolean); + if (messages.length > 0) ghDetail = messages.join(": "); + } catch { + /* response wasn't JSON */ + } + if (!ghDetail) ghDetail = r.stderr.trim(); + // Add file context so the user knows which file(s) caused the issue + const files = comments.map((c) => c.file.split("/").pop()).join(", "); + const hint = ghDetail.toLowerCase().includes("unprocessable") + ? ` — the file (${files}) may not be part of the PR diff` + : ""; + return { ok: false as const, url: prUrl, error: `${ghDetail}${hint}` }; + } + try { + const json = JSON.parse(r.stdout) as { html_url?: string }; + return { ok: true as const, url: json.html_url ?? prUrl }; + } catch { + return { ok: true as const, url: prUrl }; + } + }), + Effect.tap((result) => + result.ok + ? 
Effect.void + : Effect.logWarning("reviewComment.publish: batch publish failed", { + error: result.error, + payload: reviewPayload, + }), + ), + Effect.catch(() => + Effect.succeed({ + ok: false as const, + url: prUrl, + error: "Failed to run GitHub CLI", + }), + ), + ); + + if (batchResult.ok) { + const now = new Date().toISOString(); + for (const comment of comments) { + yield* reviewCommentRepo + .update({ id: comment.id, publishedAt: now, publishedUrl: batchResult.url }) + .pipe(Effect.ignore); + } + return { + published: comments.length, + url: prUrl, + }; + } + + // Fallback: publish comments individually if batch fails. + let published = 0; + const now = new Date().toISOString(); + for (const comment of comments) { + const ghUrl = yield* gitHubCli + .execute({ + cwd: body.cwd, + args: [ + "api", + `repos/${owner}/${repo}/pulls/${prNumber}/comments`, + "-X", + "POST", + "-f", + `body=${comment.body}`, + "-f", + `path=${comment.file}`, + "-F", + `line=${String(comment.endLine && comment.endLine !== comment.startLine ? comment.endLine : comment.startLine)}`, + "-f", + `side=RIGHT`, + "-f", + `commit_id=${headSha}`, + ...(comment.endLine && comment.endLine !== comment.startLine + ? ["-F", `start_line=${String(comment.startLine)}`, "-f", `start_side=RIGHT`] + : []), + ], + timeoutMs: 15_000, + }) + .pipe( + Effect.map((r) => { + try { + const json = JSON.parse(r.stdout) as { html_url?: string }; + return json.html_url ?? null; + } catch { + return null; + } + }), + Effect.catch(() => Effect.succeed(null as string | null)), + ); + if (ghUrl !== null) { + yield* reviewCommentRepo + .update({ id: comment.id, publishedAt: now, publishedUrl: ghUrl }) + .pipe(Effect.ignore); + published++; + } + } + + return { + published, + failed: comments.length - published, + ...(published === 0 ? { error: batchResult.error ?? 
"All publish attempts failed" } : {}), + url: prUrl, + }; + } + + case WS_METHODS.reviewRequestList: { + // Fetch current review requests from GitHub, using a 60s server-side + // cache to collapse duplicate polls from multiple browser tabs. + const cachedGhResults = reviewRequestGhCache.get("review-requests"); + const ghResults = cachedGhResults + ? cachedGhResults + : yield* gitHubCli.listReviewRequests({ limit: 30 }).pipe( + Effect.tap((results) => + Effect.sync(() => reviewRequestGhCache.set("review-requests", results)), + ), + Effect.catch(() => Effect.succeed([] as const)), + ); + + // Upsert each GitHub result into the DB + for (const pr of ghResults) { + const login = pr.author.login.toLowerCase(); + const isBot = + login.endsWith("[bot]") || + login === "dependabot" || + login === "renovate" || + login === "github-actions" || + login === "greenkeeper" || + login === "snyk-bot" || + login === "mergify" || + login === "codecov" || + login === "allcontributors"; + yield* reviewRequestRepo + .upsert({ + prUrl: pr.url, + prNumber: pr.number, + prTitle: pr.title, + repoNameWithOwner: pr.repository.nameWithOwner, + authorLogin: pr.author.login, + isBot, + ...(pr.body ? 
{ prBody: pr.body } : {}), + prLabels: pr.labels.map((l) => l.name), + }) + .pipe(Effect.ignore); + } + + // Auto-dismiss stale requests (PRs no longer open / no longer requesting review) + if (ghResults.length > 0) { + yield* reviewRequestRepo.dismissStale(ghResults.map((pr) => pr.url)).pipe(Effect.ignore); + } + + // Unlink thread references for deleted threads + yield* reviewRequestRepo.unlinkDeletedThreads().pipe(Effect.ignore); + + const reviewRequests = yield* reviewRequestRepo.listActive(); + return { reviewRequests }; + } + + case WS_METHODS.reviewRequestDismiss: { + const body = stripRequestTag(request.body); + yield* reviewRequestRepo.updateStatus({ id: body.id, status: "dismissed" }); + return {}; + } + + case WS_METHODS.reviewRequestLinkThread: { + const body = stripRequestTag(request.body); + yield* reviewRequestRepo.updateStatus({ + id: body.id, + status: "in_review", + threadId: body.threadId, + }); + return {}; + } + + case WS_METHODS.reviewRequestSubmit: { + const body = stripRequestTag(request.body); + + // Parse owner/repo/number from PR URL + const prUrlMatch = body.prUrl.match(/github\.com\/([\w.-]+)\/([\w.-]+)\/pull\/(\d+)/); + if (!prUrlMatch) { + return yield* new RouteRequestError({ + message: "Invalid PR URL format. Expected: https://github.com/owner/repo/pull/123", + }); + } + const [, owner, repo, prNumber] = prUrlMatch; + + // Submit the review via GitHub API + yield* gitHubCli.execute({ + cwd: process.cwd(), + args: [ + "api", + `repos/${owner}/${repo}/pulls/${prNumber}/reviews`, + "-X", + "POST", + "-f", + `event=${body.event}`, + "-f", + `body=${body.body ?? ""}`, + ], + timeoutMs: 15_000, + }); + + // Record the review outcome + yield* reviewRequestRepo.updateStatus({ + id: body.id, + status: body.event === "APPROVE" ? 
"approved" : "changes_requested", + }); + return {}; + } + + // ── Memory methods ────────────────────────────────────────── + case WS_METHODS.memoryCreate: { + const body = stripRequestTag(request.body); + const memory = yield* memoryRepo.create(body); + return { memory }; + } + + case WS_METHODS.memoryUpdate: { + const body = stripRequestTag(request.body); + yield* memoryRepo.update(body); + return {}; + } + + case WS_METHODS.memoryArchive: { + const body = stripRequestTag(request.body); + yield* memoryRepo.archive(body); + return {}; + } + + case WS_METHODS.memoryDelete: { + const body = stripRequestTag(request.body); + yield* memoryRepo.delete(body); + return {}; + } + + case WS_METHODS.memoryList: { + const body = stripRequestTag(request.body); + return yield* memoryRepo.listByProject(body); + } + + case WS_METHODS.memorySearch: { + const body = stripRequestTag(request.body); + const memories = yield* memoryRepo.search(body); + return { memories }; + } + + case WS_METHODS.memoryGetForThread: { + const body = stripRequestTag(request.body); + const memories = yield* memoryRepo.getRelevantForThread(body); + return { memories }; + } + + case WS_METHODS.memoryExtract: { + const body = stripRequestTag(request.body); + return yield* memoryExtraction.extract(body); + } + default: { const _exhaustiveCheck: never = request.body; return yield* new RouteRequestError({ @@ -907,10 +1556,14 @@ export const createServer = Effect.fn(function* (): Effect.fn.Return< }); } + // Extract the request id from the raw JSON before full schema validation + // so we can send an error response the client can match to its pending request. 
+ const rawRequestId = extractRequestId(messageText); + const request = decodeWebSocketRequest(messageText); if (Result.isFailure(request)) { return yield* sendWsResponse({ - id: "unknown", + id: rawRequestId, error: { message: `Invalid request format: ${formatSchemaError(request.failure)}` }, }); } diff --git a/apps/web/index.html b/apps/web/index.html index 0322f2d019..b8f07932bb 100644 --- a/apps/web/index.html +++ b/apps/web/index.html @@ -2,9 +2,11 @@ - + - { ]), ).toEqual(["custom/internal-model"]); }); + + it("normalizes provider-specific aliases for claude and cursor", () => { + expect(normalizeCustomModelSlugs(["sonnet"], "claudeCode")).toEqual([]); + expect(normalizeCustomModelSlugs(["claude/custom-sonnet"], "claudeCode")).toEqual([ + "claude/custom-sonnet", + ]); + expect(normalizeCustomModelSlugs(["composer"], "cursor")).toEqual([]); + expect(normalizeCustomModelSlugs(["cursor/custom-model"], "cursor")).toEqual([ + "cursor/custom-model", + ]); + }); }); describe("getAppModelOptions", () => { @@ -45,6 +56,14 @@ describe("getAppModelOptions", () => { isCustom: true, }); }); + + it("keeps a saved custom provider model available as an exact slug option", () => { + const options = getAppModelOptions("claudeCode", ["claude/custom-opus"], "claude/custom-opus"); + + expect(options.some((option) => option.slug === "claude/custom-opus" && option.isCustom)).toBe( + true, + ); + }); }); describe("resolveAppModelSelection", () => { @@ -63,4 +82,12 @@ describe("timestamp format defaults", () => { it("defaults timestamp format to locale", () => { expect(DEFAULT_TIMESTAMP_FORMAT).toBe("locale"); }); + + it("includes provider-specific custom slugs in non-codex model lists", () => { + const claudeOptions = getAppModelOptions("claudeCode", ["claude/custom-opus"]); + const cursorOptions = getAppModelOptions("cursor", ["cursor/custom-model"]); + + expect(claudeOptions.some((option) => option.slug === "claude/custom-opus")).toBe(true); + expect(cursorOptions.some((option) => 
option.slug === "cursor/custom-model")).toBe(true); + }); }); diff --git a/apps/web/src/appSettings.ts b/apps/web/src/appSettings.ts index 18e76d2f92..3992a6ba48 100644 --- a/apps/web/src/appSettings.ts +++ b/apps/web/src/appSettings.ts @@ -12,6 +12,8 @@ export type TimestampFormat = (typeof TIMESTAMP_FORMAT_OPTIONS)[number]; export const DEFAULT_TIMESTAMP_FORMAT: TimestampFormat = "locale"; const BUILT_IN_MODEL_SLUGS_BY_PROVIDER: Record> = { codex: new Set(getModelOptions("codex").map((option) => option.slug)), + claudeCode: new Set(getModelOptions("claudeCode").map((option) => option.slug)), + cursor: new Set(getModelOptions("cursor").map((option) => option.slug)), }; const AppSettingsSchema = Schema.Struct({ @@ -21,6 +23,12 @@ const AppSettingsSchema = Schema.Struct({ codexHomePath: Schema.String.check(Schema.isMaxLength(4096)).pipe( Schema.withConstructorDefault(() => Option.some("")), ), + claudeBinaryPath: Schema.String.check(Schema.isMaxLength(4096)).pipe( + Schema.withConstructorDefault(() => Option.some("")), + ), + projectsWorkingDirectory: Schema.String.check(Schema.isMaxLength(4096)).pipe( + Schema.withConstructorDefault(() => Option.some("")), + ), defaultThreadEnvMode: Schema.Literals(["local", "worktree"]).pipe( Schema.withConstructorDefault(() => Option.some("local")), ), @@ -34,6 +42,15 @@ const AppSettingsSchema = Schema.Struct({ customCodexModels: Schema.Array(Schema.String).pipe( Schema.withConstructorDefault(() => Option.some([])), ), + customClaudeModels: Schema.Array(Schema.String).pipe( + Schema.withConstructorDefault(() => Option.some([])), + ), + customCursorModels: Schema.Array(Schema.String).pipe( + Schema.withConstructorDefault(() => Option.some([])), + ), + preferredTerminal: Schema.String.pipe( + Schema.withConstructorDefault(() => Option.some("terminal-app")), + ), }); export type AppSettings = typeof AppSettingsSchema.Type; export interface AppModelOption { @@ -73,6 +90,15 @@ export function normalizeCustomModelSlugs( return 
normalizedModels; } +function normalizeAppSettings(settings: AppSettings): AppSettings { + return { + ...settings, + customCodexModels: normalizeCustomModelSlugs(settings.customCodexModels, "codex"), + customClaudeModels: normalizeCustomModelSlugs(settings.customClaudeModels, "claudeCode"), + customCursorModels: normalizeCustomModelSlugs(settings.customCursorModels, "cursor"), + }; +} + export function getAppModelOptions( provider: ProviderKind, customModels: readonly string[], diff --git a/apps/web/src/authToken.ts b/apps/web/src/authToken.ts new file mode 100644 index 0000000000..b4b72a5ef3 --- /dev/null +++ b/apps/web/src/authToken.ts @@ -0,0 +1,25 @@ +const AUTH_TOKEN_STORAGE_KEY = "t3code:auth-token"; + +export function getAuthToken(): string | null { + if (typeof window === "undefined") return null; + return window.localStorage.getItem(AUTH_TOKEN_STORAGE_KEY); +} + +export function setAuthToken(token: string | null): void { + if (typeof window === "undefined") return; + if (token) { + window.localStorage.setItem(AUTH_TOKEN_STORAGE_KEY, token); + } else { + window.localStorage.removeItem(AUTH_TOKEN_STORAGE_KEY); + } +} + +/** + * Append the stored auth token (if any) to a WebSocket URL as a `?token=` query parameter. + */ +export function appendAuthTokenToUrl(url: string): string { + const token = getAuthToken(); + if (!token) return url; + const separator = url.includes("?") ? "&" : "?"; + return `${url}${separator}token=${encodeURIComponent(token)}`; +} diff --git a/apps/web/src/components/AnnotatedFileDiff.tsx b/apps/web/src/components/AnnotatedFileDiff.tsx new file mode 100644 index 0000000000..fbce22bc71 --- /dev/null +++ b/apps/web/src/components/AnnotatedFileDiff.tsx @@ -0,0 +1,214 @@ +/** + * Unified component for rendering annotated code context in the diff viewer. + * + * Handles two cases that were previously separate components: + * + * 1. **Annotation-only files** — files with annotations but no diff changes. 
+ * Fetches the file content and generates a synthetic context-only patch + * so annotated lines are visible with their surrounding code. + * + * 2. **Unmatched annotations** — annotations on diff files whose target lines + * fall outside the visible hunks. Generates additional synthetic context + * hunks appended after the real diff. + * + * Both cases share the same core logic: fetch file content → build synthetic + * patch from annotation line ranges → render via `@pierre/diffs` `FileDiff` + * with the generic annotation pipeline. + */ + +import { parsePatchFiles } from "@pierre/diffs"; +import { FileDiff, type FileDiffMetadata } from "@pierre/diffs/react"; +import { useQuery } from "@tanstack/react-query"; +import { LoaderIcon } from "lucide-react"; +import { useMemo } from "react"; +import { + type DiffAnnotation, + normalizeFilePath, + toDiffLineAnnotations, +} from "../lib/diffAnnotations"; +import { DIFF_UNSAFE_CSS, resolveDiffThemeName } from "../lib/diffRendering"; +import { buildSyntheticContextPatch } from "../lib/diffAnnotations"; +import { ensureNativeApi } from "../nativeApi"; +import { renderDiffAnnotation } from "./DiffAnnotationCards"; +import { DiffFileHeader } from "./DiffFileHeader"; + +type DiffRenderMode = "stacked" | "split"; +type DiffThemeType = "light" | "dark"; + +// ── Shared hook: fetch file + build synthetic patch ───────────────── + +function useSyntheticFileDiff( + cwd: string, + file: string, + annotations: DiffAnnotation[], + enabled: boolean, +) { + const fileContentQuery = useQuery({ + queryKey: ["projects", "readFile", cwd, file] as const, + queryFn: async () => { + const api = ensureNativeApi(); + return api.projects.readFile({ cwd, relativePath: file }); + }, + enabled: enabled && cwd.length > 0, + staleTime: 30_000, + }); + + const fileDiff = useMemo(() => { + if (!fileContentQuery.data || annotations.length === 0) return null; + const allLines = fileContentQuery.data.content.split("\n"); + const lineRanges = 
annotations.map((a) => ({ + startLine: a.startLine, + endLine: a.endLine, + })); + const patch = buildSyntheticContextPatch(file, lineRanges, allLines); + if (patch.length === 0) return null; + try { + const parsed = parsePatchFiles(patch, `annotation-context:${file}`); + return parsed.flatMap((p) => p.files)[0] ?? null; + } catch { + return null; + } + }, [fileContentQuery.data, annotations, file]); + + return { fileContentQuery, fileDiff }; +} + +// ── 1. Annotation-only file entry ─────────────────────────────────── + +export interface AnnotationOnlyFileProps { + file: string; + annotations: DiffAnnotation[]; + cwd: string; + resolvedTheme: DiffThemeType; + diffRenderMode: DiffRenderMode; + isCollapsed: boolean; + onToggleCollapsed: () => void; +} + +/** + * Renders a file that has annotations but is NOT part of the actual diff. + * Fetches the file content and generates synthetic context hunks around + * each annotated line. + */ +export function AnnotationOnlyFile({ + file, + annotations, + cwd, + resolvedTheme, + diffRenderMode, + isCollapsed, + onToggleCollapsed, +}: AnnotationOnlyFileProps) { + const { fileContentQuery, fileDiff } = useSyntheticFileDiff(cwd, file, annotations, !isCollapsed); + + return ( +
+ + {!isCollapsed && fileDiff ? ( + + ) : !isCollapsed && fileContentQuery.isLoading ? ( +
+ + Loading file... +
+ ) : !isCollapsed && fileContentQuery.isError ? ( +
+

+ {fileContentQuery.error instanceof Error + ? fileContentQuery.error.message + : "Failed to read file."} +

+
+ ) : null} +
+ ); +} + +// ── 2. Unmatched annotations context ──────────────────────────────── + +export interface UnmatchedAnnotationsProps { + fileDiff: FileDiffMetadata; + annotations: DiffAnnotation[] | undefined; + cwd: string; + resolvedTheme: DiffThemeType; + diffRenderMode: DiffRenderMode; +} + +/** + * For files that ARE in the diff: renders additional synthetic context + * hunks for annotations whose target lines fall outside the visible + * diff hunks. + */ +export function UnmatchedAnnotations({ + fileDiff, + annotations, + cwd, + resolvedTheme, + diffRenderMode, +}: UnmatchedAnnotationsProps) { + // Collect all line numbers visible in the diff hunks + const visibleLines = useMemo(() => { + const lines = new Set(); + for (const hunk of fileDiff.hunks) { + const start = hunk.additionStart; + const count = hunk.additionCount; + for (let i = start; i < start + count; i++) lines.add(i); + } + return lines; + }, [fileDiff.hunks]); + + const unmatched = useMemo( + () => (annotations ?? []).filter((a) => !visibleLines.has(a.startLine)), + [annotations, visibleLines], + ); + + const file = normalizeFilePath(fileDiff.name ?? fileDiff.prevName ?? 
""); + + const { fileDiff: syntheticFileDiff } = useSyntheticFileDiff( + cwd, + file, + unmatched, + unmatched.length > 0, + ); + + if (unmatched.length === 0 || !syntheticFileDiff) return null; + + return ( + + ); +} diff --git a/apps/web/src/components/ApprovalDiffView.tsx b/apps/web/src/components/ApprovalDiffView.tsx new file mode 100644 index 0000000000..b476f29eca --- /dev/null +++ b/apps/web/src/components/ApprovalDiffView.tsx @@ -0,0 +1,155 @@ +import { parsePatchFiles } from "@pierre/diffs"; +import { FileDiff, type FileDiffMetadata } from "@pierre/diffs/react"; +import { ChevronDownIcon, FileIcon, TerminalIcon } from "lucide-react"; +import { memo, useMemo, useState } from "react"; +import { useTheme } from "../hooks/useTheme"; +import { toolArgsToDiff } from "../lib/approvalDiff"; +import { buildPatchCacheKey, DIFF_UNSAFE_CSS, resolveDiffThemeName } from "../lib/diffRendering"; +import { cn } from "../lib/utils"; + +type DiffThemeType = "light" | "dark"; + +// --------------------------------------------------------------------------- +// Props +// --------------------------------------------------------------------------- + +interface ApprovalDiffViewProps { + args: unknown; + requestKind: "command" | "file-read" | "file-change"; +} + +// --------------------------------------------------------------------------- +// Component +// --------------------------------------------------------------------------- + +export const ApprovalDiffView = memo(function ApprovalDiffView({ + args, + requestKind, +}: ApprovalDiffViewProps) { + const { resolvedTheme } = useTheme(); + const [collapsed, setCollapsed] = useState(false); + + const result = useMemo(() => toolArgsToDiff(args, requestKind), [args, requestKind]); + + if (result.kind === "unknown") { + return null; + } + + if (result.kind === "command") { + return ( +
+ + {!collapsed && ( +
+            {result.command}
+          
+ )} +
+ ); + } + + if (result.kind === "file-read") { + return ( +
+ + {result.filePath} +
+ ); + } + + // result.kind === "diff" + return ( + setCollapsed((prev) => !prev)} + /> + ); +}); + +// --------------------------------------------------------------------------- +// File diff sub-component +// --------------------------------------------------------------------------- + +interface ApprovalFileDiffProps { + patch: string; + filePath: string; + resolvedTheme: string; + collapsed: boolean; + onToggleCollapsed: () => void; +} + +function parseApprovalPatch(patch: string, cacheScope: string): FileDiffMetadata[] | null { + try { + const normalizedPatch = patch.trim(); + if (normalizedPatch.length === 0) return null; + const parsed = parsePatchFiles( + normalizedPatch, + buildPatchCacheKey(normalizedPatch, cacheScope), + ); + const files = parsed.flatMap((p) => p.files); + return files.length > 0 ? files : null; + } catch { + return null; + } +} + +const ApprovalFileDiff = memo(function ApprovalFileDiff({ + patch, + filePath, + resolvedTheme, + collapsed, + onToggleCollapsed, +}: ApprovalFileDiffProps) { + const cacheScope = `approval-diff:${resolvedTheme}`; + const files = useMemo(() => parseApprovalPatch(patch, cacheScope), [patch, cacheScope]); + + return ( +
+ + {!collapsed && ( +
+ {files ? ( + files.map((fileDiff, index) => ( + + )) + ) : ( +
+              {patch}
+            
+ )} +
+ )} +
+ ); +}); diff --git a/apps/web/src/components/ChatView.browser.tsx b/apps/web/src/components/ChatView.browser.tsx index faecc7f51b..5c6876ea05 100644 --- a/apps/web/src/components/ChatView.browser.tsx +++ b/apps/web/src/components/ChatView.browser.tsx @@ -111,6 +111,7 @@ function createBaseServerConfig(): ServerConfig { }, ], availableEditors: [], + availableTerminals: [], }; } @@ -218,6 +219,7 @@ function createSnapshotForTargetUser(options: { createdAt: NOW_ISO, updatedAt: NOW_ISO, deletedAt: null, + linkedJiraTicket: null, messages, activities: [], proposedPlans: [], @@ -226,6 +228,7 @@ function createSnapshotForTargetUser(options: { threadId: THREAD_ID, status: options.sessionStatus ?? "ready", providerName: "codex", + providerSessionId: null, runtimeMode: "full-access", activeTurnId: null, lastError: null, @@ -272,6 +275,7 @@ function addThreadToSnapshot( createdAt: NOW_ISO, updatedAt: NOW_ISO, deletedAt: null, + linkedJiraTicket: null, messages: [], activities: [], proposedPlans: [], @@ -280,6 +284,7 @@ function addThreadToSnapshot( threadId, status: "ready", providerName: "codex", + providerSessionId: null, runtimeMode: "full-access", activeTurnId: null, lastError: null, diff --git a/apps/web/src/components/ChatView.logic.ts b/apps/web/src/components/ChatView.logic.ts index 59e2904310..c8c8f35de3 100644 --- a/apps/web/src/components/ChatView.logic.ts +++ b/apps/web/src/components/ChatView.logic.ts @@ -35,6 +35,7 @@ export function buildLocalDraftThread( turnDiffSummaries: [], activities: [], proposedPlans: [], + linkedJiraTicket: null, }; } @@ -118,8 +119,12 @@ export function cloneComposerImageForRetry( export function getCustomModelOptionsByProvider(settings: { customCodexModels: readonly string[]; + customClaudeModels: readonly string[]; + customCursorModels: readonly string[]; }): Record> { return { codex: getAppModelOptions("codex", settings.customCodexModels), + claudeCode: getAppModelOptions("claudeCode", settings.customClaudeModels), + cursor: 
getAppModelOptions("cursor", settings.customCursorModels), }; } diff --git a/apps/web/src/components/ChatView.tsx b/apps/web/src/components/ChatView.tsx index 9f625762cf..155c46d7b4 100644 --- a/apps/web/src/components/ChatView.tsx +++ b/apps/web/src/components/ChatView.tsx @@ -23,6 +23,7 @@ import { } from "@t3tools/contracts"; import { getDefaultModel, + getClaudeContextWindowMode, getDefaultReasoningEffort, getReasoningEffortOptions, normalizeModelSlug, @@ -63,6 +64,7 @@ import { isScrollContainerNearBottom } from "../chat-scroll"; import { buildPendingUserInputAnswers, derivePendingUserInputProgress, + resolvePendingUserInputAnswer, setPendingUserInputCustomAnswer, type PendingUserInputDraftAnswer, } from "../pendingUserInput"; @@ -95,7 +97,9 @@ import { ChevronLeftIcon, ChevronRightIcon, CircleAlertIcon, + FolderXIcon, ListTodoIcon, + LoaderIcon, LockIcon, LockOpenIcon, XIcon, @@ -117,7 +121,7 @@ import { } from "~/projectScripts"; import { SidebarTrigger } from "./ui/sidebar"; import { newCommandId, newMessageId, newThreadId } from "~/lib/utils"; -import { readNativeApi } from "~/nativeApi"; +import { ensureNativeApi, readNativeApi } from "~/nativeApi"; import { resolveAppModelSelection, useAppSettings } from "../appSettings"; import { isTerminalFocused } from "../lib/terminalFocus"; import { @@ -137,6 +141,7 @@ import { buildExpandedImagePreview, ExpandedImagePreview } from "./chat/Expanded import { AVAILABLE_PROVIDER_OPTIONS, ProviderModelPicker } from "./chat/ProviderModelPicker"; import { ComposerCommandItem, ComposerCommandMenu } from "./chat/ComposerCommandMenu"; import { ComposerPendingApprovalActions } from "./chat/ComposerPendingApprovalActions"; +import { ClaudeTraitsPicker } from "./chat/ClaudeTraitsPicker"; import { CodexTraitsPicker } from "./chat/CodexTraitsPicker"; import { CompactComposerControlsMenu } from "./chat/CompactComposerControlsMenu"; import { ComposerPendingApprovalPanel } from "./chat/ComposerPendingApprovalPanel"; @@ -159,6 +164,8 
@@ import { SendPhase, } from "./ChatView.logic"; import { useLocalStorage } from "~/hooks/useLocalStorage"; +import { useMessageQueue } from "../hooks/useMessageQueue"; +import { QueuedMessagesBanner } from "./chat/QueuedMessagesBanner"; const ATTACHMENT_PREVIEW_HANDOFF_TTL_MS = 5000; const IMAGE_SIZE_LIMIT_LABEL = `${Math.round(PROVIDER_SEND_TURN_MAX_IMAGE_BYTES / (1024 * 1024))}MB`; @@ -219,6 +226,9 @@ export default function ChatView({ threadId }: ChatViewProps) { ); const setComposerDraftEffort = useComposerDraftStore((store) => store.setEffort); const setComposerDraftCodexFastMode = useComposerDraftStore((store) => store.setCodexFastMode); + const setComposerDraftClaudeLargeContext = useComposerDraftStore( + (store) => store.setClaudeLargeContext, + ); const addComposerDraftImage = useComposerDraftStore((store) => store.addImage); const addComposerDraftImages = useComposerDraftStore((store) => store.addImages); const removeComposerDraftImage = useComposerDraftStore((store) => store.removeImage); @@ -315,6 +325,12 @@ export default function ChatView({ threadId }: ChatViewProps) { const attachmentPreviewHandoffTimeoutByMessageIdRef = useRef>({}); const sendInFlightRef = useRef(false); const dragDepthRef = useRef(0); + const { + queue: messageQueue, + enqueue: enqueueMessage, + drainAll: drainAllQueuedMessages, + removeById: removeQueuedMessage, + } = useMessageQueue(threadId); const terminalOpenByThreadRef = useRef>({}); const setMessagesScrollContainerRef = useCallback((element: HTMLDivElement | null) => { messagesScrollRef.current = element; @@ -474,7 +490,7 @@ export default function ChatView({ threadId }: ChatViewProps) { const lastVisitedAt = activeThread.lastVisitedAt ? 
Date.parse(activeThread.lastVisitedAt) : NaN; if (!Number.isNaN(lastVisitedAt) && lastVisitedAt >= turnCompletedAt) return; - markThreadVisited(activeThread.id); + markThreadVisited(activeThread.id, activeLatestTurn.completedAt); }, [ activeThread?.id, activeThread?.lastVisitedAt, @@ -494,7 +510,8 @@ export default function ChatView({ threadId }: ChatViewProps) { const lockedProvider: ProviderKind | null = hasThreadStarted ? (sessionProvider ?? selectedProviderByThreadId ?? null) : null; - const selectedProvider: ProviderKind = lockedProvider ?? selectedProviderByThreadId ?? "codex"; + const selectedProvider: ProviderKind = + lockedProvider ?? selectedProviderByThreadId ?? "claudeCode"; const baseThreadModel = resolveModelSlugForProvider( selectedProvider, activeThread?.model ?? activeProject?.model ?? getDefaultModel(selectedProvider), @@ -516,27 +533,54 @@ export default function ChatView({ threadId }: ChatViewProps) { const selectedEffort = composerDraft.effort ?? getDefaultReasoningEffort(selectedProvider); const selectedCodexFastModeEnabled = selectedProvider === "codex" ? composerDraft.codexFastMode : false; + // Claude large-context: only show toggle for models that need the beta header to unlock 1M. + // Native 1M models (Opus 4.6, Sonnet 4.6) already have 1M context — no opt-in needed. + const claudeContextWindowMode = + selectedProvider === "claudeCode" ? getClaudeContextWindowMode(selectedModel) : null; + const selectedClaudeLargeContextEnabled = + selectedProvider === "claudeCode" ? composerDraft.claudeLargeContext : false; const selectedModelOptionsForDispatch = useMemo(() => { - if (selectedProvider !== "codex") { - return undefined; + if (selectedProvider === "codex") { + const codexOptions = { + ...(supportsReasoningEffort && selectedEffort ? { reasoningEffort: selectedEffort } : {}), + ...(selectedCodexFastModeEnabled ? { fastMode: true } : {}), + }; + return Object.keys(codexOptions).length > 0 ? 
{ codex: codexOptions } : undefined; } - const codexOptions = { - ...(supportsReasoningEffort && selectedEffort ? { reasoningEffort: selectedEffort } : {}), - ...(selectedCodexFastModeEnabled ? { fastMode: true } : {}), - }; - return Object.keys(codexOptions).length > 0 ? { codex: codexOptions } : undefined; - }, [selectedCodexFastModeEnabled, selectedEffort, selectedProvider, supportsReasoningEffort]); - const providerOptionsForDispatch = useMemo(() => { - if (!settings.codexBinaryPath && !settings.codexHomePath) { - return undefined; + if (selectedProvider === "claudeCode") { + const claudeOptions = { + ...(selectedClaudeLargeContextEnabled ? { largeContext: true } : {}), + }; + return Object.keys(claudeOptions).length > 0 ? { claudeCode: claudeOptions } : undefined; } + return undefined; + }, [ + selectedCodexFastModeEnabled, + selectedClaudeLargeContextEnabled, + selectedEffort, + selectedProvider, + supportsReasoningEffort, + ]); + const providerOptionsForDispatch = useMemo(() => { + const hasCodexOptions = settings.codexBinaryPath || settings.codexHomePath; + const hasClaudeOptions = settings.claudeBinaryPath; + if (!hasCodexOptions && !hasClaudeOptions) return undefined; return { - codex: { - ...(settings.codexBinaryPath ? { binaryPath: settings.codexBinaryPath } : {}), - ...(settings.codexHomePath ? { homePath: settings.codexHomePath } : {}), - }, + ...(hasCodexOptions + ? { + codex: { + ...(settings.codexBinaryPath ? { binaryPath: settings.codexBinaryPath } : {}), + ...(settings.codexHomePath ? { homePath: settings.codexHomePath } : {}), + }, + } + : {}), + ...(hasClaudeOptions + ? 
{ + claudeCode: { binaryPath: settings.claudeBinaryPath }, + } + : {}), }; - }, [settings.codexBinaryPath, settings.codexHomePath]); + }, [settings.claudeBinaryPath, settings.codexBinaryPath, settings.codexHomePath]); const selectedModelForPicker = selectedModel; const modelOptionsByProvider = useMemo( () => getCustomModelOptionsByProvider(settings), @@ -577,8 +621,8 @@ export default function ChatView({ threadId }: ChatViewProps) { ); const threadActivities = activeThread?.activities ?? EMPTY_ACTIVITIES; const workLogEntries = useMemo( - () => deriveWorkLogEntries(threadActivities, activeLatestTurn?.turnId ?? undefined), - [activeLatestTurn?.turnId, threadActivities], + () => deriveWorkLogEntries(threadActivities, undefined), + [threadActivities], ); const latestTurnHasToolActivity = useMemo( () => hasToolActivityForTurn(threadActivities, activeLatestTurn?.turnId), @@ -1021,6 +1065,48 @@ export default function ChatView({ threadId }: ChatViewProps) { }, [activeProjectCwd, activeThreadWorktreePath]); // Default true while loading to avoid toolbar flicker. const isGitRepo = branchesQuery.data?.isRepo ?? true; + // Probe worktree existence: a lightweight git query that fails fast when the path is gone. + const worktreeProbeQuery = useQuery({ + queryKey: ["worktree-probe", activeThreadWorktreePath], + queryFn: async () => { + const api = ensureNativeApi(); + if (!activeThreadWorktreePath) throw new Error("No worktree path"); + // Use listBranches as a lightweight existence check — it fails if the cwd is gone. 
+ await api.git.listBranches({ cwd: activeThreadWorktreePath }); + return { exists: true } as const; + }, + enabled: !!activeThreadWorktreePath, + retry: false, + staleTime: 5_000, + refetchInterval: 10_000, + refetchOnWindowFocus: "always", + }); + const isWorktreeMissing = + !!activeThreadWorktreePath && (worktreeProbeQuery.isError || branchesQuery.isError); + const handleRecreateWorktreeFromChat = useCallback(() => { + if (!activeProject || !activeThread?.branch || !activeThreadWorktreePath) return; + createWorktreeMutation.mutate( + { + cwd: activeProject.cwd, + branch: activeThread.branch, + newBranch: activeThread.branch, + path: activeThreadWorktreePath, + }, + { + onSuccess: () => { + void worktreeProbeQuery.refetch(); + void branchesQuery.refetch(); + }, + }, + ); + }, [ + activeProject, + activeThread?.branch, + activeThreadWorktreePath, + branchesQuery, + createWorktreeMutation, + worktreeProbeQuery, + ]); const splitTerminalShortcutLabel = useMemo( () => shortcutLabelForCommand(keybindings, "terminal.split"), [keybindings], @@ -1085,6 +1171,7 @@ export default function ChatView({ threadId }: ChatViewProps) { ); const focusComposer = useCallback(() => { + if (window.matchMedia("(pointer: coarse)").matches) return; composerEditorRef.current?.focusAtEnd(); }, []); const scheduleComposerFocus = useCallback(() => { @@ -1629,14 +1716,31 @@ export default function ChatView({ threadId }: ChatViewProps) { useLayoutEffect(() => { if (!activeThread?.id) return; shouldAutoScrollRef.current = true; - scheduleStickToBottom(); + const scrollContainer = messagesScrollRef.current; + if (scrollContainer) { + // Scroll synchronously before the browser paints so the first frame + // shows the correct (bottom) position rather than the previous thread's + // scroll offset. 
+ scrollContainer.scrollTo({ top: scrollContainer.scrollHeight }); + lastKnownScrollTopRef.current = scrollContainer.scrollTop; + // Fade the messages area in so the thread switch feels smooth rather + // than an abrupt content swap. + scrollContainer.style.opacity = "0"; + scrollContainer.style.transition = "none"; + } + const fadeFrame = window.requestAnimationFrame(() => { + if (scrollContainer) { + scrollContainer.style.transition = "opacity 120ms ease-out"; + scrollContainer.style.opacity = "1"; + } + }); const timeout = window.setTimeout(() => { - const scrollContainer = messagesScrollRef.current; if (!scrollContainer) return; if (isScrollContainerNearBottom(scrollContainer)) return; scheduleStickToBottom(); }, 96); return () => { + window.cancelAnimationFrame(fadeFrame); window.clearTimeout(timeout); }; }, [activeThread?.id, scheduleStickToBottom]); @@ -1776,7 +1880,18 @@ export default function ChatView({ threadId }: ChatViewProps) { dragDepthRef.current = 0; setIsDragOverComposer(false); setExpandedImage(null); - }, [threadId]); + // Reset request-response state that is scoped to a thread session + setRespondingRequestIds([]); + setRespondingUserInputRequestIds([]); + setPendingUserInputAnswersByRequestId({}); + setPendingUserInputQuestionIndexByRequestId({}); + // Clean up attachment preview blob URLs and timeouts from the previous thread + clearAttachmentPreviewHandoffs(); + // Reset in-flight send guard so the new thread can send immediately + sendInFlightRef.current = false; + // Release any stuck composer select lock from the previous thread + composerSelectLockRef.current = false; + }, [threadId, clearAttachmentPreviewHandoffs]); useEffect(() => { let cancelled = false; @@ -2194,15 +2309,41 @@ export default function ChatView({ threadId }: ChatViewProps) { [activeThread, isConnecting, isRevertingCheckpoint, isSendBusy, phase, setThreadError], ); - const onSend = async (e?: { preventDefault: () => void }) => { + const onSend = async ( + e?: { 
preventDefault: () => void }, + queuedContent?: { text: string; images: ComposerImageAttachment[] }, + ) => { e?.preventDefault(); const api = readNativeApi(); - if (!api || !activeThread || isSendBusy || isConnecting || sendInFlightRef.current) return; + if ( + !api || + !activeThread || + isSendBusy || + isConnecting || + sendInFlightRef.current || + isWorktreeMissing + ) + return; if (activePendingProgress) { onAdvanceActivePendingUserInput(); return; } - const trimmed = prompt.trim(); + // If a turn is already running and this isn't a queued flush, queue the message instead + if (!queuedContent && !latestTurnSettled) { + // Let plan follow-up and slash commands fall through — they have their own flow + if (!activePendingProgress && !showPlanFollowUpPrompt) { + const text = prompt.trim(); + if (!text && composerImages.length === 0) return; + enqueueMessage(text, [...composerImages]); + promptRef.current = ""; + clearComposerDraftContent(activeThread.id); + setComposerHighlightedItemId(null); + setComposerCursor(0); + setComposerTrigger(null); + return; + } + } + const trimmed = (queuedContent?.text ?? prompt).trim(); if (showPlanFollowUpPrompt && activeProposedPlan) { const followUp = resolvePlanFollowUpSubmission({ draftText: trimmed, @@ -2219,8 +2360,9 @@ export default function ChatView({ threadId }: ChatViewProps) { }); return; } + const effectiveImages = queuedContent?.images ?? composerImages; const standaloneSlashCommand = - composerImages.length === 0 ? parseStandaloneComposerSlashCommand(trimmed) : null; + effectiveImages.length === 0 ? 
parseStandaloneComposerSlashCommand(trimmed) : null; if (standaloneSlashCommand) { await handleInteractionModeChange(standaloneSlashCommand); promptRef.current = ""; @@ -2230,7 +2372,7 @@ export default function ChatView({ threadId }: ChatViewProps) { setComposerTrigger(null); return; } - if (!trimmed && composerImages.length === 0) return; + if (!trimmed && effectiveImages.length === 0) return; if (!activeProject) return; const threadIdForSend = activeThread.id; const isFirstMessage = !isServerThread || activeThread.messages.length === 0; @@ -2254,7 +2396,7 @@ export default function ChatView({ threadId }: ChatViewProps) { sendInFlightRef.current = true; beginSendPhase(baseBranchForWorktree ? "preparing-worktree" : "sending-turn"); - const composerImagesSnapshot = [...composerImages]; + const composerImagesSnapshot = [...effectiveImages]; const messageIdForSend = newMessageId(); const messageCreatedAt = new Date().toISOString(); const turnAttachmentsPromise = Promise.all( @@ -2290,11 +2432,15 @@ export default function ChatView({ threadId }: ChatViewProps) { forceStickToBottom(); setThreadError(threadIdForSend, null); - promptRef.current = ""; - clearComposerDraftContent(threadIdForSend); - setComposerHighlightedItemId(null); - setComposerCursor(0); - setComposerTrigger(null); + // Only clear the composer when the user initiated the send directly. + // Queued flushes should not wipe what the user is currently typing. 
+ if (!queuedContent) { + promptRef.current = ""; + clearComposerDraftContent(threadIdForSend); + setComposerHighlightedItemId(null); + setComposerCursor(0); + setComposerTrigger(null); + } let createdServerThreadForLocalDraft = false; let turnStartSucceeded = false; @@ -2472,6 +2618,26 @@ export default function ChatView({ threadId }: ChatViewProps) { } }; + // Keep a stable ref to onSend so the auto-flush effect doesn't need it as a dep + const onSendRef = useRef(onSend); + onSendRef.current = onSend; + + // Auto-flush queued messages when the AI becomes idle. + // All queued messages are drained and merged into a single turn. + useEffect(() => { + if (!latestTurnSettled) return; + if (messageQueue.length === 0) return; + if (isSendBusy || sendInFlightRef.current) return; + + const items = drainAllQueuedMessages(); + if (items.length === 0) return; + + const mergedText = items.map((m) => m.text).join("\n\n"); + const mergedImages = items.flatMap((m) => m.images); + + void onSendRef.current(undefined, { text: mergedText, images: mergedImages }); + }, [latestTurnSettled, messageQueue, isSendBusy, drainAllQueuedMessages]); + const onInterrupt = async () => { const api = readNativeApi(); if (!api || !activeThread) return; @@ -2488,6 +2654,9 @@ export default function ChatView({ threadId }: ChatViewProps) { const api = readNativeApi(); if (!api || !activeThreadId) return; + // Capture feedback from the composer when declining + const feedback = decision === "decline" ? promptRef.current.trim() || undefined : undefined; + setRespondingRequestIds((existing) => existing.includes(requestId) ? existing : [...existing, requestId], ); @@ -2498,6 +2667,7 @@ export default function ChatView({ threadId }: ChatViewProps) { threadId: activeThreadId, requestId, decision, + ...(feedback ? { feedback } : {}), createdAt: new Date().toISOString(), }) .catch((err: unknown) => { @@ -2506,9 +2676,15 @@ export default function ChatView({ threadId }: ChatViewProps) { err instanceof Error ? 
err.message : "Failed to submit approval decision.", ); }); + + // Clear composer after declining with feedback + if (feedback) { + setPrompt(""); + } + setRespondingRequestIds((existing) => existing.filter((id) => id !== requestId)); }, - [activeThreadId, setStoreThreadError], + [activeThreadId, setStoreThreadError, setPrompt], ); const onRespondToUserInput = useCallback( @@ -2608,12 +2784,14 @@ export default function ChatView({ threadId }: ChatViewProps) { if (!activePendingUserInput || !activePendingProgress) { return; } - if (activePendingProgress.isLastQuestion) { + if (activePendingProgress.isReviewStep) { + // Submit from the review step if (activePendingResolvedAnswers) { void onRespondToUserInput(activePendingUserInput.requestId, activePendingResolvedAnswers); } return; } + // Advance to next question, or to the review step (questionIndex === questions.length) setActivePendingUserInputQuestionIndex(activePendingProgress.questionIndex + 1); }, [ activePendingProgress, @@ -2624,11 +2802,16 @@ export default function ChatView({ threadId }: ChatViewProps) { ]); const onPreviousActivePendingUserInputQuestion = useCallback(() => { - if (!activePendingProgress) { + if (!activePendingProgress || !activePendingUserInput) { return; } - setActivePendingUserInputQuestionIndex(Math.max(activePendingProgress.questionIndex - 1, 0)); - }, [activePendingProgress, setActivePendingUserInputQuestionIndex]); + if (activePendingProgress.isReviewStep) { + // Go back to the last question from the review step + setActivePendingUserInputQuestionIndex(activePendingUserInput.questions.length - 1); + } else { + setActivePendingUserInputQuestionIndex(Math.max(activePendingProgress.questionIndex - 1, 0)); + } + }, [activePendingProgress, activePendingUserInput, setActivePendingUserInputQuestionIndex]); const onSubmitPlanFollowUp = useCallback( async ({ @@ -2907,6 +3090,13 @@ export default function ChatView({ threadId }: ChatViewProps) { }, [scheduleComposerFocus, 
setComposerDraftCodexFastMode, threadId], ); + const onClaudeLargeContextChange = useCallback( + (enabled: boolean) => { + setComposerDraftClaudeLargeContext(threadId, enabled); + scheduleComposerFocus(); + }, + [scheduleComposerFocus, setComposerDraftClaudeLargeContext, threadId], + ); const onEnvModeChange = useCallback( (mode: DraftThreadEnvMode) => { if (isLocalDraftThread) { @@ -3226,6 +3416,7 @@ export default function ChatView({ threadId }: ChatViewProps) { > { void runProjectScript(script); }} @@ -3251,10 +3446,49 @@ export default function ChatView({ threadId }: ChatViewProps) { {/* Error banner */} - setThreadError(activeThread.id, null)} - /> + {activeThread.session?.status !== "closed" && ( + setThreadError(activeThread.id, null)} + /> + )} + {activeThread.session?.status === "closed" && activeThread.session.lastError && ( +
+ +
+

Session disconnected

+

{activeThread.session.lastError}

+
+
+ )} + {isWorktreeMissing && ( +
+ +

+ The worktree for this thread no longer exists. Recreate it to continue working. +

+ +
+ )} {/* Main content area with optional plan sidebar */}
{/* Chat column */} @@ -3302,6 +3536,11 @@ export default function ChatView({ threadId }: ChatViewProps) { {/* Input bar */}
+ {messageQueue.length > 0 && ( +
+ +
+ )}
@@ -3469,6 +3707,7 @@ export default function ChatView({ threadId }: ChatViewProps) { 0} onRespondToApproval={onRespondToApproval} />
@@ -3509,9 +3748,12 @@ export default function ChatView({ threadId }: ChatViewProps) { selectedEffort={selectedEffort} selectedProvider={selectedProvider} selectedCodexFastModeEnabled={selectedCodexFastModeEnabled} + claudeContextWindowMode={claudeContextWindowMode} + selectedClaudeLargeContextEnabled={selectedClaudeLargeContextEnabled} reasoningOptions={reasoningOptions} onEffortSelect={onEffortSelect} onCodexFastModeChange={onCodexFastModeChange} + onClaudeLargeContextChange={onClaudeLargeContextChange} onToggleInteractionMode={toggleInteractionMode} onTogglePlanSidebar={togglePlanSidebar} onToggleRuntimeMode={toggleRuntimeMode} @@ -3534,6 +3776,20 @@ export default function ChatView({ threadId }: ChatViewProps) { ) : null} + {selectedProvider === "claudeCode" && claudeContextWindowMode !== null ? ( + <> + + + + ) : null} + - {activePendingProgress.questionIndex > 0 ? ( + {activePendingProgress.questionIndex > 0 || + activePendingProgress.isReviewStep ? ( @@ -3677,7 +3934,7 @@ export default function ChatView({ threadId }: ChatViewProps) { type="submit" size="sm" className="h-9 rounded-full px-4 sm:h-8" - disabled={isSendBusy || isConnecting} + disabled={isSendBusy || isConnecting || isWorktreeMissing} > {isConnecting || isSendBusy ? "Sending..." : "Refine"} @@ -3687,7 +3944,7 @@ export default function ChatView({ threadId }: ChatViewProps) { type="submit" size="sm" className="h-9 rounded-l-full rounded-r-none px-4 sm:h-8" - disabled={isSendBusy || isConnecting} + disabled={isSendBusy || isConnecting || isWorktreeMissing} > {isConnecting || isSendBusy ? "Sending..." 
: "Implement"} @@ -3699,7 +3956,7 @@ export default function ChatView({ threadId }: ChatViewProps) { variant="default" className="h-9 rounded-l-none rounded-r-full border-l-white/12 px-2 sm:h-8" aria-label="Implementation actions" - disabled={isSendBusy || isConnecting} + disabled={isSendBusy || isConnecting || isWorktreeMissing} /> } > @@ -3707,7 +3964,7 @@ export default function ChatView({ threadId }: ChatViewProps) { void onImplementPlanInNewThread()} > Implement in new thread @@ -3723,6 +3980,7 @@ export default function ChatView({ threadId }: ChatViewProps) { disabled={ isSendBusy || isConnecting || + isWorktreeMissing || (!prompt.trim() && composerImages.length === 0) } aria-label={ diff --git a/apps/web/src/components/CreateJiraTicketDialog.tsx b/apps/web/src/components/CreateJiraTicketDialog.tsx new file mode 100644 index 0000000000..17cf028bd1 --- /dev/null +++ b/apps/web/src/components/CreateJiraTicketDialog.tsx @@ -0,0 +1,380 @@ +import { useCallback, useState } from "react"; +import type { LinkedJiraTicket } from "@t3tools/contracts"; +import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; +import { + AlertCircleIcon, + ClipboardCopyIcon, + LoaderIcon, + SearchIcon, + SparklesIcon, +} from "lucide-react"; +import { + DialogDescription, + DialogFooter, + DialogHeader, + DialogPanel, + DialogTitle, +} from "~/components/ui/dialog"; +import { Button } from "~/components/ui/button"; +import { Input } from "~/components/ui/input"; +import { Textarea } from "~/components/ui/textarea"; +import { JiraIcon } from "./Icons"; +import { + Select, + SelectItem, + SelectPopup, + SelectTrigger, + SelectValue, +} from "~/components/ui/select"; +import { + jiraCreateIssueMutationOptions, + jiraViewIssueQueryOptions, + jiraMyOpenIssuesQueryOptions, + jiraGenerateTicketContentMutationOptions, +} from "~/lib/jiraReactQuery"; +import { readNativeApi } from "~/nativeApi"; +import { newCommandId } from "~/lib/utils"; + +interface 
CreateJiraTicketDialogProps { + threadId: string; + onClose: () => void; + onTicketLinked: (ticket: LinkedJiraTicket) => void; +} + +type Mode = "link" | "create"; + +const JIRA_PROJECT_KEY_STORAGE = "t3:jira:lastProjectKey"; + +function getLastProjectKey(): string { + try { + return localStorage.getItem(JIRA_PROJECT_KEY_STORAGE) ?? ""; + } catch { + return ""; + } +} + +function saveLastProjectKey(key: string) { + try { + localStorage.setItem(JIRA_PROJECT_KEY_STORAGE, key); + } catch { + // ignore + } +} + +export function CreateJiraTicketDialog({ + threadId, + onClose, + onTicketLinked, +}: CreateJiraTicketDialogProps) { + const [mode, setMode] = useState("link"); + const [keyInput, setKeyInput] = useState(""); + const [projectKey, setProjectKey] = useState(getLastProjectKey); + const [issueType, setIssueType] = useState("Task"); + const [summary, setSummary] = useState(""); + const [description, setDescription] = useState(""); + const [error, setError] = useState(null); + + const queryClient = useQueryClient(); + const createMutation = useMutation(jiraCreateIssueMutationOptions({ queryClient })); + const generateMutation = useMutation(jiraGenerateTicketContentMutationOptions()); + + const parsedKey = extractJiraKey(keyInput.trim()); + const issueQuery = useQuery(jiraViewIssueQueryOptions(parsedKey)); + const myIssuesQuery = useQuery(jiraMyOpenIssuesQueryOptions()); + + const dispatchLink = useCallback( + (ticket: LinkedJiraTicket) => { + const api = readNativeApi(); + if (api) { + void api.orchestration + .dispatchCommand({ + type: "thread.meta.update", + commandId: newCommandId(), + threadId: threadId as any, + linkedJiraTicket: ticket, + }) + .catch(() => undefined); + } + onTicketLinked(ticket); + onClose(); + }, + [threadId, onTicketLinked, onClose], + ); + + const linkExistingTicket = useCallback(() => { + if (!issueQuery.data) return; + // Remember the project key from the linked ticket (e.g. 
"PROJ" from "PROJ-123") + const linkedProjectKey = issueQuery.data.key.split("-")[0]; + if (linkedProjectKey) saveLastProjectKey(linkedProjectKey); + dispatchLink({ + key: issueQuery.data.key, + url: issueQuery.data.url, + title: issueQuery.data.summary, + status: "active", + linkedAt: new Date().toISOString(), + }); + }, [issueQuery.data, dispatchLink]); + + const handleCreate = useCallback(async () => { + if (!projectKey || !summary) return; + setError(null); + try { + const result = await createMutation.mutateAsync({ + projectKey, + type: issueType, + priority: "Medium", + summary, + description, + }); + saveLastProjectKey(projectKey); + dispatchLink({ + key: result.key, + url: result.url, + title: summary, + status: "active", + linkedAt: new Date().toISOString(), + }); + } catch (err) { + setError(err instanceof Error ? err.message : "Failed to create issue."); + } + }, [projectKey, issueType, summary, description, createMutation, dispatchLink]); + + const handleGenerate = useCallback(async () => { + if (!projectKey) return; + setError(null); + try { + const result = await generateMutation.mutateAsync({ + threadId: threadId as any, + projectKey, + }); + setSummary(result.summary); + setDescription(result.description); + } catch (err) { + setError(err instanceof Error ? err.message : "Failed to generate content."); + } + }, [projectKey, generateMutation]); + + return ( + <> + + + + Jira Ticket + + Link an existing ticket or create a new one. + + + +
+ {/* Mode tabs */} +
+ + +
+ + {mode === "link" ? ( +
+
+ setKeyInput(e.target.value)} + autoFocus + /> + {issueQuery.isLoading && ( +
+ +
+ )} +
+ + {issueQuery.data && ( +
+
+ +
+

+ {issueQuery.data.key}: {issueQuery.data.summary} +

+

+ {issueQuery.data.type} + · + {issueQuery.data.status} + · + {issueQuery.data.priority} +

+
+
+
+ )} + + {issueQuery.isError && ( +

Could not find issue.

+ )} + + {!parsedKey && myIssuesQuery.data && myIssuesQuery.data.issues.length > 0 && ( +
+

+ My open tickets +

+
+ {myIssuesQuery.data.issues.map((issue) => ( + + ))} +
+
+ )} + + {!parsedKey && myIssuesQuery.isLoading && ( +
+ + Loading your tickets... +
+ )} +
+ ) : ( +
+
+ setProjectKey(e.target.value.toUpperCase())} + className="flex-1" + autoFocus + /> + +
+ setSummary(e.target.value)} + /> +