Compare commits
1 Commits
691591d279
...
codex/fix-
| Author | SHA1 | Date | |
|---|---|---|---|
| 422e8fe5a5 |
10
.env.example
10
.env.example
@@ -16,15 +16,6 @@ CLAUDE_CODE_OAUTH_TOKEN=
|
|||||||
ANTHROPIC_API_KEY=
|
ANTHROPIC_API_KEY=
|
||||||
CLAUDE_MODEL=
|
CLAUDE_MODEL=
|
||||||
CLAUDE_CODE_PATH=
|
CLAUDE_CODE_PATH=
|
||||||
# Claude binary observability: off | stdout | file | both
|
|
||||||
CLAUDE_OBSERVABILITY_MODE=off
|
|
||||||
# CLAUDE_OBSERVABILITY_VERBOSITY: summary | full
|
|
||||||
CLAUDE_OBSERVABILITY_VERBOSITY=summary
|
|
||||||
# Relative to repository workspace root in UI/provider runs.
|
|
||||||
CLAUDE_OBSERVABILITY_LOG_PATH=.ai_ops/events/claude-trace.ndjson
|
|
||||||
CLAUDE_OBSERVABILITY_INCLUDE_PARTIAL=false
|
|
||||||
CLAUDE_OBSERVABILITY_DEBUG=false
|
|
||||||
CLAUDE_OBSERVABILITY_DEBUG_LOG_PATH=
|
|
||||||
|
|
||||||
# Agent management limits
|
# Agent management limits
|
||||||
AGENT_MAX_CONCURRENT=4
|
AGENT_MAX_CONCURRENT=4
|
||||||
@@ -37,7 +28,6 @@ AGENT_PROJECT_CONTEXT_PATH=.ai_ops/project-context.json
|
|||||||
AGENT_TOPOLOGY_MAX_DEPTH=4
|
AGENT_TOPOLOGY_MAX_DEPTH=4
|
||||||
AGENT_TOPOLOGY_MAX_RETRIES=2
|
AGENT_TOPOLOGY_MAX_RETRIES=2
|
||||||
AGENT_RELATIONSHIP_MAX_CHILDREN=4
|
AGENT_RELATIONSHIP_MAX_CHILDREN=4
|
||||||
AGENT_MERGE_CONFLICT_MAX_ATTEMPTS=2
|
|
||||||
|
|
||||||
# Resource provisioning (hard + soft constraints)
|
# Resource provisioning (hard + soft constraints)
|
||||||
AGENT_WORKTREE_ROOT=.ai_ops/worktrees
|
AGENT_WORKTREE_ROOT=.ai_ops/worktrees
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -4,4 +4,3 @@ dist
|
|||||||
mcp.config.json
|
mcp.config.json
|
||||||
.ai_ops
|
.ai_ops
|
||||||
.agent-context
|
.agent-context
|
||||||
.workspace
|
|
||||||
@@ -29,7 +29,6 @@
|
|||||||
- `AGENT_TOPOLOGY_MAX_DEPTH`
|
- `AGENT_TOPOLOGY_MAX_DEPTH`
|
||||||
- `AGENT_TOPOLOGY_MAX_RETRIES`
|
- `AGENT_TOPOLOGY_MAX_RETRIES`
|
||||||
- `AGENT_RELATIONSHIP_MAX_CHILDREN`
|
- `AGENT_RELATIONSHIP_MAX_CHILDREN`
|
||||||
- `AGENT_MERGE_CONFLICT_MAX_ATTEMPTS`
|
|
||||||
- Provisioning/resource controls:
|
- Provisioning/resource controls:
|
||||||
- `AGENT_WORKTREE_ROOT`
|
- `AGENT_WORKTREE_ROOT`
|
||||||
- `AGENT_WORKTREE_BASE_REF`
|
- `AGENT_WORKTREE_BASE_REF`
|
||||||
|
|||||||
43
README.md
43
README.md
@@ -20,13 +20,10 @@ TypeScript runtime for deterministic multi-agent execution with:
|
|||||||
- Runtime events are emitted as best-effort side-channel telemetry and do not affect orchestration control flow.
|
- Runtime events are emitted as best-effort side-channel telemetry and do not affect orchestration control flow.
|
||||||
- `AgentManager` is an internal utility used by the pipeline when fan-out/retry-unrolled behavior is required.
|
- `AgentManager` is an internal utility used by the pipeline when fan-out/retry-unrolled behavior is required.
|
||||||
- Session state is persisted under `AGENT_STATE_ROOT`.
|
- Session state is persisted under `AGENT_STATE_ROOT`.
|
||||||
- Session lifecycle is explicit (`POST /api/sessions`, `POST /api/sessions/:id/run`, `POST /api/sessions/:id/close`) and each session is bound to a target project path.
|
- Project state is persisted under `AGENT_PROJECT_CONTEXT_PATH` with schema-versioned JSON (`schemaVersion`) and domains:
|
||||||
- Session project context is persisted as schema-versioned JSON (`schemaVersion`) with domains:
|
|
||||||
- `globalFlags`
|
- `globalFlags`
|
||||||
- `artifactPointers`
|
- `artifactPointers`
|
||||||
- `taskQueue`
|
- `taskQueue`
|
||||||
- each task record stores `taskId`, status, and optional `worktreePath` for task-scoped workspace ownership
|
|
||||||
- conflict-aware statuses are supported (`conflict`, `resolving_conflict`)
|
|
||||||
|
|
||||||
## Deep Dives
|
## Deep Dives
|
||||||
|
|
||||||
@@ -95,7 +92,6 @@ The UI provides:
|
|||||||
- graph visualizer with topology/retry rendering, edge trigger labels, node economics (duration/cost/tokens), and critical-path highlighting
|
- graph visualizer with topology/retry rendering, edge trigger labels, node economics (duration/cost/tokens), and critical-path highlighting
|
||||||
- node inspector with attempt metadata and injected `ResolvedExecutionContext` sandbox payload
|
- node inspector with attempt metadata and injected `ResolvedExecutionContext` sandbox payload
|
||||||
- live runtime event feed from `AGENT_RUNTIME_EVENT_LOG_PATH` with severity coloring (including security mirror events)
|
- live runtime event feed from `AGENT_RUNTIME_EVENT_LOG_PATH` with severity coloring (including security mirror events)
|
||||||
- Claude trace feed from `CLAUDE_OBSERVABILITY_LOG_PATH` (query lifecycle, SDK message types/subtypes, and errors)
|
|
||||||
- run trigger + kill switch backed by `SchemaDrivenExecutionEngine.runSession(...)`
|
- run trigger + kill switch backed by `SchemaDrivenExecutionEngine.runSession(...)`
|
||||||
- run mode selector: `provider` (real Codex/Claude execution) or `mock` (deterministic dry-run executor)
|
- run mode selector: `provider` (real Codex/Claude execution) or `mock` (deterministic dry-run executor)
|
||||||
- provider selector: `codex` or `claude`
|
- provider selector: `codex` or `claude`
|
||||||
@@ -109,7 +105,6 @@ Provider mode notes:
|
|||||||
- `provider=codex` uses existing OpenAI/Codex auth settings (`OPENAI_AUTH_MODE`, `CODEX_API_KEY`, `OPENAI_API_KEY`).
|
- `provider=codex` uses existing OpenAI/Codex auth settings (`OPENAI_AUTH_MODE`, `CODEX_API_KEY`, `OPENAI_API_KEY`).
|
||||||
- `provider=claude` uses Claude auth resolution (`CLAUDE_CODE_OAUTH_TOKEN` preferred, otherwise `ANTHROPIC_API_KEY`, or existing Claude Code login state).
|
- `provider=claude` uses Claude auth resolution (`CLAUDE_CODE_OAUTH_TOKEN` preferred, otherwise `ANTHROPIC_API_KEY`, or existing Claude Code login state).
|
||||||
- `CLAUDE_MODEL` should be a Claude model id/alias recognized by Claude Code (for example `claude-sonnet-4-6`); `anthropic/...` prefixes are normalized automatically.
|
- `CLAUDE_MODEL` should be a Claude model id/alias recognized by Claude Code (for example `claude-sonnet-4-6`); `anthropic/...` prefixes are normalized automatically.
|
||||||
- Claude provider runs can emit Claude SDK/CLI internals to stdout and/or NDJSON with `CLAUDE_OBSERVABILITY_*` settings.
|
|
||||||
|
|
||||||
## Manifest Semantics
|
## Manifest Semantics
|
||||||
|
|
||||||
@@ -133,9 +128,9 @@ Pipeline edges can route via:
|
|||||||
Domain events are typed and can trigger edges directly:
|
Domain events are typed and can trigger edges directly:
|
||||||
|
|
||||||
- planning: `requirements_defined`, `tasks_planned`
|
- planning: `requirements_defined`, `tasks_planned`
|
||||||
- execution: `code_committed`, `task_ready_for_review`, `task_blocked`
|
- execution: `code_committed`, `task_blocked`
|
||||||
- validation: `validation_passed`, `validation_failed`
|
- validation: `validation_passed`, `validation_failed`
|
||||||
- integration: `branch_merged`, `merge_conflict_detected`, `merge_conflict_resolved`, `merge_conflict_unresolved`, `merge_retry_started`
|
- integration: `branch_merged`
|
||||||
|
|
||||||
Actors can emit events in `ActorExecutionResult.events`. Pipeline status also emits default validation/execution events.
|
Actors can emit events in `ActorExecutionResult.events`. Pipeline status also emits default validation/execution events.
|
||||||
|
|
||||||
@@ -204,30 +199,6 @@ Notes:
|
|||||||
- `security.tool.invocation_allowed`
|
- `security.tool.invocation_allowed`
|
||||||
- `security.tool.invocation_blocked`
|
- `security.tool.invocation_blocked`
|
||||||
|
|
||||||
## Claude Observability
|
|
||||||
|
|
||||||
- `CLAUDE_OBSERVABILITY_MODE=stdout` prints structured Claude query internals (tool progress, system events, stderr, result lifecycle) to stdout as JSON lines prefixed with `[claude-trace]`.
|
|
||||||
- `CLAUDE_OBSERVABILITY_MODE=file` appends the same records to `CLAUDE_OBSERVABILITY_LOG_PATH`.
|
|
||||||
- `CLAUDE_OBSERVABILITY_MODE=both` enables both outputs.
|
|
||||||
- Output samples high-frequency `tool_progress` events to avoid log flooding while retaining suppression counters.
|
|
||||||
- `assistant` and `user` message records are retained so turn flow is inspectable end-to-end.
|
|
||||||
- `CLAUDE_OBSERVABILITY_VERBOSITY=summary` stores compact metadata; `full` stores redacted full SDK message payloads.
|
|
||||||
- `CLAUDE_OBSERVABILITY_INCLUDE_PARTIAL=true` enables and emits sampled partial assistant stream events from the SDK.
|
|
||||||
- `CLAUDE_OBSERVABILITY_DEBUG=true` enables Claude SDK debug mode.
|
|
||||||
- `CLAUDE_OBSERVABILITY_DEBUG_LOG_PATH` writes Claude SDK debug output to a file (also enables debug mode).
|
|
||||||
- In UI/provider mode, `CLAUDE_OBSERVABILITY_LOG_PATH` resolves relative to the repo workspace root.
|
|
||||||
- UI API: `GET /api/claude-trace?limit=<n>&sessionId=<id>` reads filtered Claude trace records.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
CLAUDE_OBSERVABILITY_MODE=both
|
|
||||||
CLAUDE_OBSERVABILITY_VERBOSITY=summary
|
|
||||||
CLAUDE_OBSERVABILITY_LOG_PATH=.ai_ops/events/claude-trace.ndjson
|
|
||||||
CLAUDE_OBSERVABILITY_INCLUDE_PARTIAL=false
|
|
||||||
CLAUDE_OBSERVABILITY_DEBUG=false
|
|
||||||
```
|
|
||||||
|
|
||||||
### Analytics Quick Start
|
### Analytics Quick Start
|
||||||
|
|
||||||
Inspect latest events:
|
Inspect latest events:
|
||||||
@@ -267,6 +238,7 @@ jq -c 'select(.severity=="critical")' .ai_ops/events/runtime-events.ndjson
|
|||||||
- Every actor execution input now includes `security` helpers (`rulesEngine`, `createCommandExecutor(...)`) so executors can enforce shell/tool policy at the execution boundary.
|
- Every actor execution input now includes `security` helpers (`rulesEngine`, `createCommandExecutor(...)`) so executors can enforce shell/tool policy at the execution boundary.
|
||||||
- Every actor execution input now includes `mcp` helpers (`resolvedConfig`, `resolveConfig(...)`, `filterToolsForProvider(...)`, `createClaudeCanUseTool()`) so provider adapters are filtered against `executionContext.allowedTools` before SDK calls.
|
- Every actor execution input now includes `mcp` helpers (`resolvedConfig`, `resolveConfig(...)`, `filterToolsForProvider(...)`, `createClaudeCanUseTool()`) so provider adapters are filtered against `executionContext.allowedTools` before SDK calls.
|
||||||
- For Claude-based executors, pass `input.mcp.filterToolsForProvider(...)` and `input.mcp.createClaudeCanUseTool()` into the SDK call path so unauthorized tools are never exposed and runtime bypass attempts trigger security violations.
|
- For Claude-based executors, pass `input.mcp.filterToolsForProvider(...)` and `input.mcp.createClaudeCanUseTool()` into the SDK call path so unauthorized tools are never exposed and runtime bypass attempts trigger security violations.
|
||||||
|
- Claude `canUseTool` permission checks normalize provider casing (`Bash` vs `bash`) before enforcing persona allowlists.
|
||||||
- Pipeline behavior on `SecurityViolationError` is configurable:
|
- Pipeline behavior on `SecurityViolationError` is configurable:
|
||||||
- `hard_abort` (default)
|
- `hard_abort` (default)
|
||||||
- `validation_fail` (retry-unrolled remediation)
|
- `validation_fail` (retry-unrolled remediation)
|
||||||
@@ -284,12 +256,6 @@ jq -c 'select(.severity=="critical")' .ai_ops/events/runtime-events.ndjson
|
|||||||
- `ANTHROPIC_API_KEY` (used when `CLAUDE_CODE_OAUTH_TOKEN` is unset)
|
- `ANTHROPIC_API_KEY` (used when `CLAUDE_CODE_OAUTH_TOKEN` is unset)
|
||||||
- `CLAUDE_MODEL`
|
- `CLAUDE_MODEL`
|
||||||
- `CLAUDE_CODE_PATH`
|
- `CLAUDE_CODE_PATH`
|
||||||
- `CLAUDE_OBSERVABILITY_MODE` (`off`, `stdout`, `file`, or `both`)
|
|
||||||
- `CLAUDE_OBSERVABILITY_VERBOSITY` (`summary` or `full`)
|
|
||||||
- `CLAUDE_OBSERVABILITY_LOG_PATH`
|
|
||||||
- `CLAUDE_OBSERVABILITY_INCLUDE_PARTIAL` (`true` or `false`)
|
|
||||||
- `CLAUDE_OBSERVABILITY_DEBUG` (`true` or `false`)
|
|
||||||
- `CLAUDE_OBSERVABILITY_DEBUG_LOG_PATH`
|
|
||||||
- `MCP_CONFIG_PATH`
|
- `MCP_CONFIG_PATH`
|
||||||
|
|
||||||
### Agent Manager Limits
|
### Agent Manager Limits
|
||||||
@@ -305,7 +271,6 @@ jq -c 'select(.severity=="critical")' .ai_ops/events/runtime-events.ndjson
|
|||||||
- `AGENT_TOPOLOGY_MAX_DEPTH`
|
- `AGENT_TOPOLOGY_MAX_DEPTH`
|
||||||
- `AGENT_TOPOLOGY_MAX_RETRIES`
|
- `AGENT_TOPOLOGY_MAX_RETRIES`
|
||||||
- `AGENT_RELATIONSHIP_MAX_CHILDREN`
|
- `AGENT_RELATIONSHIP_MAX_CHILDREN`
|
||||||
- `AGENT_MERGE_CONFLICT_MAX_ATTEMPTS`
|
|
||||||
|
|
||||||
### Provisioning / Resource Controls
|
### Provisioning / Resource Controls
|
||||||
|
|
||||||
|
|||||||
@@ -52,16 +52,10 @@ This keeps orchestration policy resolution separate from executor enforcement. E
|
|||||||
- planning: `requirements_defined`, `tasks_planned`
|
- planning: `requirements_defined`, `tasks_planned`
|
||||||
- execution: `code_committed`, `task_blocked`
|
- execution: `code_committed`, `task_blocked`
|
||||||
- validation: `validation_passed`, `validation_failed`
|
- validation: `validation_passed`, `validation_failed`
|
||||||
- integration: `branch_merged`, `merge_conflict_detected`, `merge_conflict_resolved`, `merge_conflict_unresolved`, `merge_retry_started`
|
- integration: `branch_merged`
|
||||||
- Pipeline edges can trigger on domain events (`edge.event`) in addition to legacy status triggers (`edge.on`).
|
- Pipeline edges can trigger on domain events (`edge.event`) in addition to legacy status triggers (`edge.on`).
|
||||||
- `history_has_event` route conditions evaluate persisted domain event history entries (`validation_failed`, `task_blocked`, etc.).
|
- `history_has_event` route conditions evaluate persisted domain event history entries (`validation_failed`, `task_blocked`, etc.).
|
||||||
|
|
||||||
## Merge conflict orchestration
|
|
||||||
|
|
||||||
- Task merge/close merge operations return structured outcomes (`success`, `conflict`, `fatal_error`) instead of throwing for conflicts.
|
|
||||||
- Task state supports conflict workflows (`conflict`, `resolving_conflict`) and conflict metadata is persisted under `task.metadata.mergeConflict`.
|
|
||||||
- Conflict retries are bounded by `AGENT_MERGE_CONFLICT_MAX_ATTEMPTS`; exhaustion emits `merge_conflict_unresolved` and the session continues without crashing.
|
|
||||||
|
|
||||||
## Security note
|
## Security note
|
||||||
|
|
||||||
Security enforcement now lives in `src/security`:
|
Security enforcement now lives in `src/security`:
|
||||||
|
|||||||
@@ -40,6 +40,7 @@ This middleware provides a first-pass hardening layer for agent-executed shell c
|
|||||||
- `registry`: resolved runtime `McpRegistry`
|
- `registry`: resolved runtime `McpRegistry`
|
||||||
- `resolveConfig(...)`: centralized MCP config resolution with persona tool-clearance applied
|
- `resolveConfig(...)`: centralized MCP config resolution with persona tool-clearance applied
|
||||||
- `createClaudeCanUseTool()`: helper for Claude SDK `canUseTool` callback so each tool invocation is allowlist/banlist-enforced before execution
|
- `createClaudeCanUseTool()`: helper for Claude SDK `canUseTool` callback so each tool invocation is allowlist/banlist-enforced before execution
|
||||||
|
- Tool matching is case-insensitive at invocation time to handle provider-emitted names like `Bash` versus allowlist entries like `bash`.
|
||||||
|
|
||||||
## Known limits and TODOs
|
## Known limits and TODOs
|
||||||
|
|
||||||
|
|||||||
@@ -2,14 +2,9 @@ import { randomUUID } from "node:crypto";
|
|||||||
import type { JsonObject } from "./types.js";
|
import type { JsonObject } from "./types.js";
|
||||||
|
|
||||||
export type PlanningDomainEventType = "requirements_defined" | "tasks_planned";
|
export type PlanningDomainEventType = "requirements_defined" | "tasks_planned";
|
||||||
export type ExecutionDomainEventType = "code_committed" | "task_blocked" | "task_ready_for_review";
|
export type ExecutionDomainEventType = "code_committed" | "task_blocked";
|
||||||
export type ValidationDomainEventType = "validation_passed" | "validation_failed";
|
export type ValidationDomainEventType = "validation_passed" | "validation_failed";
|
||||||
export type IntegrationDomainEventType =
|
export type IntegrationDomainEventType = "branch_merged";
|
||||||
| "branch_merged"
|
|
||||||
| "merge_conflict_detected"
|
|
||||||
| "merge_conflict_resolved"
|
|
||||||
| "merge_conflict_unresolved"
|
|
||||||
| "merge_retry_started";
|
|
||||||
|
|
||||||
export type DomainEventType =
|
export type DomainEventType =
|
||||||
| PlanningDomainEventType
|
| PlanningDomainEventType
|
||||||
@@ -51,14 +46,9 @@ const DOMAIN_EVENT_TYPES = new Set<DomainEventType>([
|
|||||||
"tasks_planned",
|
"tasks_planned",
|
||||||
"code_committed",
|
"code_committed",
|
||||||
"task_blocked",
|
"task_blocked",
|
||||||
"task_ready_for_review",
|
|
||||||
"validation_passed",
|
"validation_passed",
|
||||||
"validation_failed",
|
"validation_failed",
|
||||||
"branch_merged",
|
"branch_merged",
|
||||||
"merge_conflict_detected",
|
|
||||||
"merge_conflict_resolved",
|
|
||||||
"merge_conflict_unresolved",
|
|
||||||
"merge_retry_started",
|
|
||||||
]);
|
]);
|
||||||
|
|
||||||
export function isDomainEventType(value: string): value is DomainEventType {
|
export function isDomainEventType(value: string): value is DomainEventType {
|
||||||
|
|||||||
@@ -50,14 +50,10 @@ function toNodeAttemptSeverity(status: ActorResultStatus): RuntimeEventSeverity
|
|||||||
}
|
}
|
||||||
|
|
||||||
function toDomainEventSeverity(type: DomainEventType): RuntimeEventSeverity {
|
function toDomainEventSeverity(type: DomainEventType): RuntimeEventSeverity {
|
||||||
if (type === "task_blocked" || type === "merge_conflict_unresolved") {
|
if (type === "task_blocked") {
|
||||||
return "critical";
|
return "critical";
|
||||||
}
|
}
|
||||||
if (
|
if (type === "validation_failed") {
|
||||||
type === "validation_failed" ||
|
|
||||||
type === "merge_conflict_detected" ||
|
|
||||||
type === "merge_retry_started"
|
|
||||||
) {
|
|
||||||
return "warning";
|
return "warning";
|
||||||
}
|
}
|
||||||
return "info";
|
return "info";
|
||||||
|
|||||||
@@ -2,7 +2,6 @@ import { resolve } from "node:path";
|
|||||||
import { getConfig, loadConfig, type AppConfig } from "../config.js";
|
import { getConfig, loadConfig, type AppConfig } from "../config.js";
|
||||||
import { createDefaultMcpRegistry, loadMcpConfigFromEnv, McpRegistry } from "../mcp.js";
|
import { createDefaultMcpRegistry, loadMcpConfigFromEnv, McpRegistry } from "../mcp.js";
|
||||||
import { parseAgentManifest, type AgentManifest } from "./manifest.js";
|
import { parseAgentManifest, type AgentManifest } from "./manifest.js";
|
||||||
import type { DomainEventEmission } from "./domain-events.js";
|
|
||||||
import { AgentManager } from "./manager.js";
|
import { AgentManager } from "./manager.js";
|
||||||
import {
|
import {
|
||||||
PersonaRegistry,
|
PersonaRegistry,
|
||||||
@@ -14,16 +13,10 @@ import {
|
|||||||
type ActorExecutionSecurityContext,
|
type ActorExecutionSecurityContext,
|
||||||
type ActorExecutor,
|
type ActorExecutor,
|
||||||
type PipelineRunSummary,
|
type PipelineRunSummary,
|
||||||
type TaskExecutionLifecycle,
|
|
||||||
} from "./pipeline.js";
|
} from "./pipeline.js";
|
||||||
import {
|
import { FileSystemProjectContextStore } from "./project-context.js";
|
||||||
FileSystemProjectContextStore,
|
|
||||||
type ProjectTask,
|
|
||||||
type ProjectTaskStatus,
|
|
||||||
} from "./project-context.js";
|
|
||||||
import { FileSystemStateContextManager, type StoredSessionState } from "./state-context.js";
|
import { FileSystemStateContextManager, type StoredSessionState } from "./state-context.js";
|
||||||
import type { JsonObject } from "./types.js";
|
import type { JsonObject } from "./types.js";
|
||||||
import { SessionWorktreeManager, type SessionMetadata } from "./session-lifecycle.js";
|
|
||||||
import {
|
import {
|
||||||
SecureCommandExecutor,
|
SecureCommandExecutor,
|
||||||
type SecurityAuditEvent,
|
type SecurityAuditEvent,
|
||||||
@@ -45,7 +38,6 @@ export type OrchestrationSettings = {
|
|||||||
maxDepth: number;
|
maxDepth: number;
|
||||||
maxRetries: number;
|
maxRetries: number;
|
||||||
maxChildren: number;
|
maxChildren: number;
|
||||||
mergeConflictMaxAttempts: number;
|
|
||||||
securityViolationHandling: "hard_abort" | "validation_fail";
|
securityViolationHandling: "hard_abort" | "validation_fail";
|
||||||
runtimeContext: Record<string, string | number | boolean>;
|
runtimeContext: Record<string, string | number | boolean>;
|
||||||
};
|
};
|
||||||
@@ -64,7 +56,6 @@ export function loadOrchestrationSettingsFromEnv(
|
|||||||
maxDepth: config.orchestration.maxDepth,
|
maxDepth: config.orchestration.maxDepth,
|
||||||
maxRetries: config.orchestration.maxRetries,
|
maxRetries: config.orchestration.maxRetries,
|
||||||
maxChildren: config.orchestration.maxChildren,
|
maxChildren: config.orchestration.maxChildren,
|
||||||
mergeConflictMaxAttempts: config.orchestration.mergeConflictMaxAttempts,
|
|
||||||
securityViolationHandling: config.security.violationHandling,
|
securityViolationHandling: config.security.violationHandling,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@@ -190,9 +181,6 @@ function createActorSecurityContext(input: {
|
|||||||
type: `security.${event.type}`,
|
type: `security.${event.type}`,
|
||||||
severity: mapSecurityAuditSeverity(event),
|
severity: mapSecurityAuditSeverity(event),
|
||||||
message: toSecurityAuditMessage(event),
|
message: toSecurityAuditMessage(event),
|
||||||
...(event.sessionId ? { sessionId: event.sessionId } : {}),
|
|
||||||
...(event.nodeId ? { nodeId: event.nodeId } : {}),
|
|
||||||
...(typeof event.attempt === "number" ? { attempt: event.attempt } : {}),
|
|
||||||
metadata: toSecurityAuditMetadata(event),
|
metadata: toSecurityAuditMetadata(event),
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
@@ -233,57 +221,6 @@ function createActorSecurityContext(input: {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
function resolveSessionProjectContextPath(stateRoot: string, sessionId: string): string {
|
|
||||||
return resolve(stateRoot, sessionId, "project-context.json");
|
|
||||||
}
|
|
||||||
|
|
||||||
function readTaskIdFromPayload(payload: JsonObject, fallback: string): string {
|
|
||||||
const candidates = [payload.taskId, payload.task_id, payload.task];
|
|
||||||
for (const candidate of candidates) {
|
|
||||||
if (typeof candidate === "string" && candidate.trim().length > 0) {
|
|
||||||
return candidate.trim();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return fallback;
|
|
||||||
}
|
|
||||||
|
|
||||||
function toTaskStatusForFailure(
|
|
||||||
resultStatus: "validation_fail" | "failure",
|
|
||||||
statusAtStart: string,
|
|
||||||
): ProjectTaskStatus {
|
|
||||||
if (resultStatus === "failure") {
|
|
||||||
return "failed";
|
|
||||||
}
|
|
||||||
if (statusAtStart === "conflict" || statusAtStart === "resolving_conflict") {
|
|
||||||
return "conflict";
|
|
||||||
}
|
|
||||||
return "in_progress";
|
|
||||||
}
|
|
||||||
|
|
||||||
function shouldMergeFromStatus(statusAtStart: string): boolean {
|
|
||||||
return statusAtStart === "review" || statusAtStart === "resolving_conflict";
|
|
||||||
}
|
|
||||||
|
|
||||||
function toTaskIdLabel(task: ProjectTask): string {
|
|
||||||
return task.taskId || task.id || "task";
|
|
||||||
}
|
|
||||||
|
|
||||||
function toJsonObject(value: unknown): JsonObject | undefined {
|
|
||||||
if (!value || typeof value !== "object" || Array.isArray(value)) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
return value as JsonObject;
|
|
||||||
}
|
|
||||||
|
|
||||||
function readMergeConflictAttempts(metadata: JsonObject | undefined): number {
|
|
||||||
const record = toJsonObject(metadata?.mergeConflict);
|
|
||||||
const attempts = record?.attempts;
|
|
||||||
if (typeof attempts === "number" && Number.isInteger(attempts) && attempts >= 0) {
|
|
||||||
return attempts;
|
|
||||||
}
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class SchemaDrivenExecutionEngine {
|
export class SchemaDrivenExecutionEngine {
|
||||||
private readonly manifest: AgentManifest;
|
private readonly manifest: AgentManifest;
|
||||||
private readonly personaRegistry = new PersonaRegistry();
|
private readonly personaRegistry = new PersonaRegistry();
|
||||||
@@ -297,7 +234,6 @@ export class SchemaDrivenExecutionEngine {
|
|||||||
private readonly mcpRegistry: McpRegistry;
|
private readonly mcpRegistry: McpRegistry;
|
||||||
private readonly runtimeEventPublisher: RuntimeEventPublisher;
|
private readonly runtimeEventPublisher: RuntimeEventPublisher;
|
||||||
private readonly securityContext: ActorExecutionSecurityContext;
|
private readonly securityContext: ActorExecutionSecurityContext;
|
||||||
private readonly sessionWorktreeManager: SessionWorktreeManager;
|
|
||||||
|
|
||||||
constructor(input: {
|
constructor(input: {
|
||||||
manifest: AgentManifest | unknown;
|
manifest: AgentManifest | unknown;
|
||||||
@@ -324,8 +260,6 @@ export class SchemaDrivenExecutionEngine {
|
|||||||
maxDepth: input.settings?.maxDepth ?? config.orchestration.maxDepth,
|
maxDepth: input.settings?.maxDepth ?? config.orchestration.maxDepth,
|
||||||
maxRetries: input.settings?.maxRetries ?? config.orchestration.maxRetries,
|
maxRetries: input.settings?.maxRetries ?? config.orchestration.maxRetries,
|
||||||
maxChildren: input.settings?.maxChildren ?? config.orchestration.maxChildren,
|
maxChildren: input.settings?.maxChildren ?? config.orchestration.maxChildren,
|
||||||
mergeConflictMaxAttempts:
|
|
||||||
input.settings?.mergeConflictMaxAttempts ?? config.orchestration.mergeConflictMaxAttempts,
|
|
||||||
securityViolationHandling:
|
securityViolationHandling:
|
||||||
input.settings?.securityViolationHandling ?? config.security.violationHandling,
|
input.settings?.securityViolationHandling ?? config.security.violationHandling,
|
||||||
runtimeContext: {
|
runtimeContext: {
|
||||||
@@ -339,10 +273,6 @@ export class SchemaDrivenExecutionEngine {
|
|||||||
this.projectContextStore = new FileSystemProjectContextStore({
|
this.projectContextStore = new FileSystemProjectContextStore({
|
||||||
filePath: this.settings.projectContextPath,
|
filePath: this.settings.projectContextPath,
|
||||||
});
|
});
|
||||||
this.sessionWorktreeManager = new SessionWorktreeManager({
|
|
||||||
worktreeRoot: resolve(this.settings.workspaceRoot, this.config.provisioning.gitWorktree.rootDirectory),
|
|
||||||
baseRef: this.config.provisioning.gitWorktree.baseRef,
|
|
||||||
});
|
|
||||||
|
|
||||||
this.actorExecutors = toExecutorMap(input.actorExecutors);
|
this.actorExecutors = toExecutorMap(input.actorExecutors);
|
||||||
this.manager =
|
this.manager =
|
||||||
@@ -422,22 +352,9 @@ export class SchemaDrivenExecutionEngine {
|
|||||||
initialPayload: JsonObject;
|
initialPayload: JsonObject;
|
||||||
initialState?: Partial<StoredSessionState>;
|
initialState?: Partial<StoredSessionState>;
|
||||||
signal?: AbortSignal;
|
signal?: AbortSignal;
|
||||||
sessionMetadata?: SessionMetadata;
|
|
||||||
}): Promise<PipelineRunSummary> {
|
}): Promise<PipelineRunSummary> {
|
||||||
const managerSessionId = `${input.sessionId}__pipeline`;
|
const managerSessionId = `${input.sessionId}__pipeline`;
|
||||||
const managerSession = this.manager.createSession(managerSessionId);
|
const managerSession = this.manager.createSession(managerSessionId);
|
||||||
const workspaceRoot = input.sessionMetadata?.baseWorkspacePath ?? this.settings.workspaceRoot;
|
|
||||||
const projectContextStore = input.sessionMetadata
|
|
||||||
? new FileSystemProjectContextStore({
|
|
||||||
filePath: resolveSessionProjectContextPath(this.settings.stateRoot, input.sessionId),
|
|
||||||
})
|
|
||||||
: this.projectContextStore;
|
|
||||||
const taskLifecycle = input.sessionMetadata
|
|
||||||
? this.createTaskExecutionLifecycle({
|
|
||||||
session: input.sessionMetadata,
|
|
||||||
projectContextStore,
|
|
||||||
})
|
|
||||||
: undefined;
|
|
||||||
|
|
||||||
const executor = new PipelineExecutor(
|
const executor = new PipelineExecutor(
|
||||||
this.manifest,
|
this.manifest,
|
||||||
@@ -445,26 +362,25 @@ export class SchemaDrivenExecutionEngine {
|
|||||||
this.stateManager,
|
this.stateManager,
|
||||||
this.actorExecutors,
|
this.actorExecutors,
|
||||||
{
|
{
|
||||||
workspaceRoot,
|
workspaceRoot: this.settings.workspaceRoot,
|
||||||
runtimeContext: this.settings.runtimeContext,
|
runtimeContext: this.settings.runtimeContext,
|
||||||
defaultModelConstraint: this.config.provider.claudeModel,
|
defaultModelConstraint: this.config.provider.claudeModel,
|
||||||
resolvedExecutionSecurityConstraints: {
|
resolvedExecutionSecurityConstraints: {
|
||||||
dropUid: this.config.security.dropUid !== undefined,
|
dropUid: this.config.security.dropUid !== undefined,
|
||||||
dropGid: this.config.security.dropGid !== undefined,
|
dropGid: this.config.security.dropGid !== undefined,
|
||||||
worktreePath: workspaceRoot,
|
worktreePath: this.settings.workspaceRoot,
|
||||||
violationMode: this.settings.securityViolationHandling,
|
violationMode: this.settings.securityViolationHandling,
|
||||||
},
|
},
|
||||||
maxDepth: Math.min(this.settings.maxDepth, this.manifest.topologyConstraints.maxDepth),
|
maxDepth: Math.min(this.settings.maxDepth, this.manifest.topologyConstraints.maxDepth),
|
||||||
maxRetries: Math.min(this.settings.maxRetries, this.manifest.topologyConstraints.maxRetries),
|
maxRetries: Math.min(this.settings.maxRetries, this.manifest.topologyConstraints.maxRetries),
|
||||||
manager: this.manager,
|
manager: this.manager,
|
||||||
managerSessionId,
|
managerSessionId,
|
||||||
projectContextStore,
|
projectContextStore: this.projectContextStore,
|
||||||
resolveMcpConfig: ({ providerHint, prompt, toolClearance, workingDirectory }) =>
|
resolveMcpConfig: ({ providerHint, prompt, toolClearance }) =>
|
||||||
loadMcpConfigFromEnv(
|
loadMcpConfigFromEnv(
|
||||||
{
|
{
|
||||||
providerHint,
|
providerHint,
|
||||||
prompt,
|
prompt,
|
||||||
...(workingDirectory ? { workingDirectory } : {}),
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
config: this.config,
|
config: this.config,
|
||||||
@@ -475,7 +391,6 @@ export class SchemaDrivenExecutionEngine {
|
|||||||
securityViolationHandling: this.settings.securityViolationHandling,
|
securityViolationHandling: this.settings.securityViolationHandling,
|
||||||
securityContext: this.securityContext,
|
securityContext: this.securityContext,
|
||||||
runtimeEventPublisher: this.runtimeEventPublisher,
|
runtimeEventPublisher: this.runtimeEventPublisher,
|
||||||
...(taskLifecycle ? { taskLifecycle } : {}),
|
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
try {
|
try {
|
||||||
@@ -490,334 +405,6 @@ export class SchemaDrivenExecutionEngine {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private createTaskExecutionLifecycle(input: {
|
|
||||||
session: SessionMetadata;
|
|
||||||
projectContextStore: FileSystemProjectContextStore;
|
|
||||||
}): TaskExecutionLifecycle {
|
|
||||||
return {
|
|
||||||
prepareTaskExecution: async ({ node, context }) => {
|
|
||||||
const taskId = readTaskIdFromPayload(context.handoff.payload, node.id);
|
|
||||||
const projectContext = await input.projectContextStore.readState();
|
|
||||||
const existing = projectContext.taskQueue.find(
|
|
||||||
(task) => toTaskIdLabel(task) === taskId,
|
|
||||||
);
|
|
||||||
|
|
||||||
const ensured = await this.sessionWorktreeManager.ensureTaskWorktree({
|
|
||||||
sessionId: input.session.sessionId,
|
|
||||||
taskId,
|
|
||||||
baseWorkspacePath: input.session.baseWorkspacePath,
|
|
||||||
...(existing?.worktreePath ? { existingWorktreePath: existing.worktreePath } : {}),
|
|
||||||
});
|
|
||||||
|
|
||||||
const statusAtStart: ProjectTaskStatus =
|
|
||||||
existing?.status === "review" ||
|
|
||||||
existing?.status === "conflict" ||
|
|
||||||
existing?.status === "resolving_conflict"
|
|
||||||
? existing.status
|
|
||||||
: "in_progress";
|
|
||||||
|
|
||||||
await input.projectContextStore.patchState({
|
|
||||||
upsertTasks: [
|
|
||||||
{
|
|
||||||
taskId,
|
|
||||||
id: taskId,
|
|
||||||
status: statusAtStart,
|
|
||||||
worktreePath: ensured.taskWorktreePath,
|
|
||||||
...(existing?.title ? { title: existing.title } : { title: taskId }),
|
|
||||||
...(existing?.metadata ? { metadata: existing.metadata } : {}),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
taskId,
|
|
||||||
worktreePath: ensured.taskWorktreePath,
|
|
||||||
statusAtStart,
|
|
||||||
...(existing?.metadata ? { metadata: existing.metadata } : {}),
|
|
||||||
};
|
|
||||||
},
|
|
||||||
finalizeTaskExecution: async ({ task, result, domainEvents }) => {
|
|
||||||
const emittedTypes = new Set(domainEvents.map((event) => event.type));
|
|
||||||
const additionalEvents: DomainEventEmission[] = [];
|
|
||||||
const emitEvent = (
|
|
||||||
type: DomainEventEmission["type"],
|
|
||||||
payload?: DomainEventEmission["payload"],
|
|
||||||
): void => {
|
|
||||||
if (emittedTypes.has(type)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
emittedTypes.add(type);
|
|
||||||
additionalEvents.push(payload ? { type, payload } : { type });
|
|
||||||
};
|
|
||||||
|
|
||||||
if (result.status === "failure" || result.status === "validation_fail") {
|
|
||||||
await input.projectContextStore.patchState({
|
|
||||||
upsertTasks: [
|
|
||||||
{
|
|
||||||
taskId: task.taskId,
|
|
||||||
id: task.taskId,
|
|
||||||
status: toTaskStatusForFailure(result.status, task.statusAtStart),
|
|
||||||
worktreePath: task.worktreePath,
|
|
||||||
title: task.taskId,
|
|
||||||
...(task.metadata ? { metadata: task.metadata } : {}),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (task.statusAtStart === "conflict") {
|
|
||||||
const attempts = readMergeConflictAttempts(task.metadata);
|
|
||||||
const metadata: JsonObject = {
|
|
||||||
...(task.metadata ?? {}),
|
|
||||||
mergeConflict: {
|
|
||||||
attempts,
|
|
||||||
maxAttempts: this.settings.mergeConflictMaxAttempts,
|
|
||||||
status: "resolved",
|
|
||||||
resolvedAt: new Date().toISOString(),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
await input.projectContextStore.patchState({
|
|
||||||
upsertTasks: [
|
|
||||||
{
|
|
||||||
taskId: task.taskId,
|
|
||||||
id: task.taskId,
|
|
||||||
status: "resolving_conflict",
|
|
||||||
worktreePath: task.worktreePath,
|
|
||||||
title: task.taskId,
|
|
||||||
metadata,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
|
|
||||||
emitEvent("merge_conflict_resolved", {
|
|
||||||
summary: `Merge conflicts resolved for task "${task.taskId}".`,
|
|
||||||
details: {
|
|
||||||
taskId: task.taskId,
|
|
||||||
worktreePath: task.worktreePath,
|
|
||||||
attempts,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
additionalEvents,
|
|
||||||
handoffPayloadPatch: {
|
|
||||||
taskId: task.taskId,
|
|
||||||
worktreePath: task.worktreePath,
|
|
||||||
mergeConflictStatus: "resolved",
|
|
||||||
mergeConflictAttempts: attempts,
|
|
||||||
} as JsonObject,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (shouldMergeFromStatus(task.statusAtStart)) {
|
|
||||||
const attemptsBeforeMerge = readMergeConflictAttempts(task.metadata);
|
|
||||||
if (task.statusAtStart === "resolving_conflict") {
|
|
||||||
emitEvent("merge_retry_started", {
|
|
||||||
summary: `Retrying merge for task "${task.taskId}".`,
|
|
||||||
details: {
|
|
||||||
taskId: task.taskId,
|
|
||||||
worktreePath: task.worktreePath,
|
|
||||||
nextAttempt: attemptsBeforeMerge + 1,
|
|
||||||
maxAttempts: this.settings.mergeConflictMaxAttempts,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const mergeOutcome = await this.sessionWorktreeManager.mergeTaskIntoBase({
|
|
||||||
taskId: task.taskId,
|
|
||||||
baseWorkspacePath: input.session.baseWorkspacePath,
|
|
||||||
taskWorktreePath: task.worktreePath,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (mergeOutcome.kind === "success") {
|
|
||||||
await input.projectContextStore.patchState({
|
|
||||||
upsertTasks: [
|
|
||||||
{
|
|
||||||
taskId: task.taskId,
|
|
||||||
id: task.taskId,
|
|
||||||
status: "merged",
|
|
||||||
title: task.taskId,
|
|
||||||
metadata: {
|
|
||||||
...(task.metadata ?? {}),
|
|
||||||
mergeConflict: {
|
|
||||||
attempts: attemptsBeforeMerge,
|
|
||||||
maxAttempts: this.settings.mergeConflictMaxAttempts,
|
|
||||||
status: "merged",
|
|
||||||
mergedAt: new Date().toISOString(),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
|
|
||||||
emitEvent("branch_merged", {
|
|
||||||
summary: `Task "${task.taskId}" merged into session base branch.`,
|
|
||||||
details: {
|
|
||||||
taskId: task.taskId,
|
|
||||||
worktreePath: task.worktreePath,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
additionalEvents,
|
|
||||||
handoffPayloadPatch: {
|
|
||||||
taskId: task.taskId,
|
|
||||||
mergeStatus: "merged",
|
|
||||||
} as JsonObject,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (mergeOutcome.kind === "conflict") {
|
|
||||||
const attempts = attemptsBeforeMerge + 1;
|
|
||||||
const exhausted = attempts >= this.settings.mergeConflictMaxAttempts;
|
|
||||||
const metadata: JsonObject = {
|
|
||||||
...(task.metadata ?? {}),
|
|
||||||
mergeConflict: {
|
|
||||||
attempts,
|
|
||||||
maxAttempts: this.settings.mergeConflictMaxAttempts,
|
|
||||||
status: exhausted ? "unresolved" : "conflict",
|
|
||||||
conflictFiles: mergeOutcome.conflictFiles,
|
|
||||||
worktreePath: mergeOutcome.worktreePath,
|
|
||||||
detectedAt: new Date().toISOString(),
|
|
||||||
...(mergeOutcome.mergeBase ? { mergeBase: mergeOutcome.mergeBase } : {}),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
await input.projectContextStore.patchState({
|
|
||||||
upsertTasks: [
|
|
||||||
{
|
|
||||||
taskId: task.taskId,
|
|
||||||
id: task.taskId,
|
|
||||||
status: "conflict",
|
|
||||||
worktreePath: task.worktreePath,
|
|
||||||
title: task.taskId,
|
|
||||||
metadata,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
|
|
||||||
emitEvent("merge_conflict_detected", {
|
|
||||||
summary: `Merge conflict detected for task "${task.taskId}".`,
|
|
||||||
details: {
|
|
||||||
taskId: task.taskId,
|
|
||||||
worktreePath: mergeOutcome.worktreePath,
|
|
||||||
conflictFiles: mergeOutcome.conflictFiles,
|
|
||||||
attempts,
|
|
||||||
maxAttempts: this.settings.mergeConflictMaxAttempts,
|
|
||||||
...(mergeOutcome.mergeBase ? { mergeBase: mergeOutcome.mergeBase } : {}),
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
if (exhausted) {
|
|
||||||
emitEvent("merge_conflict_unresolved", {
|
|
||||||
summary:
|
|
||||||
`Merge conflict attempts exhausted for task "${task.taskId}" ` +
|
|
||||||
`(${String(attempts)}/${String(this.settings.mergeConflictMaxAttempts)}).`,
|
|
||||||
details: {
|
|
||||||
taskId: task.taskId,
|
|
||||||
worktreePath: mergeOutcome.worktreePath,
|
|
||||||
conflictFiles: mergeOutcome.conflictFiles,
|
|
||||||
attempts,
|
|
||||||
maxAttempts: this.settings.mergeConflictMaxAttempts,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
additionalEvents,
|
|
||||||
handoffPayloadPatch: {
|
|
||||||
taskId: task.taskId,
|
|
||||||
worktreePath: task.worktreePath,
|
|
||||||
mergeConflictStatus: exhausted ? "unresolved" : "conflict",
|
|
||||||
mergeConflictAttempts: attempts,
|
|
||||||
mergeConflictMaxAttempts: this.settings.mergeConflictMaxAttempts,
|
|
||||||
mergeConflictFiles: mergeOutcome.conflictFiles,
|
|
||||||
...(mergeOutcome.mergeBase ? { mergeBase: mergeOutcome.mergeBase } : {}),
|
|
||||||
} as JsonObject,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
await input.projectContextStore.patchState({
|
|
||||||
upsertTasks: [
|
|
||||||
{
|
|
||||||
taskId: task.taskId,
|
|
||||||
id: task.taskId,
|
|
||||||
status: "failed",
|
|
||||||
worktreePath: task.worktreePath,
|
|
||||||
title: task.taskId,
|
|
||||||
metadata: {
|
|
||||||
...(task.metadata ?? {}),
|
|
||||||
mergeConflict: {
|
|
||||||
attempts: attemptsBeforeMerge,
|
|
||||||
maxAttempts: this.settings.mergeConflictMaxAttempts,
|
|
||||||
status: "fatal_error",
|
|
||||||
error: mergeOutcome.error,
|
|
||||||
...(mergeOutcome.mergeBase ? { mergeBase: mergeOutcome.mergeBase } : {}),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
|
|
||||||
emitEvent("merge_conflict_unresolved", {
|
|
||||||
summary: `Fatal merge error for task "${task.taskId}".`,
|
|
||||||
details: {
|
|
||||||
taskId: task.taskId,
|
|
||||||
worktreePath: mergeOutcome.worktreePath,
|
|
||||||
error: mergeOutcome.error,
|
|
||||||
...(mergeOutcome.mergeBase ? { mergeBase: mergeOutcome.mergeBase } : {}),
|
|
||||||
},
|
|
||||||
});
|
|
||||||
emitEvent("task_blocked", {
|
|
||||||
summary: `Task "${task.taskId}" blocked due to fatal merge error.`,
|
|
||||||
details: {
|
|
||||||
taskId: task.taskId,
|
|
||||||
error: mergeOutcome.error,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
additionalEvents,
|
|
||||||
handoffPayloadPatch: {
|
|
||||||
taskId: task.taskId,
|
|
||||||
worktreePath: task.worktreePath,
|
|
||||||
mergeStatus: "fatal_error",
|
|
||||||
mergeError: mergeOutcome.error,
|
|
||||||
} as JsonObject,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const nextMetadata = task.metadata
|
|
||||||
? {
|
|
||||||
...task.metadata,
|
|
||||||
}
|
|
||||||
: undefined;
|
|
||||||
|
|
||||||
await input.projectContextStore.patchState({
|
|
||||||
upsertTasks: [
|
|
||||||
{
|
|
||||||
taskId: task.taskId,
|
|
||||||
id: task.taskId,
|
|
||||||
status: "review",
|
|
||||||
worktreePath: task.worktreePath,
|
|
||||||
title: task.taskId,
|
|
||||||
...(nextMetadata ? { metadata: nextMetadata } : {}),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
|
|
||||||
if (additionalEvents.length > 0) {
|
|
||||||
return {
|
|
||||||
additionalEvents,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return;
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
private assertRelationshipConstraints(): void {
|
private assertRelationshipConstraints(): void {
|
||||||
for (const [parent, edges] of this.childrenByParent.entries()) {
|
for (const [parent, edges] of this.childrenByParent.entries()) {
|
||||||
if (edges.length > this.settings.maxChildren) {
|
if (edges.length > this.settings.maxChildren) {
|
||||||
|
|||||||
@@ -107,8 +107,6 @@ export type ResolvedExecutionContext = {
|
|||||||
|
|
||||||
export type ActorExecutionInput = {
|
export type ActorExecutionInput = {
|
||||||
sessionId: string;
|
sessionId: string;
|
||||||
attempt: number;
|
|
||||||
depth: number;
|
|
||||||
node: PipelineNode;
|
node: PipelineNode;
|
||||||
prompt: string;
|
prompt: string;
|
||||||
context: NodeExecutionContext;
|
context: NodeExecutionContext;
|
||||||
@@ -155,7 +153,6 @@ export type PipelineExecutorOptions = {
|
|||||||
securityViolationHandling?: SecurityViolationHandling;
|
securityViolationHandling?: SecurityViolationHandling;
|
||||||
securityContext?: ActorExecutionSecurityContext;
|
securityContext?: ActorExecutionSecurityContext;
|
||||||
runtimeEventPublisher?: RuntimeEventPublisher;
|
runtimeEventPublisher?: RuntimeEventPublisher;
|
||||||
taskLifecycle?: TaskExecutionLifecycle;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
export type ActorExecutionSecurityContext = {
|
export type ActorExecutionSecurityContext = {
|
||||||
@@ -169,34 +166,6 @@ export type ActorExecutionSecurityContext = {
|
|||||||
}) => SecureCommandExecutor;
|
}) => SecureCommandExecutor;
|
||||||
};
|
};
|
||||||
|
|
||||||
export type TaskExecutionResolution = {
|
|
||||||
taskId: string;
|
|
||||||
worktreePath: string;
|
|
||||||
statusAtStart: string;
|
|
||||||
metadata?: JsonObject;
|
|
||||||
};
|
|
||||||
|
|
||||||
export type TaskExecutionLifecycle = {
|
|
||||||
prepareTaskExecution: (input: {
|
|
||||||
sessionId: string;
|
|
||||||
node: PipelineNode;
|
|
||||||
context: NodeExecutionContext;
|
|
||||||
}) => Promise<TaskExecutionResolution>;
|
|
||||||
finalizeTaskExecution: (input: {
|
|
||||||
sessionId: string;
|
|
||||||
node: PipelineNode;
|
|
||||||
task: TaskExecutionResolution;
|
|
||||||
result: ActorExecutionResult;
|
|
||||||
domainEvents: DomainEvent[];
|
|
||||||
}) => Promise<
|
|
||||||
| void
|
|
||||||
| {
|
|
||||||
additionalEvents?: DomainEventEmission[];
|
|
||||||
handoffPayloadPatch?: JsonObject;
|
|
||||||
}
|
|
||||||
>;
|
|
||||||
};
|
|
||||||
|
|
||||||
type QueueItem = {
|
type QueueItem = {
|
||||||
nodeId: string;
|
nodeId: string;
|
||||||
depth: number;
|
depth: number;
|
||||||
@@ -489,6 +458,38 @@ function toToolNameCandidates(toolName: string): string[] {
|
|||||||
return dedupeStrings(candidates);
|
return dedupeStrings(candidates);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function buildCaseInsensitiveToolLookup(tools: readonly string[]): Map<string, string> {
|
||||||
|
const lookup = new Map<string, string>();
|
||||||
|
for (const tool of tools) {
|
||||||
|
const normalized = tool.trim().toLowerCase();
|
||||||
|
if (!normalized || lookup.has(normalized)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
lookup.set(normalized, tool);
|
||||||
|
}
|
||||||
|
return lookup;
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveAllowedToolMatch(input: {
|
||||||
|
candidates: readonly string[];
|
||||||
|
allowset: ReadonlySet<string>;
|
||||||
|
caseInsensitiveLookup: ReadonlyMap<string, string>;
|
||||||
|
}): string | undefined {
|
||||||
|
const direct = input.candidates.find((candidate) => input.allowset.has(candidate));
|
||||||
|
if (direct) {
|
||||||
|
return direct;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const candidate of input.candidates) {
|
||||||
|
const match = input.caseInsensitiveLookup.get(candidate.toLowerCase());
|
||||||
|
if (match) {
|
||||||
|
return match;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
function defaultEventPayloadForStatus(status: ActorResultStatus): DomainEventPayload {
|
function defaultEventPayloadForStatus(status: ActorResultStatus): DomainEventPayload {
|
||||||
if (status === "success") {
|
if (status === "success") {
|
||||||
return {
|
return {
|
||||||
@@ -611,11 +612,9 @@ export class PipelineExecutor {
|
|||||||
globalFlags: { ...projectContext.globalFlags },
|
globalFlags: { ...projectContext.globalFlags },
|
||||||
artifactPointers: { ...projectContext.artifactPointers },
|
artifactPointers: { ...projectContext.artifactPointers },
|
||||||
taskQueue: projectContext.taskQueue.map((task) => ({
|
taskQueue: projectContext.taskQueue.map((task) => ({
|
||||||
taskId: task.taskId,
|
id: task.id,
|
||||||
id: task.id ?? task.taskId,
|
title: task.title,
|
||||||
...(task.title ? { title: task.title } : {}),
|
|
||||||
status: task.status,
|
status: task.status,
|
||||||
...(task.worktreePath ? { worktreePath: task.worktreePath } : {}),
|
|
||||||
...(task.assignee ? { assignee: task.assignee } : {}),
|
...(task.assignee ? { assignee: task.assignee } : {}),
|
||||||
...(task.metadata ? { metadata: task.metadata } : {}),
|
...(task.metadata ? { metadata: task.metadata } : {}),
|
||||||
})),
|
})),
|
||||||
@@ -887,13 +886,6 @@ export class PipelineExecutor {
|
|||||||
})();
|
})();
|
||||||
|
|
||||||
const context = await this.stateManager.buildFreshNodeContext(sessionId, node.id);
|
const context = await this.stateManager.buildFreshNodeContext(sessionId, node.id);
|
||||||
const taskResolution = this.options.taskLifecycle
|
|
||||||
? await this.options.taskLifecycle.prepareTaskExecution({
|
|
||||||
sessionId,
|
|
||||||
node,
|
|
||||||
context,
|
|
||||||
})
|
|
||||||
: undefined;
|
|
||||||
const prompt = this.personaRegistry.renderSystemPrompt({
|
const prompt = this.personaRegistry.renderSystemPrompt({
|
||||||
personaId: node.personaId,
|
personaId: node.personaId,
|
||||||
runtimeContext: {
|
runtimeContext: {
|
||||||
@@ -909,13 +901,10 @@ export class PipelineExecutor {
|
|||||||
node,
|
node,
|
||||||
toolClearance,
|
toolClearance,
|
||||||
prompt,
|
prompt,
|
||||||
worktreePathOverride: taskResolution?.worktreePath,
|
|
||||||
});
|
});
|
||||||
|
|
||||||
const result = await this.invokeActorExecutor({
|
const result = await this.invokeActorExecutor({
|
||||||
sessionId,
|
sessionId,
|
||||||
attempt,
|
|
||||||
depth: recursiveDepth,
|
|
||||||
node,
|
node,
|
||||||
prompt,
|
prompt,
|
||||||
context,
|
context,
|
||||||
@@ -932,49 +921,12 @@ export class PipelineExecutor {
|
|||||||
customEvents: result.events,
|
customEvents: result.events,
|
||||||
});
|
});
|
||||||
const topologyKind: NodeTopologyKind = node.topology?.kind ?? "sequential";
|
const topologyKind: NodeTopologyKind = node.topology?.kind ?? "sequential";
|
||||||
let payloadForNext: JsonObject = {
|
const payloadForNext = result.payload ?? context.handoff.payload;
|
||||||
...context.handoff.payload,
|
|
||||||
...(result.payload ?? {}),
|
|
||||||
...(taskResolution
|
|
||||||
? {
|
|
||||||
taskId: taskResolution.taskId,
|
|
||||||
worktreePath: taskResolution.worktreePath,
|
|
||||||
}
|
|
||||||
: {}),
|
|
||||||
};
|
|
||||||
const shouldRetry =
|
const shouldRetry =
|
||||||
result.status === "validation_fail" &&
|
result.status === "validation_fail" &&
|
||||||
this.shouldRetryValidation(node) &&
|
this.shouldRetryValidation(node) &&
|
||||||
attempt <= maxRetriesForNode;
|
attempt <= maxRetriesForNode;
|
||||||
|
|
||||||
if (taskResolution && this.options.taskLifecycle) {
|
|
||||||
const finalization = await this.options.taskLifecycle.finalizeTaskExecution({
|
|
||||||
sessionId,
|
|
||||||
node,
|
|
||||||
task: taskResolution,
|
|
||||||
result,
|
|
||||||
domainEvents,
|
|
||||||
});
|
|
||||||
for (const eventEmission of finalization?.additionalEvents ?? []) {
|
|
||||||
domainEvents.push(
|
|
||||||
createDomainEvent({
|
|
||||||
type: eventEmission.type,
|
|
||||||
source: "pipeline",
|
|
||||||
sessionId,
|
|
||||||
nodeId: node.id,
|
|
||||||
attempt,
|
|
||||||
payload: eventEmission.payload,
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
if (finalization?.handoffPayloadPatch) {
|
|
||||||
payloadForNext = {
|
|
||||||
...payloadForNext,
|
|
||||||
...finalization.handoffPayloadPatch,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
await this.lifecycleObserver.onNodeAttempt({
|
await this.lifecycleObserver.onNodeAttempt({
|
||||||
sessionId,
|
sessionId,
|
||||||
node,
|
node,
|
||||||
@@ -1069,8 +1021,6 @@ export class PipelineExecutor {
|
|||||||
|
|
||||||
private async invokeActorExecutor(input: {
|
private async invokeActorExecutor(input: {
|
||||||
sessionId: string;
|
sessionId: string;
|
||||||
attempt: number;
|
|
||||||
depth: number;
|
|
||||||
node: PipelineNode;
|
node: PipelineNode;
|
||||||
prompt: string;
|
prompt: string;
|
||||||
context: NodeExecutionContext;
|
context: NodeExecutionContext;
|
||||||
@@ -1083,20 +1033,12 @@ export class PipelineExecutor {
|
|||||||
|
|
||||||
return await input.executor({
|
return await input.executor({
|
||||||
sessionId: input.sessionId,
|
sessionId: input.sessionId,
|
||||||
attempt: input.attempt,
|
|
||||||
depth: input.depth,
|
|
||||||
node: input.node,
|
node: input.node,
|
||||||
prompt: input.prompt,
|
prompt: input.prompt,
|
||||||
context: input.context,
|
context: input.context,
|
||||||
signal: input.signal,
|
signal: input.signal,
|
||||||
executionContext: input.executionContext,
|
executionContext: input.executionContext,
|
||||||
mcp: this.buildActorMcpContext({
|
mcp: this.buildActorMcpContext(input.executionContext, input.prompt),
|
||||||
sessionId: input.sessionId,
|
|
||||||
nodeId: input.node.id,
|
|
||||||
attempt: input.attempt,
|
|
||||||
executionContext: input.executionContext,
|
|
||||||
prompt: input.prompt,
|
|
||||||
}),
|
|
||||||
security: this.securityContext,
|
security: this.securityContext,
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -1137,15 +1079,9 @@ export class PipelineExecutor {
|
|||||||
node: PipelineNode;
|
node: PipelineNode;
|
||||||
toolClearance: ToolClearancePolicy;
|
toolClearance: ToolClearancePolicy;
|
||||||
prompt: string;
|
prompt: string;
|
||||||
worktreePathOverride?: string;
|
|
||||||
}): ResolvedExecutionContext {
|
}): ResolvedExecutionContext {
|
||||||
const normalizedToolClearance = parseToolClearancePolicy(input.toolClearance);
|
const normalizedToolClearance = parseToolClearancePolicy(input.toolClearance);
|
||||||
const worktreePath = input.worktreePathOverride ?? this.options.resolvedExecutionSecurityConstraints.worktreePath;
|
const toolUniverse = this.resolveAvailableToolsForAttempt(normalizedToolClearance, input.prompt);
|
||||||
const toolUniverse = this.resolveAvailableToolsForAttempt({
|
|
||||||
toolClearance: normalizedToolClearance,
|
|
||||||
prompt: input.prompt,
|
|
||||||
worktreePath,
|
|
||||||
});
|
|
||||||
const allowedTools = this.resolveAllowedToolsForAttempt({
|
const allowedTools = this.resolveAllowedToolsForAttempt({
|
||||||
toolClearance: normalizedToolClearance,
|
toolClearance: normalizedToolClearance,
|
||||||
toolUniverse,
|
toolUniverse,
|
||||||
@@ -1161,7 +1097,6 @@ export class PipelineExecutor {
|
|||||||
allowedTools,
|
allowedTools,
|
||||||
security: {
|
security: {
|
||||||
...this.options.resolvedExecutionSecurityConstraints,
|
...this.options.resolvedExecutionSecurityConstraints,
|
||||||
worktreePath,
|
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@@ -1184,20 +1119,15 @@ export class PipelineExecutor {
|
|||||||
return [];
|
return [];
|
||||||
}
|
}
|
||||||
|
|
||||||
private resolveAvailableToolsForAttempt(input: {
|
private resolveAvailableToolsForAttempt(toolClearance: ToolClearancePolicy, prompt: string): string[] {
|
||||||
toolClearance: ToolClearancePolicy;
|
|
||||||
prompt: string;
|
|
||||||
worktreePath: string;
|
|
||||||
}): string[] {
|
|
||||||
if (!this.options.resolveMcpConfig) {
|
if (!this.options.resolveMcpConfig) {
|
||||||
return [];
|
return [];
|
||||||
}
|
}
|
||||||
|
|
||||||
const resolved = this.options.resolveMcpConfig({
|
const resolved = this.options.resolveMcpConfig({
|
||||||
providerHint: "codex",
|
providerHint: "codex",
|
||||||
prompt: input.prompt,
|
prompt,
|
||||||
workingDirectory: input.worktreePath,
|
toolClearance,
|
||||||
toolClearance: input.toolClearance,
|
|
||||||
});
|
});
|
||||||
|
|
||||||
const rawServers = resolved.codexConfig?.mcp_servers;
|
const rawServers = resolved.codexConfig?.mcp_servers;
|
||||||
@@ -1217,14 +1147,10 @@ export class PipelineExecutor {
|
|||||||
return dedupeStrings(tools);
|
return dedupeStrings(tools);
|
||||||
}
|
}
|
||||||
|
|
||||||
private buildActorMcpContext(input: {
|
private buildActorMcpContext(
|
||||||
sessionId: string;
|
executionContext: ResolvedExecutionContext,
|
||||||
nodeId: string;
|
prompt: string,
|
||||||
attempt: number;
|
): ActorExecutionMcpContext {
|
||||||
executionContext: ResolvedExecutionContext;
|
|
||||||
prompt: string;
|
|
||||||
}): ActorExecutionMcpContext {
|
|
||||||
const { executionContext, prompt } = input;
|
|
||||||
const toolPolicy = toAllowedToolPolicy(executionContext.allowedTools);
|
const toolPolicy = toAllowedToolPolicy(executionContext.allowedTools);
|
||||||
const filterToolsForProvider = (tools: string[]): string[] => {
|
const filterToolsForProvider = (tools: string[]): string[] => {
|
||||||
const deduped = dedupeStrings(tools);
|
const deduped = dedupeStrings(tools);
|
||||||
@@ -1235,7 +1161,6 @@ export class PipelineExecutor {
|
|||||||
? this.options.resolveMcpConfig({
|
? this.options.resolveMcpConfig({
|
||||||
providerHint: "both",
|
providerHint: "both",
|
||||||
prompt,
|
prompt,
|
||||||
workingDirectory: executionContext.security.worktreePath,
|
|
||||||
toolClearance: toolPolicy,
|
toolClearance: toolPolicy,
|
||||||
})
|
})
|
||||||
: {};
|
: {};
|
||||||
@@ -1244,12 +1169,7 @@ export class PipelineExecutor {
|
|||||||
executionContext.allowedTools,
|
executionContext.allowedTools,
|
||||||
);
|
);
|
||||||
const resolveConfig = (context: McpLoadContext = {}): LoadedMcpConfig => {
|
const resolveConfig = (context: McpLoadContext = {}): LoadedMcpConfig => {
|
||||||
const withWorkingDirectory: McpLoadContext = {
|
if (context.providerHint === "codex") {
|
||||||
...context,
|
|
||||||
...(context.workingDirectory ? {} : { workingDirectory: executionContext.security.worktreePath }),
|
|
||||||
};
|
|
||||||
|
|
||||||
if (withWorkingDirectory.providerHint === "codex") {
|
|
||||||
return {
|
return {
|
||||||
...(resolvedConfig.codexConfig ? { codexConfig: cloneMcpConfig(resolvedConfig).codexConfig } : {}),
|
...(resolvedConfig.codexConfig ? { codexConfig: cloneMcpConfig(resolvedConfig).codexConfig } : {}),
|
||||||
...(resolvedConfig.sourcePath ? { sourcePath: resolvedConfig.sourcePath } : {}),
|
...(resolvedConfig.sourcePath ? { sourcePath: resolvedConfig.sourcePath } : {}),
|
||||||
@@ -1259,7 +1179,7 @@ export class PipelineExecutor {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
if (withWorkingDirectory.providerHint === "claude") {
|
if (context.providerHint === "claude") {
|
||||||
return {
|
return {
|
||||||
...(resolvedConfig.claudeMcpServers
|
...(resolvedConfig.claudeMcpServers
|
||||||
? { claudeMcpServers: cloneMcpConfig(resolvedConfig).claudeMcpServers }
|
? { claudeMcpServers: cloneMcpConfig(resolvedConfig).claudeMcpServers }
|
||||||
@@ -1275,12 +1195,7 @@ export class PipelineExecutor {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const createToolPermissionHandler = (): ActorToolPermissionHandler =>
|
const createToolPermissionHandler = (): ActorToolPermissionHandler =>
|
||||||
this.createToolPermissionHandler({
|
this.createToolPermissionHandler(executionContext.allowedTools);
|
||||||
allowedTools: executionContext.allowedTools,
|
|
||||||
sessionId: input.sessionId,
|
|
||||||
nodeId: input.nodeId,
|
|
||||||
attempt: input.attempt,
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
allowedTools: [...executionContext.allowedTools],
|
allowedTools: [...executionContext.allowedTools],
|
||||||
@@ -1292,20 +1207,11 @@ export class PipelineExecutor {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
private createToolPermissionHandler(input: {
|
private createToolPermissionHandler(allowedTools: readonly string[]): ActorToolPermissionHandler {
|
||||||
allowedTools: readonly string[];
|
const allowset = new Set(allowedTools);
|
||||||
sessionId: string;
|
const caseInsensitiveAllowLookup = buildCaseInsensitiveToolLookup(allowedTools);
|
||||||
nodeId: string;
|
|
||||||
attempt: number;
|
|
||||||
}): ActorToolPermissionHandler {
|
|
||||||
const allowset = new Set(input.allowedTools);
|
|
||||||
const rulesEngine = this.securityContext?.rulesEngine;
|
const rulesEngine = this.securityContext?.rulesEngine;
|
||||||
const toolPolicy = toAllowedToolPolicy(input.allowedTools);
|
const toolPolicy = toAllowedToolPolicy(allowedTools);
|
||||||
const toolAuditContext = {
|
|
||||||
sessionId: input.sessionId,
|
|
||||||
nodeId: input.nodeId,
|
|
||||||
attempt: input.attempt,
|
|
||||||
};
|
|
||||||
|
|
||||||
return async (toolName, _input, options) => {
|
return async (toolName, _input, options) => {
|
||||||
const toolUseID = options.toolUseID;
|
const toolUseID = options.toolUseID;
|
||||||
@@ -1319,12 +1225,15 @@ export class PipelineExecutor {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const candidates = toToolNameCandidates(toolName);
|
const candidates = toToolNameCandidates(toolName);
|
||||||
const allowMatch = candidates.find((candidate) => allowset.has(candidate));
|
const allowMatch = resolveAllowedToolMatch({
|
||||||
|
candidates,
|
||||||
|
allowset,
|
||||||
|
caseInsensitiveLookup: caseInsensitiveAllowLookup,
|
||||||
|
});
|
||||||
if (!allowMatch) {
|
if (!allowMatch) {
|
||||||
rulesEngine?.assertToolInvocationAllowed({
|
rulesEngine?.assertToolInvocationAllowed({
|
||||||
tool: candidates[0] ?? toolName,
|
tool: candidates[0] ?? toolName,
|
||||||
toolClearance: toolPolicy,
|
toolClearance: toolPolicy,
|
||||||
context: toolAuditContext,
|
|
||||||
});
|
});
|
||||||
return {
|
return {
|
||||||
behavior: "deny",
|
behavior: "deny",
|
||||||
@@ -1337,7 +1246,6 @@ export class PipelineExecutor {
|
|||||||
rulesEngine?.assertToolInvocationAllowed({
|
rulesEngine?.assertToolInvocationAllowed({
|
||||||
tool: allowMatch,
|
tool: allowMatch,
|
||||||
toolClearance: toolPolicy,
|
toolClearance: toolPolicy,
|
||||||
context: toolAuditContext,
|
|
||||||
});
|
});
|
||||||
|
|
||||||
return {
|
return {
|
||||||
|
|||||||
@@ -5,23 +5,12 @@ import { deepCloneJson, isRecord, type JsonObject, type JsonValue } from "./type
|
|||||||
|
|
||||||
export const PROJECT_CONTEXT_SCHEMA_VERSION = 1;
|
export const PROJECT_CONTEXT_SCHEMA_VERSION = 1;
|
||||||
|
|
||||||
export type ProjectTaskStatus =
|
export type ProjectTaskStatus = "pending" | "in_progress" | "blocked" | "done";
|
||||||
| "pending"
|
|
||||||
| "in_progress"
|
|
||||||
| "review"
|
|
||||||
| "conflict"
|
|
||||||
| "resolving_conflict"
|
|
||||||
| "merged"
|
|
||||||
| "failed"
|
|
||||||
| "blocked"
|
|
||||||
| "done";
|
|
||||||
|
|
||||||
export type ProjectTask = {
|
export type ProjectTask = {
|
||||||
taskId: string;
|
id: string;
|
||||||
id?: string;
|
title: string;
|
||||||
title?: string;
|
|
||||||
status: ProjectTaskStatus;
|
status: ProjectTaskStatus;
|
||||||
worktreePath?: string;
|
|
||||||
assignee?: string;
|
assignee?: string;
|
||||||
metadata?: JsonObject;
|
metadata?: JsonObject;
|
||||||
};
|
};
|
||||||
@@ -63,17 +52,7 @@ function toJsonObject(value: unknown, label: string): JsonObject {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function toTaskStatus(value: unknown, label: string): ProjectTaskStatus {
|
function toTaskStatus(value: unknown, label: string): ProjectTaskStatus {
|
||||||
if (
|
if (value === "pending" || value === "in_progress" || value === "blocked" || value === "done") {
|
||||||
value === "pending" ||
|
|
||||||
value === "in_progress" ||
|
|
||||||
value === "review" ||
|
|
||||||
value === "conflict" ||
|
|
||||||
value === "resolving_conflict" ||
|
|
||||||
value === "merged" ||
|
|
||||||
value === "failed" ||
|
|
||||||
value === "blocked" ||
|
|
||||||
value === "done"
|
|
||||||
) {
|
|
||||||
return value;
|
return value;
|
||||||
}
|
}
|
||||||
throw new Error(`${label} has unsupported status "${String(value)}".`);
|
throw new Error(`${label} has unsupported status "${String(value)}".`);
|
||||||
@@ -89,28 +68,10 @@ function toProjectTask(value: unknown, label: string): ProjectTask {
|
|||||||
throw new Error(`${label}.assignee must be a non-empty string when provided.`);
|
throw new Error(`${label}.assignee must be a non-empty string when provided.`);
|
||||||
}
|
}
|
||||||
|
|
||||||
const taskIdCandidate = value.taskId ?? value.id;
|
|
||||||
const taskId = assertNonEmptyString(taskIdCandidate, `${label}.taskId`);
|
|
||||||
|
|
||||||
const titleRaw = value.title;
|
|
||||||
if (titleRaw !== undefined && (typeof titleRaw !== "string" || titleRaw.trim().length === 0)) {
|
|
||||||
throw new Error(`${label}.title must be a non-empty string when provided.`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const worktreePathRaw = value.worktreePath;
|
|
||||||
if (
|
|
||||||
worktreePathRaw !== undefined &&
|
|
||||||
(typeof worktreePathRaw !== "string" || worktreePathRaw.trim().length === 0)
|
|
||||||
) {
|
|
||||||
throw new Error(`${label}.worktreePath must be a non-empty string when provided.`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
taskId,
|
id: assertNonEmptyString(value.id, `${label}.id`),
|
||||||
id: taskId,
|
title: assertNonEmptyString(value.title, `${label}.title`),
|
||||||
...(typeof titleRaw === "string" ? { title: titleRaw.trim() } : {}),
|
|
||||||
status: toTaskStatus(value.status, `${label}.status`),
|
status: toTaskStatus(value.status, `${label}.status`),
|
||||||
...(typeof worktreePathRaw === "string" ? { worktreePath: worktreePathRaw.trim() } : {}),
|
|
||||||
...(typeof assignee === "string" ? { assignee: assignee.trim() } : {}),
|
...(typeof assignee === "string" ? { assignee: assignee.trim() } : {}),
|
||||||
...(value.metadata !== undefined
|
...(value.metadata !== undefined
|
||||||
? { metadata: toJsonObject(value.metadata, `${label}.metadata`) }
|
? { metadata: toJsonObject(value.metadata, `${label}.metadata`) }
|
||||||
@@ -196,10 +157,10 @@ function mergeUpsertTasks(current: ProjectTask[], upserts: ProjectTask[]): Proje
|
|||||||
|
|
||||||
const byId = new Map<string, ProjectTask>();
|
const byId = new Map<string, ProjectTask>();
|
||||||
for (const task of current) {
|
for (const task of current) {
|
||||||
byId.set(task.taskId, task);
|
byId.set(task.id, task);
|
||||||
}
|
}
|
||||||
for (const task of upserts) {
|
for (const task of upserts) {
|
||||||
byId.set(task.taskId, task);
|
byId.set(task.id, task);
|
||||||
}
|
}
|
||||||
|
|
||||||
return [...byId.values()];
|
return [...byId.values()];
|
||||||
|
|||||||
@@ -197,9 +197,9 @@ export class ResourceProvisioningOrchestrator {
|
|||||||
async provisionSession(input: {
|
async provisionSession(input: {
|
||||||
sessionId: string;
|
sessionId: string;
|
||||||
resources: ResourceRequest[];
|
resources: ResourceRequest[];
|
||||||
workspaceRoot: string;
|
workspaceRoot?: string;
|
||||||
}): Promise<ProvisionedResources> {
|
}): Promise<ProvisionedResources> {
|
||||||
const workspaceRoot = resolve(input.workspaceRoot);
|
const workspaceRoot = resolve(input.workspaceRoot ?? process.cwd());
|
||||||
const hardConstraints: ProvisionedResourcesState["hardConstraints"] = [];
|
const hardConstraints: ProvisionedResourcesState["hardConstraints"] = [];
|
||||||
const releases: ProvisionedResourcesState["releases"] = [];
|
const releases: ProvisionedResourcesState["releases"] = [];
|
||||||
const env: Record<string, string> = {};
|
const env: Record<string, string> = {};
|
||||||
|
|||||||
@@ -1,783 +0,0 @@
|
|||||||
import { execFile } from "node:child_process";
|
|
||||||
import { randomUUID } from "node:crypto";
|
|
||||||
import { mkdir, readFile, readdir, stat } from "node:fs/promises";
|
|
||||||
import { dirname, isAbsolute, resolve } from "node:path";
|
|
||||||
import { promisify } from "node:util";
|
|
||||||
import { withFileLock, writeUtf8FileAtomic } from "./file-persistence.js";
|
|
||||||
|
|
||||||
const execFileAsync = promisify(execFile);
|
|
||||||
|
|
||||||
const SESSION_METADATA_FILE_NAME = "session-metadata.json";
|
|
||||||
|
|
||||||
export type SessionStatus = "active" | "suspended" | "closed" | "closed_with_conflicts";
|
|
||||||
|
|
||||||
export type SessionMetadata = {
|
|
||||||
sessionId: string;
|
|
||||||
projectPath: string;
|
|
||||||
sessionStatus: SessionStatus;
|
|
||||||
baseWorkspacePath: string;
|
|
||||||
createdAt: string;
|
|
||||||
updatedAt: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export type CreateSessionRequest = {
|
|
||||||
projectPath: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export type MergeTaskIntoBaseOutcome =
|
|
||||||
| {
|
|
||||||
kind: "success";
|
|
||||||
taskId: string;
|
|
||||||
worktreePath: string;
|
|
||||||
baseWorkspacePath: string;
|
|
||||||
}
|
|
||||||
| {
|
|
||||||
kind: "conflict";
|
|
||||||
taskId: string;
|
|
||||||
worktreePath: string;
|
|
||||||
baseWorkspacePath: string;
|
|
||||||
conflictFiles: string[];
|
|
||||||
mergeBase?: string;
|
|
||||||
}
|
|
||||||
| {
|
|
||||||
kind: "fatal_error";
|
|
||||||
taskId: string;
|
|
||||||
worktreePath: string;
|
|
||||||
baseWorkspacePath: string;
|
|
||||||
error: string;
|
|
||||||
mergeBase?: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export type CloseSessionOutcome =
|
|
||||||
| {
|
|
||||||
kind: "success";
|
|
||||||
sessionId: string;
|
|
||||||
mergedToProject: boolean;
|
|
||||||
}
|
|
||||||
| {
|
|
||||||
kind: "conflict";
|
|
||||||
sessionId: string;
|
|
||||||
worktreePath: string;
|
|
||||||
conflictFiles: string[];
|
|
||||||
mergeBase?: string;
|
|
||||||
baseBranch?: string;
|
|
||||||
}
|
|
||||||
| {
|
|
||||||
kind: "fatal_error";
|
|
||||||
sessionId: string;
|
|
||||||
error: string;
|
|
||||||
baseBranch?: string;
|
|
||||||
mergeBase?: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
type GitExecutionResult = {
|
|
||||||
exitCode: number;
|
|
||||||
stdout: string;
|
|
||||||
stderr: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
type GitWorktreeRecord = {
|
|
||||||
path: string;
|
|
||||||
branchRef?: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
function toErrorMessage(error: unknown): string {
|
|
||||||
if (error instanceof Error) {
|
|
||||||
return error.message;
|
|
||||||
}
|
|
||||||
return String(error);
|
|
||||||
}
|
|
||||||
|
|
||||||
function assertAbsolutePath(path: string, label: string): string {
|
|
||||||
if (!isAbsolute(path)) {
|
|
||||||
throw new Error(`${label} must be an absolute path.`);
|
|
||||||
}
|
|
||||||
return resolve(path);
|
|
||||||
}
|
|
||||||
|
|
||||||
function assertNonEmptyString(value: unknown, label: string): string {
|
|
||||||
if (typeof value !== "string" || value.trim().length === 0) {
|
|
||||||
throw new Error(`${label} must be a non-empty string.`);
|
|
||||||
}
|
|
||||||
return value.trim();
|
|
||||||
}
|
|
||||||
|
|
||||||
function toSessionStatus(value: unknown): SessionStatus {
|
|
||||||
if (
|
|
||||||
value === "active" ||
|
|
||||||
value === "suspended" ||
|
|
||||||
value === "closed" ||
|
|
||||||
value === "closed_with_conflicts"
|
|
||||||
) {
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
throw new Error(`Session status "${String(value)}" is not supported.`);
|
|
||||||
}
|
|
||||||
|
|
||||||
function toSessionMetadata(value: unknown): SessionMetadata {
|
|
||||||
if (!value || typeof value !== "object" || Array.isArray(value)) {
|
|
||||||
throw new Error("Session metadata file is malformed.");
|
|
||||||
}
|
|
||||||
|
|
||||||
const raw = value as Record<string, unknown>;
|
|
||||||
|
|
||||||
return {
|
|
||||||
sessionId: assertNonEmptyString(raw.sessionId, "sessionId"),
|
|
||||||
projectPath: assertAbsolutePath(assertNonEmptyString(raw.projectPath, "projectPath"), "projectPath"),
|
|
||||||
baseWorkspacePath: assertAbsolutePath(
|
|
||||||
assertNonEmptyString(raw.baseWorkspacePath, "baseWorkspacePath"),
|
|
||||||
"baseWorkspacePath",
|
|
||||||
),
|
|
||||||
sessionStatus: toSessionStatus(raw.sessionStatus),
|
|
||||||
createdAt: assertNonEmptyString(raw.createdAt, "createdAt"),
|
|
||||||
updatedAt: assertNonEmptyString(raw.updatedAt, "updatedAt"),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async function runGit(args: string[]): Promise<string> {
|
|
||||||
const result = await runGitWithResult(args);
|
|
||||||
if (result.exitCode !== 0) {
|
|
||||||
throw new Error(`git ${args.join(" ")} failed: ${result.stderr || result.stdout || "unknown git error"}`);
|
|
||||||
}
|
|
||||||
return result.stdout.trim();
|
|
||||||
}
|
|
||||||
|
|
||||||
async function runGitWithResult(args: string[]): Promise<GitExecutionResult> {
|
|
||||||
try {
|
|
||||||
const { stdout, stderr } = await execFileAsync("git", args, {
|
|
||||||
encoding: "utf8",
|
|
||||||
});
|
|
||||||
return {
|
|
||||||
exitCode: 0,
|
|
||||||
stdout: stdout.trim(),
|
|
||||||
stderr: stderr.trim(),
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
const failure = error as {
|
|
||||||
code?: number | string;
|
|
||||||
stdout?: string;
|
|
||||||
stderr?: string;
|
|
||||||
};
|
|
||||||
if (typeof failure.code === "number") {
|
|
||||||
return {
|
|
||||||
exitCode: failure.code,
|
|
||||||
stdout: String(failure.stdout ?? "").trim(),
|
|
||||||
stderr: String(failure.stderr ?? "").trim(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
throw new Error(`git ${args.join(" ")} failed: ${toErrorMessage(error)}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function pathExists(path: string): Promise<boolean> {
|
|
||||||
try {
|
|
||||||
await stat(path);
|
|
||||||
return true;
|
|
||||||
} catch (error) {
|
|
||||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function sanitizeSegment(value: string, fallback: string): string {
|
|
||||||
const normalized = value
|
|
||||||
.trim()
|
|
||||||
.replace(/[^a-zA-Z0-9_-]/g, "-")
|
|
||||||
.replace(/-+/g, "-")
|
|
||||||
.replace(/^-+/, "")
|
|
||||||
.replace(/-+$/, "");
|
|
||||||
return normalized || fallback;
|
|
||||||
}
|
|
||||||
|
|
||||||
function toGitFailureMessage(result: GitExecutionResult): string {
|
|
||||||
const details = result.stderr || result.stdout || "unknown git error";
|
|
||||||
return `git command failed with exit code ${String(result.exitCode)}: ${details}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
function toStringLines(value: string): string[] {
|
|
||||||
return value
|
|
||||||
.split("\n")
|
|
||||||
.map((line) => line.trim())
|
|
||||||
.filter((line) => line.length > 0);
|
|
||||||
}
|
|
||||||
|
|
||||||
function parseGitWorktreeRecords(value: string): GitWorktreeRecord[] {
|
|
||||||
const lines = value.split("\n");
|
|
||||||
const records: GitWorktreeRecord[] = [];
|
|
||||||
let current: GitWorktreeRecord | undefined;
|
|
||||||
|
|
||||||
for (const line of lines) {
|
|
||||||
if (!line.trim()) {
|
|
||||||
if (current) {
|
|
||||||
records.push(current);
|
|
||||||
current = undefined;
|
|
||||||
}
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (line.startsWith("worktree ")) {
|
|
||||||
if (current) {
|
|
||||||
records.push(current);
|
|
||||||
}
|
|
||||||
current = {
|
|
||||||
path: line.slice("worktree ".length).trim(),
|
|
||||||
};
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (line.startsWith("branch ") && current) {
|
|
||||||
current.branchRef = line.slice("branch ".length).trim();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (current) {
|
|
||||||
records.push(current);
|
|
||||||
}
|
|
||||||
|
|
||||||
return records;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class FileSystemSessionMetadataStore {
|
|
||||||
private readonly stateRoot: string;
|
|
||||||
|
|
||||||
constructor(input: { stateRoot: string }) {
|
|
||||||
this.stateRoot = resolve(input.stateRoot);
|
|
||||||
}
|
|
||||||
|
|
||||||
getStateRoot(): string {
|
|
||||||
return this.stateRoot;
|
|
||||||
}
|
|
||||||
|
|
||||||
getSessionDirectory(sessionId: string): string {
|
|
||||||
return resolve(this.stateRoot, sessionId);
|
|
||||||
}
|
|
||||||
|
|
||||||
getSessionMetadataPath(sessionId: string): string {
|
|
||||||
return resolve(this.getSessionDirectory(sessionId), SESSION_METADATA_FILE_NAME);
|
|
||||||
}
|
|
||||||
|
|
||||||
getSessionProjectContextPath(sessionId: string): string {
|
|
||||||
return resolve(this.getSessionDirectory(sessionId), "project-context.json");
|
|
||||||
}
|
|
||||||
|
|
||||||
async createSession(input: {
|
|
||||||
projectPath: string;
|
|
||||||
baseWorkspacePath: string;
|
|
||||||
sessionId?: string;
|
|
||||||
}): Promise<SessionMetadata> {
|
|
||||||
const sessionId = input.sessionId?.trim() || randomUUID();
|
|
||||||
const now = new Date().toISOString();
|
|
||||||
const metadata: SessionMetadata = {
|
|
||||||
sessionId,
|
|
||||||
projectPath: assertAbsolutePath(input.projectPath, "projectPath"),
|
|
||||||
baseWorkspacePath: assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath"),
|
|
||||||
sessionStatus: "active",
|
|
||||||
createdAt: now,
|
|
||||||
updatedAt: now,
|
|
||||||
};
|
|
||||||
|
|
||||||
const sessionDirectory = this.getSessionDirectory(sessionId);
|
|
||||||
await mkdir(sessionDirectory, { recursive: true });
|
|
||||||
await this.writeSessionMetadata(metadata);
|
|
||||||
|
|
||||||
return metadata;
|
|
||||||
}
|
|
||||||
|
|
||||||
async readSession(sessionId: string): Promise<SessionMetadata | undefined> {
|
|
||||||
const metadataPath = this.getSessionMetadataPath(sessionId);
|
|
||||||
|
|
||||||
try {
|
|
||||||
const content = await readFile(metadataPath, "utf8");
|
|
||||||
return toSessionMetadata(JSON.parse(content) as unknown);
|
|
||||||
} catch (error) {
|
|
||||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async listSessions(): Promise<SessionMetadata[]> {
|
|
||||||
try {
|
|
||||||
const entries = await readdir(this.stateRoot, { withFileTypes: true });
|
|
||||||
const sessions: SessionMetadata[] = [];
|
|
||||||
|
|
||||||
for (const entry of entries) {
|
|
||||||
if (!entry.isDirectory()) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
const metadata = await this.readSession(entry.name);
|
|
||||||
if (metadata) {
|
|
||||||
sessions.push(metadata);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
sessions.sort((left, right) => right.createdAt.localeCompare(left.createdAt));
|
|
||||||
return sessions;
|
|
||||||
} catch (error) {
|
|
||||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async updateSession(
|
|
||||||
sessionId: string,
|
|
||||||
patch: Partial<Pick<SessionMetadata, "projectPath" | "baseWorkspacePath" | "sessionStatus">>,
|
|
||||||
): Promise<SessionMetadata> {
|
|
||||||
const current = await this.readSession(sessionId);
|
|
||||||
if (!current) {
|
|
||||||
throw new Error(`Session "${sessionId}" does not exist.`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const next: SessionMetadata = {
|
|
||||||
...current,
|
|
||||||
...(patch.projectPath ? { projectPath: assertAbsolutePath(patch.projectPath, "projectPath") } : {}),
|
|
||||||
...(patch.baseWorkspacePath
|
|
||||||
? { baseWorkspacePath: assertAbsolutePath(patch.baseWorkspacePath, "baseWorkspacePath") }
|
|
||||||
: {}),
|
|
||||||
...(patch.sessionStatus ? { sessionStatus: patch.sessionStatus } : {}),
|
|
||||||
updatedAt: new Date().toISOString(),
|
|
||||||
};
|
|
||||||
|
|
||||||
await this.writeSessionMetadata(next);
|
|
||||||
return next;
|
|
||||||
}
|
|
||||||
|
|
||||||
private async writeSessionMetadata(metadata: SessionMetadata): Promise<void> {
|
|
||||||
const metadataPath = this.getSessionMetadataPath(metadata.sessionId);
|
|
||||||
await mkdir(dirname(metadataPath), { recursive: true });
|
|
||||||
await withFileLock(`${metadataPath}.lock`, async () => {
|
|
||||||
await writeUtf8FileAtomic(metadataPath, `${JSON.stringify(metadata, null, 2)}\n`);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export class SessionWorktreeManager {
|
|
||||||
private readonly worktreeRoot: string;
|
|
||||||
private readonly baseRef: string;
|
|
||||||
|
|
||||||
constructor(input: {
|
|
||||||
worktreeRoot: string;
|
|
||||||
baseRef: string;
|
|
||||||
}) {
|
|
||||||
this.worktreeRoot = assertAbsolutePath(input.worktreeRoot, "worktreeRoot");
|
|
||||||
this.baseRef = assertNonEmptyString(input.baseRef, "baseRef");
|
|
||||||
}
|
|
||||||
|
|
||||||
resolveBaseWorkspacePath(sessionId: string): string {
|
|
||||||
const scoped = sanitizeSegment(sessionId, "session");
|
|
||||||
return resolve(this.worktreeRoot, scoped, "base");
|
|
||||||
}
|
|
||||||
|
|
||||||
resolveTaskWorktreePath(sessionId: string, taskId: string): string {
|
|
||||||
const scopedSession = sanitizeSegment(sessionId, "session");
|
|
||||||
const scopedTask = sanitizeSegment(taskId, "task");
|
|
||||||
return resolve(this.worktreeRoot, scopedSession, "tasks", scopedTask);
|
|
||||||
}
|
|
||||||
|
|
||||||
private resolveBaseBranchName(sessionId: string): string {
|
|
||||||
const scoped = sanitizeSegment(sessionId, "session");
|
|
||||||
return `ai-ops/${scoped}/base`;
|
|
||||||
}
|
|
||||||
|
|
||||||
private resolveTaskBranchName(sessionId: string, taskId: string): string {
|
|
||||||
const scopedSession = sanitizeSegment(sessionId, "session");
|
|
||||||
const scopedTask = sanitizeSegment(taskId, "task");
|
|
||||||
return `ai-ops/${scopedSession}/task/${scopedTask}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
async initializeSessionBaseWorkspace(input: {
|
|
||||||
sessionId: string;
|
|
||||||
projectPath: string;
|
|
||||||
baseWorkspacePath: string;
|
|
||||||
}): Promise<void> {
|
|
||||||
const projectPath = assertAbsolutePath(input.projectPath, "projectPath");
|
|
||||||
const baseWorkspacePath = assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath");
|
|
||||||
|
|
||||||
await mkdir(dirname(baseWorkspacePath), { recursive: true });
|
|
||||||
|
|
||||||
const alreadyExists = await pathExists(baseWorkspacePath);
|
|
||||||
if (alreadyExists) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const repoRoot = await runGit(["-C", projectPath, "rev-parse", "--show-toplevel"]);
|
|
||||||
const branchName = this.resolveBaseBranchName(input.sessionId);
|
|
||||||
await runGit(["-C", repoRoot, "worktree", "add", "-B", branchName, baseWorkspacePath, this.baseRef]);
|
|
||||||
}
|
|
||||||
|
|
||||||
async ensureTaskWorktree(input: {
|
|
||||||
sessionId: string;
|
|
||||||
taskId: string;
|
|
||||||
baseWorkspacePath: string;
|
|
||||||
existingWorktreePath?: string;
|
|
||||||
}): Promise<{
|
|
||||||
taskWorktreePath: string;
|
|
||||||
}> {
|
|
||||||
const baseWorkspacePath = assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath");
|
|
||||||
const maybeExisting = input.existingWorktreePath?.trim();
|
|
||||||
const worktreePath = maybeExisting
|
|
||||||
? assertAbsolutePath(maybeExisting, "existingWorktreePath")
|
|
||||||
: this.resolveTaskWorktreePath(input.sessionId, input.taskId);
|
|
||||||
const branchName = this.resolveTaskBranchName(input.sessionId, input.taskId);
|
|
||||||
const attachedWorktree = await this.findWorktreePathForBranch(baseWorkspacePath, branchName);
|
|
||||||
|
|
||||||
if (attachedWorktree && attachedWorktree !== worktreePath) {
|
|
||||||
throw new Error(
|
|
||||||
`Task branch "${branchName}" is already attached to worktree "${attachedWorktree}", ` +
|
|
||||||
`expected "${worktreePath}".`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!(await pathExists(worktreePath))) {
|
|
||||||
await runGit(["-C", baseWorkspacePath, "worktree", "prune", "--expire", "now"]);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!(await pathExists(worktreePath))) {
|
|
||||||
await mkdir(dirname(worktreePath), { recursive: true });
|
|
||||||
const addResult = await runGitWithResult([
|
|
||||||
"-C",
|
|
||||||
baseWorkspacePath,
|
|
||||||
"worktree",
|
|
||||||
"add",
|
|
||||||
"-B",
|
|
||||||
branchName,
|
|
||||||
worktreePath,
|
|
||||||
"HEAD",
|
|
||||||
]);
|
|
||||||
if (addResult.exitCode !== 0) {
|
|
||||||
const attachedAfterFailure = await this.findWorktreePathForBranch(baseWorkspacePath, branchName);
|
|
||||||
if (attachedAfterFailure === worktreePath && (await pathExists(worktreePath))) {
|
|
||||||
return {
|
|
||||||
taskWorktreePath: worktreePath,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
throw new Error(
|
|
||||||
`git -C ${baseWorkspacePath} worktree add -B ${branchName} ${worktreePath} HEAD failed: ` +
|
|
||||||
`${toGitFailureMessage(addResult)}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
taskWorktreePath: worktreePath,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async mergeTaskIntoBase(input: {
|
|
||||||
taskId: string;
|
|
||||||
baseWorkspacePath: string;
|
|
||||||
taskWorktreePath: string;
|
|
||||||
}): Promise<MergeTaskIntoBaseOutcome> {
|
|
||||||
const baseWorkspacePath = assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath");
|
|
||||||
const taskWorktreePath = assertAbsolutePath(input.taskWorktreePath, "taskWorktreePath");
|
|
||||||
const taskId = input.taskId;
|
|
||||||
|
|
||||||
if (!(await pathExists(baseWorkspacePath))) {
|
|
||||||
throw new Error(`Base workspace "${baseWorkspacePath}" does not exist.`);
|
|
||||||
}
|
|
||||||
if (!(await pathExists(taskWorktreePath))) {
|
|
||||||
throw new Error(`Task worktree "${taskWorktreePath}" does not exist.`);
|
|
||||||
}
|
|
||||||
|
|
||||||
let mergeBase: string | undefined;
|
|
||||||
try {
|
|
||||||
await runGit(["-C", taskWorktreePath, "add", "-A"]);
|
|
||||||
const hasPending = await this.hasStagedChanges(taskWorktreePath);
|
|
||||||
if (hasPending) {
|
|
||||||
await runGit(["-C", taskWorktreePath, "commit", "-m", `ai_ops: finalize task ${taskId}`]);
|
|
||||||
}
|
|
||||||
|
|
||||||
const branchName = await runGit(["-C", taskWorktreePath, "rev-parse", "--abbrev-ref", "HEAD"]);
|
|
||||||
const baseBranch = await runGit(["-C", baseWorkspacePath, "rev-parse", "--abbrev-ref", "HEAD"]);
|
|
||||||
mergeBase = await this.tryReadMergeBase(baseWorkspacePath, baseBranch, branchName);
|
|
||||||
|
|
||||||
if (await this.hasOngoingMerge(taskWorktreePath)) {
|
|
||||||
return {
|
|
||||||
kind: "conflict",
|
|
||||||
taskId,
|
|
||||||
worktreePath: taskWorktreePath,
|
|
||||||
baseWorkspacePath,
|
|
||||||
conflictFiles: await this.readConflictFiles(taskWorktreePath),
|
|
||||||
...(mergeBase ? { mergeBase } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const syncTaskBranch = await runGitWithResult([
|
|
||||||
"-C",
|
|
||||||
taskWorktreePath,
|
|
||||||
"merge",
|
|
||||||
"--no-ff",
|
|
||||||
"--no-edit",
|
|
||||||
baseBranch,
|
|
||||||
]);
|
|
||||||
|
|
||||||
if (syncTaskBranch.exitCode === 1) {
|
|
||||||
return {
|
|
||||||
kind: "conflict",
|
|
||||||
taskId,
|
|
||||||
worktreePath: taskWorktreePath,
|
|
||||||
baseWorkspacePath,
|
|
||||||
conflictFiles: await this.readConflictFiles(taskWorktreePath),
|
|
||||||
...(mergeBase ? { mergeBase } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if (syncTaskBranch.exitCode !== 0) {
|
|
||||||
return {
|
|
||||||
kind: "fatal_error",
|
|
||||||
taskId,
|
|
||||||
worktreePath: taskWorktreePath,
|
|
||||||
baseWorkspacePath,
|
|
||||||
error: toGitFailureMessage(syncTaskBranch),
|
|
||||||
...(mergeBase ? { mergeBase } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (await this.hasOngoingMerge(baseWorkspacePath)) {
|
|
||||||
return {
|
|
||||||
kind: "conflict",
|
|
||||||
taskId,
|
|
||||||
worktreePath: baseWorkspacePath,
|
|
||||||
baseWorkspacePath,
|
|
||||||
conflictFiles: await this.readConflictFiles(baseWorkspacePath),
|
|
||||||
...(mergeBase ? { mergeBase } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const mergeIntoBase = await runGitWithResult([
|
|
||||||
"-C",
|
|
||||||
baseWorkspacePath,
|
|
||||||
"merge",
|
|
||||||
"--no-ff",
|
|
||||||
"--no-edit",
|
|
||||||
branchName,
|
|
||||||
]);
|
|
||||||
|
|
||||||
if (mergeIntoBase.exitCode === 1) {
|
|
||||||
return {
|
|
||||||
kind: "conflict",
|
|
||||||
taskId,
|
|
||||||
worktreePath: baseWorkspacePath,
|
|
||||||
baseWorkspacePath,
|
|
||||||
conflictFiles: await this.readConflictFiles(baseWorkspacePath),
|
|
||||||
...(mergeBase ? { mergeBase } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if (mergeIntoBase.exitCode !== 0) {
|
|
||||||
return {
|
|
||||||
kind: "fatal_error",
|
|
||||||
taskId,
|
|
||||||
worktreePath: taskWorktreePath,
|
|
||||||
baseWorkspacePath,
|
|
||||||
error: toGitFailureMessage(mergeIntoBase),
|
|
||||||
...(mergeBase ? { mergeBase } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
await this.removeWorktree({
|
|
||||||
repoPath: baseWorkspacePath,
|
|
||||||
worktreePath: taskWorktreePath,
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
kind: "success",
|
|
||||||
taskId,
|
|
||||||
worktreePath: taskWorktreePath,
|
|
||||||
baseWorkspacePath,
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
return {
|
|
||||||
kind: "fatal_error",
|
|
||||||
taskId,
|
|
||||||
worktreePath: taskWorktreePath,
|
|
||||||
baseWorkspacePath,
|
|
||||||
error: toErrorMessage(error),
|
|
||||||
...(mergeBase ? { mergeBase } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async closeSession(input: {
|
|
||||||
session: SessionMetadata;
|
|
||||||
taskWorktreePaths: string[];
|
|
||||||
mergeBaseIntoProject?: boolean;
|
|
||||||
}): Promise<CloseSessionOutcome> {
|
|
||||||
const projectPath = assertAbsolutePath(input.session.projectPath, "projectPath");
|
|
||||||
const baseWorkspacePath = assertAbsolutePath(input.session.baseWorkspacePath, "baseWorkspacePath");
|
|
||||||
if (!(await pathExists(projectPath))) {
|
|
||||||
throw new Error(`Project path "${projectPath}" does not exist.`);
|
|
||||||
}
|
|
||||||
if (!(await pathExists(baseWorkspacePath))) {
|
|
||||||
throw new Error(`Base workspace "${baseWorkspacePath}" does not exist.`);
|
|
||||||
}
|
|
||||||
|
|
||||||
let baseBranch: string | undefined;
|
|
||||||
let mergeBase: string | undefined;
|
|
||||||
|
|
||||||
try {
|
|
||||||
for (const taskWorktreePath of input.taskWorktreePaths) {
|
|
||||||
if (!taskWorktreePath.trim()) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
await this.removeWorktree({
|
|
||||||
repoPath: baseWorkspacePath,
|
|
||||||
worktreePath: taskWorktreePath,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (input.mergeBaseIntoProject) {
|
|
||||||
baseBranch = await runGit(["-C", baseWorkspacePath, "rev-parse", "--abbrev-ref", "HEAD"]);
|
|
||||||
mergeBase = await this.tryReadMergeBase(projectPath, "HEAD", baseBranch);
|
|
||||||
|
|
||||||
if (await this.hasOngoingMerge(projectPath)) {
|
|
||||||
return {
|
|
||||||
kind: "conflict",
|
|
||||||
sessionId: input.session.sessionId,
|
|
||||||
worktreePath: projectPath,
|
|
||||||
conflictFiles: await this.readConflictFiles(projectPath),
|
|
||||||
...(baseBranch ? { baseBranch } : {}),
|
|
||||||
...(mergeBase ? { mergeBase } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const mergeResult = await runGitWithResult([
|
|
||||||
"-C",
|
|
||||||
projectPath,
|
|
||||||
"merge",
|
|
||||||
"--no-ff",
|
|
||||||
"--no-edit",
|
|
||||||
baseBranch,
|
|
||||||
]);
|
|
||||||
if (mergeResult.exitCode === 1) {
|
|
||||||
return {
|
|
||||||
kind: "conflict",
|
|
||||||
sessionId: input.session.sessionId,
|
|
||||||
worktreePath: projectPath,
|
|
||||||
conflictFiles: await this.readConflictFiles(projectPath),
|
|
||||||
...(baseBranch ? { baseBranch } : {}),
|
|
||||||
...(mergeBase ? { mergeBase } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if (mergeResult.exitCode !== 0) {
|
|
||||||
return {
|
|
||||||
kind: "fatal_error",
|
|
||||||
sessionId: input.session.sessionId,
|
|
||||||
error: toGitFailureMessage(mergeResult),
|
|
||||||
...(baseBranch ? { baseBranch } : {}),
|
|
||||||
...(mergeBase ? { mergeBase } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
await this.removeWorktree({
|
|
||||||
repoPath: projectPath,
|
|
||||||
worktreePath: baseWorkspacePath,
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
kind: "success",
|
|
||||||
sessionId: input.session.sessionId,
|
|
||||||
mergedToProject: input.mergeBaseIntoProject === true,
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
return {
|
|
||||||
kind: "fatal_error",
|
|
||||||
sessionId: input.session.sessionId,
|
|
||||||
error: toErrorMessage(error),
|
|
||||||
...(baseBranch ? { baseBranch } : {}),
|
|
||||||
...(mergeBase ? { mergeBase } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async removeWorktree(input: {
|
|
||||||
repoPath: string;
|
|
||||||
worktreePath: string;
|
|
||||||
}): Promise<void> {
|
|
||||||
if (!(await pathExists(input.worktreePath))) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
await runGit(["-C", input.repoPath, "worktree", "remove", "--force", input.worktreePath]);
|
|
||||||
await runGit(["-C", input.repoPath, "worktree", "prune"]);
|
|
||||||
}
|
|
||||||
|
|
||||||
private async hasStagedChanges(worktreePath: string): Promise<boolean> {
|
|
||||||
try {
|
|
||||||
await execFileAsync("git", ["-C", worktreePath, "diff", "--cached", "--quiet"], {
|
|
||||||
encoding: "utf8",
|
|
||||||
});
|
|
||||||
return false;
|
|
||||||
} catch (error) {
|
|
||||||
const exitCode = (error as { code?: number }).code;
|
|
||||||
if (exitCode === 1) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
throw new Error(`Unable to inspect staged changes: ${toErrorMessage(error)}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async hasOngoingMerge(worktreePath: string): Promise<boolean> {
|
|
||||||
const result = await runGitWithResult([
|
|
||||||
"-C",
|
|
||||||
worktreePath,
|
|
||||||
"rev-parse",
|
|
||||||
"-q",
|
|
||||||
"--verify",
|
|
||||||
"MERGE_HEAD",
|
|
||||||
]);
|
|
||||||
return result.exitCode === 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
private async readConflictFiles(worktreePath: string): Promise<string[]> {
|
|
||||||
const result = await runGitWithResult([
|
|
||||||
"-C",
|
|
||||||
worktreePath,
|
|
||||||
"diff",
|
|
||||||
"--name-only",
|
|
||||||
"--diff-filter=U",
|
|
||||||
]);
|
|
||||||
if (result.exitCode !== 0) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
return toStringLines(result.stdout);
|
|
||||||
}
|
|
||||||
|
|
||||||
private async tryReadMergeBase(
|
|
||||||
repoPath: string,
|
|
||||||
leftRef: string,
|
|
||||||
rightRef: string,
|
|
||||||
): Promise<string | undefined> {
|
|
||||||
const result = await runGitWithResult(["-C", repoPath, "merge-base", leftRef, rightRef]);
|
|
||||||
if (result.exitCode !== 0) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
const mergeBase = result.stdout.trim();
|
|
||||||
return mergeBase || undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
private async findWorktreePathForBranch(
|
|
||||||
repoPath: string,
|
|
||||||
branchName: string,
|
|
||||||
): Promise<string | undefined> {
|
|
||||||
const branchRef = `refs/heads/${branchName}`;
|
|
||||||
const records = await this.listWorktreeRecords(repoPath);
|
|
||||||
const matched = records.find((record) => record.branchRef === branchRef);
|
|
||||||
if (!matched) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
return resolve(matched.path);
|
|
||||||
}
|
|
||||||
|
|
||||||
private async listWorktreeRecords(repoPath: string): Promise<GitWorktreeRecord[]> {
|
|
||||||
const result = await runGitWithResult(["-C", repoPath, "worktree", "list", "--porcelain"]);
|
|
||||||
if (result.exitCode !== 0) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
return parseGitWorktreeRecords(result.stdout);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -16,21 +16,9 @@ export type ProviderRuntimeConfig = {
|
|||||||
anthropicApiKey?: string;
|
anthropicApiKey?: string;
|
||||||
claudeModel?: string;
|
claudeModel?: string;
|
||||||
claudeCodePath?: string;
|
claudeCodePath?: string;
|
||||||
claudeObservability: ClaudeObservabilityRuntimeConfig;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
export type OpenAiAuthMode = "auto" | "chatgpt" | "api_key";
|
export type OpenAiAuthMode = "auto" | "chatgpt" | "api_key";
|
||||||
export type ClaudeObservabilityMode = "off" | "stdout" | "file" | "both";
|
|
||||||
export type ClaudeObservabilityVerbosity = "summary" | "full";
|
|
||||||
|
|
||||||
export type ClaudeObservabilityRuntimeConfig = {
|
|
||||||
mode: ClaudeObservabilityMode;
|
|
||||||
verbosity: ClaudeObservabilityVerbosity;
|
|
||||||
logPath: string;
|
|
||||||
includePartialMessages: boolean;
|
|
||||||
debug: boolean;
|
|
||||||
debugLogPath?: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export type McpRuntimeConfig = {
|
export type McpRuntimeConfig = {
|
||||||
configPath: string;
|
configPath: string;
|
||||||
@@ -42,7 +30,6 @@ export type OrchestrationRuntimeConfig = {
|
|||||||
maxDepth: number;
|
maxDepth: number;
|
||||||
maxRetries: number;
|
maxRetries: number;
|
||||||
maxChildren: number;
|
maxChildren: number;
|
||||||
mergeConflictMaxAttempts: number;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
export type DiscoveryRuntimeConfig = {
|
export type DiscoveryRuntimeConfig = {
|
||||||
@@ -90,7 +77,6 @@ const DEFAULT_ORCHESTRATION: OrchestrationRuntimeConfig = {
|
|||||||
maxDepth: 4,
|
maxDepth: 4,
|
||||||
maxRetries: 2,
|
maxRetries: 2,
|
||||||
maxChildren: 4,
|
maxChildren: 4,
|
||||||
mergeConflictMaxAttempts: 2,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const DEFAULT_PROVISIONING: BuiltInProvisioningConfig = {
|
const DEFAULT_PROVISIONING: BuiltInProvisioningConfig = {
|
||||||
@@ -127,15 +113,6 @@ const DEFAULT_RUNTIME_EVENTS: RuntimeEventRuntimeConfig = {
|
|||||||
discordAlwaysNotifyTypes: ["session.started", "session.completed", "session.failed"],
|
discordAlwaysNotifyTypes: ["session.started", "session.completed", "session.failed"],
|
||||||
};
|
};
|
||||||
|
|
||||||
const DEFAULT_CLAUDE_OBSERVABILITY: ClaudeObservabilityRuntimeConfig = {
|
|
||||||
mode: "off",
|
|
||||||
verbosity: "summary",
|
|
||||||
logPath: ".ai_ops/events/claude-trace.ndjson",
|
|
||||||
includePartialMessages: false,
|
|
||||||
debug: false,
|
|
||||||
debugLogPath: undefined,
|
|
||||||
};
|
|
||||||
|
|
||||||
function readOptionalString(
|
function readOptionalString(
|
||||||
env: NodeJS.ProcessEnv,
|
env: NodeJS.ProcessEnv,
|
||||||
key: string,
|
key: string,
|
||||||
@@ -295,26 +272,6 @@ function parseOpenAiAuthMode(raw: string): OpenAiAuthMode {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
function parseClaudeObservabilityMode(raw: string): ClaudeObservabilityMode {
|
|
||||||
if (raw === "off" || raw === "stdout" || raw === "file" || raw === "both") {
|
|
||||||
return raw;
|
|
||||||
}
|
|
||||||
|
|
||||||
throw new Error(
|
|
||||||
'Environment variable CLAUDE_OBSERVABILITY_MODE must be one of: "off", "stdout", "file", "both".',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
function parseClaudeObservabilityVerbosity(raw: string): ClaudeObservabilityVerbosity {
|
|
||||||
if (raw === "summary" || raw === "full") {
|
|
||||||
return raw;
|
|
||||||
}
|
|
||||||
|
|
||||||
throw new Error(
|
|
||||||
'Environment variable CLAUDE_OBSERVABILITY_VERBOSITY must be one of: "summary", "full".',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
function deepFreeze<T>(value: T): Readonly<T> {
|
function deepFreeze<T>(value: T): Readonly<T> {
|
||||||
if (value === null || typeof value !== "object") {
|
if (value === null || typeof value !== "object") {
|
||||||
return value;
|
return value;
|
||||||
@@ -401,38 +358,6 @@ export function loadConfig(env: NodeJS.ProcessEnv = process.env): Readonly<AppCo
|
|||||||
anthropicApiKey,
|
anthropicApiKey,
|
||||||
claudeModel: normalizeClaudeModel(readOptionalString(env, "CLAUDE_MODEL")),
|
claudeModel: normalizeClaudeModel(readOptionalString(env, "CLAUDE_MODEL")),
|
||||||
claudeCodePath: readOptionalString(env, "CLAUDE_CODE_PATH"),
|
claudeCodePath: readOptionalString(env, "CLAUDE_CODE_PATH"),
|
||||||
claudeObservability: {
|
|
||||||
mode: parseClaudeObservabilityMode(
|
|
||||||
readStringWithFallback(
|
|
||||||
env,
|
|
||||||
"CLAUDE_OBSERVABILITY_MODE",
|
|
||||||
DEFAULT_CLAUDE_OBSERVABILITY.mode,
|
|
||||||
),
|
|
||||||
),
|
|
||||||
verbosity: parseClaudeObservabilityVerbosity(
|
|
||||||
readStringWithFallback(
|
|
||||||
env,
|
|
||||||
"CLAUDE_OBSERVABILITY_VERBOSITY",
|
|
||||||
DEFAULT_CLAUDE_OBSERVABILITY.verbosity,
|
|
||||||
),
|
|
||||||
),
|
|
||||||
logPath: readStringWithFallback(
|
|
||||||
env,
|
|
||||||
"CLAUDE_OBSERVABILITY_LOG_PATH",
|
|
||||||
DEFAULT_CLAUDE_OBSERVABILITY.logPath,
|
|
||||||
),
|
|
||||||
includePartialMessages: readBooleanWithFallback(
|
|
||||||
env,
|
|
||||||
"CLAUDE_OBSERVABILITY_INCLUDE_PARTIAL",
|
|
||||||
DEFAULT_CLAUDE_OBSERVABILITY.includePartialMessages,
|
|
||||||
),
|
|
||||||
debug: readBooleanWithFallback(
|
|
||||||
env,
|
|
||||||
"CLAUDE_OBSERVABILITY_DEBUG",
|
|
||||||
DEFAULT_CLAUDE_OBSERVABILITY.debug,
|
|
||||||
),
|
|
||||||
debugLogPath: readOptionalString(env, "CLAUDE_OBSERVABILITY_DEBUG_LOG_PATH"),
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
mcp: {
|
mcp: {
|
||||||
configPath: readStringWithFallback(env, "MCP_CONFIG_PATH", "./mcp.config.json"),
|
configPath: readStringWithFallback(env, "MCP_CONFIG_PATH", "./mcp.config.json"),
|
||||||
@@ -486,12 +411,6 @@ export function loadConfig(env: NodeJS.ProcessEnv = process.env): Readonly<AppCo
|
|||||||
DEFAULT_ORCHESTRATION.maxChildren,
|
DEFAULT_ORCHESTRATION.maxChildren,
|
||||||
{ min: 1 },
|
{ min: 1 },
|
||||||
),
|
),
|
||||||
mergeConflictMaxAttempts: readIntegerWithBounds(
|
|
||||||
env,
|
|
||||||
"AGENT_MERGE_CONFLICT_MAX_ATTEMPTS",
|
|
||||||
DEFAULT_ORCHESTRATION.mergeConflictMaxAttempts,
|
|
||||||
{ min: 1 },
|
|
||||||
),
|
|
||||||
},
|
},
|
||||||
provisioning: {
|
provisioning: {
|
||||||
gitWorktree: {
|
gitWorktree: {
|
||||||
|
|||||||
@@ -85,7 +85,6 @@ export async function runClaudePrompt(
|
|||||||
const writeOutput = dependencies.writeOutput ?? ((output: string) => console.log(output));
|
const writeOutput = dependencies.writeOutput ?? ((output: string) => console.log(output));
|
||||||
const sessionContext = await createSessionContextFn("claude", {
|
const sessionContext = await createSessionContextFn("claude", {
|
||||||
prompt,
|
prompt,
|
||||||
workspaceRoot: process.cwd(),
|
|
||||||
config,
|
config,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -48,7 +48,6 @@ export async function runCodexPrompt(
|
|||||||
const writeOutput = dependencies.writeOutput ?? ((output: string) => console.log(output));
|
const writeOutput = dependencies.writeOutput ?? ((output: string) => console.log(output));
|
||||||
const sessionContext = await createSessionContextFn("codex", {
|
const sessionContext = await createSessionContextFn("codex", {
|
||||||
prompt,
|
prompt,
|
||||||
workspaceRoot: process.cwd(),
|
|
||||||
config,
|
config,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -28,7 +28,6 @@ export async function createSessionContext(
|
|||||||
provider: SessionProvider,
|
provider: SessionProvider,
|
||||||
input: {
|
input: {
|
||||||
prompt: string;
|
prompt: string;
|
||||||
workspaceRoot: string;
|
|
||||||
config?: Readonly<AppConfig>;
|
config?: Readonly<AppConfig>;
|
||||||
mcpRegistry?: McpRegistry;
|
mcpRegistry?: McpRegistry;
|
||||||
},
|
},
|
||||||
@@ -59,7 +58,6 @@ export async function createSessionContext(
|
|||||||
provisionedResources = await resourceProvisioning.provisionSession({
|
provisionedResources = await resourceProvisioning.provisionSession({
|
||||||
sessionId: agentSession.id,
|
sessionId: agentSession.id,
|
||||||
resources: [{ kind: "git-worktree" }, { kind: "port-range" }],
|
resources: [{ kind: "git-worktree" }, { kind: "port-range" }],
|
||||||
workspaceRoot: input.workspaceRoot,
|
|
||||||
});
|
});
|
||||||
|
|
||||||
const providerAuthEnv =
|
const providerAuthEnv =
|
||||||
@@ -84,7 +82,6 @@ export async function createSessionContext(
|
|||||||
{
|
{
|
||||||
providerHint: provider,
|
providerHint: provider,
|
||||||
prompt: input.prompt,
|
prompt: input.prompt,
|
||||||
workingDirectory: runtimeInjection.workingDirectory,
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
config,
|
config,
|
||||||
|
|||||||
18
src/mcp.ts
18
src/mcp.ts
@@ -1,5 +1,5 @@
|
|||||||
import { existsSync, readFileSync } from "node:fs";
|
import { existsSync, readFileSync } from "node:fs";
|
||||||
import { isAbsolute, resolve } from "node:path";
|
import { resolve } from "node:path";
|
||||||
import type { CodexOptions } from "@openai/codex-sdk";
|
import type { CodexOptions } from "@openai/codex-sdk";
|
||||||
import { getConfig, type AppConfig } from "./config.js";
|
import { getConfig, type AppConfig } from "./config.js";
|
||||||
import { normalizeSharedMcpConfigFile } from "./mcp/converters.js";
|
import { normalizeSharedMcpConfigFile } from "./mcp/converters.js";
|
||||||
@@ -23,17 +23,12 @@ import type {
|
|||||||
import { parseMcpConfig } from "./mcp/types.js";
|
import { parseMcpConfig } from "./mcp/types.js";
|
||||||
import type { ToolClearancePolicy } from "./security/schemas.js";
|
import type { ToolClearancePolicy } from "./security/schemas.js";
|
||||||
|
|
||||||
function readConfigFile(input: {
|
function readConfigFile(configPath: string): {
|
||||||
configPath: string;
|
|
||||||
workingDirectory?: string;
|
|
||||||
}): {
|
|
||||||
config?: SharedMcpConfigFile;
|
config?: SharedMcpConfigFile;
|
||||||
sourcePath?: string;
|
sourcePath?: string;
|
||||||
} {
|
} {
|
||||||
const candidatePath = input.configPath.trim() || "./mcp.config.json";
|
const candidatePath = configPath.trim() || "./mcp.config.json";
|
||||||
const resolvedPath = isAbsolute(candidatePath)
|
const resolvedPath = resolve(process.cwd(), candidatePath);
|
||||||
? candidatePath
|
|
||||||
: resolve(input.workingDirectory ?? process.cwd(), candidatePath);
|
|
||||||
|
|
||||||
if (!existsSync(resolvedPath)) {
|
if (!existsSync(resolvedPath)) {
|
||||||
if (candidatePath !== "./mcp.config.json") {
|
if (candidatePath !== "./mcp.config.json") {
|
||||||
@@ -88,10 +83,7 @@ export function loadMcpConfigFromEnv(
|
|||||||
const registry = options?.registry ?? defaultMcpRegistry;
|
const registry = options?.registry ?? defaultMcpRegistry;
|
||||||
const warn = options?.warn ?? ((message: string) => console.warn(message));
|
const warn = options?.warn ?? ((message: string) => console.warn(message));
|
||||||
|
|
||||||
const { config, sourcePath } = readConfigFile({
|
const { config, sourcePath } = readConfigFile(runtimeConfig.mcp.configPath);
|
||||||
configPath: runtimeConfig.mcp.configPath,
|
|
||||||
workingDirectory: context.workingDirectory,
|
|
||||||
});
|
|
||||||
if (!config) {
|
if (!config) {
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -50,7 +50,6 @@ export type SharedMcpConfigFile = {
|
|||||||
export type McpLoadContext = {
|
export type McpLoadContext = {
|
||||||
providerHint?: "codex" | "claude" | "both";
|
providerHint?: "codex" | "claude" | "both";
|
||||||
prompt?: string;
|
prompt?: string;
|
||||||
workingDirectory?: string;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
export type LoadedMcpConfig = {
|
export type LoadedMcpConfig = {
|
||||||
|
|||||||
@@ -13,43 +13,41 @@ import {
|
|||||||
} from "./schemas.js";
|
} from "./schemas.js";
|
||||||
|
|
||||||
export type SecurityAuditEvent =
|
export type SecurityAuditEvent =
|
||||||
| ({
|
| {
|
||||||
type: "shell.command_profiled";
|
type: "shell.command_profiled";
|
||||||
|
timestamp: string;
|
||||||
command: string;
|
command: string;
|
||||||
cwd: string;
|
cwd: string;
|
||||||
parsed: ParsedShellScript;
|
parsed: ParsedShellScript;
|
||||||
} & SecurityAuditContext)
|
}
|
||||||
| ({
|
| {
|
||||||
type: "shell.command_allowed";
|
type: "shell.command_allowed";
|
||||||
|
timestamp: string;
|
||||||
command: string;
|
command: string;
|
||||||
cwd: string;
|
cwd: string;
|
||||||
commandCount: number;
|
commandCount: number;
|
||||||
} & SecurityAuditContext)
|
}
|
||||||
| ({
|
| {
|
||||||
type: "shell.command_blocked";
|
type: "shell.command_blocked";
|
||||||
|
timestamp: string;
|
||||||
command: string;
|
command: string;
|
||||||
cwd: string;
|
cwd: string;
|
||||||
reason: string;
|
reason: string;
|
||||||
code: string;
|
code: string;
|
||||||
details?: Record<string, unknown>;
|
details?: Record<string, unknown>;
|
||||||
} & SecurityAuditContext)
|
}
|
||||||
| ({
|
| {
|
||||||
type: "tool.invocation_allowed";
|
type: "tool.invocation_allowed";
|
||||||
|
timestamp: string;
|
||||||
tool: string;
|
tool: string;
|
||||||
} & SecurityAuditContext)
|
}
|
||||||
| ({
|
| {
|
||||||
type: "tool.invocation_blocked";
|
type: "tool.invocation_blocked";
|
||||||
|
timestamp: string;
|
||||||
tool: string;
|
tool: string;
|
||||||
reason: string;
|
reason: string;
|
||||||
code: string;
|
code: string;
|
||||||
} & SecurityAuditContext);
|
};
|
||||||
|
|
||||||
export type SecurityAuditContext = {
|
|
||||||
timestamp: string;
|
|
||||||
sessionId?: string;
|
|
||||||
nodeId?: string;
|
|
||||||
attempt?: number;
|
|
||||||
};
|
|
||||||
|
|
||||||
export type SecurityAuditSink = (event: SecurityAuditEvent) => void;
|
export type SecurityAuditSink = (event: SecurityAuditEvent) => void;
|
||||||
|
|
||||||
@@ -104,28 +102,6 @@ function toNow(): string {
|
|||||||
return new Date().toISOString();
|
return new Date().toISOString();
|
||||||
}
|
}
|
||||||
|
|
||||||
function toAuditContext(input?: {
|
|
||||||
sessionId?: string;
|
|
||||||
nodeId?: string;
|
|
||||||
attempt?: number;
|
|
||||||
}): SecurityAuditContext {
|
|
||||||
const output: SecurityAuditContext = {
|
|
||||||
timestamp: toNow(),
|
|
||||||
};
|
|
||||||
|
|
||||||
if (input?.sessionId) {
|
|
||||||
output.sessionId = input.sessionId;
|
|
||||||
}
|
|
||||||
if (input?.nodeId) {
|
|
||||||
output.nodeId = input.nodeId;
|
|
||||||
}
|
|
||||||
if (typeof input?.attempt === "number" && Number.isInteger(input.attempt) && input.attempt >= 1) {
|
|
||||||
output.attempt = input.attempt;
|
|
||||||
}
|
|
||||||
|
|
||||||
return output;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class SecurityRulesEngine {
|
export class SecurityRulesEngine {
|
||||||
private readonly policy: ShellValidationPolicy;
|
private readonly policy: ShellValidationPolicy;
|
||||||
private readonly allowedBinaries: Set<string>;
|
private readonly allowedBinaries: Set<string>;
|
||||||
@@ -160,11 +136,6 @@ export class SecurityRulesEngine {
|
|||||||
command: string;
|
command: string;
|
||||||
cwd: string;
|
cwd: string;
|
||||||
toolClearance?: ToolClearancePolicy;
|
toolClearance?: ToolClearancePolicy;
|
||||||
context?: {
|
|
||||||
sessionId?: string;
|
|
||||||
nodeId?: string;
|
|
||||||
attempt?: number;
|
|
||||||
};
|
|
||||||
}): Promise<ValidatedShellCommand> {
|
}): Promise<ValidatedShellCommand> {
|
||||||
const resolvedCwd = resolve(input.cwd);
|
const resolvedCwd = resolve(input.cwd);
|
||||||
|
|
||||||
@@ -176,22 +147,22 @@ export class SecurityRulesEngine {
|
|||||||
: undefined;
|
: undefined;
|
||||||
|
|
||||||
this.emit({
|
this.emit({
|
||||||
...toAuditContext(input.context),
|
|
||||||
type: "shell.command_profiled",
|
type: "shell.command_profiled",
|
||||||
|
timestamp: toNow(),
|
||||||
command: input.command,
|
command: input.command,
|
||||||
cwd: resolvedCwd,
|
cwd: resolvedCwd,
|
||||||
parsed,
|
parsed,
|
||||||
});
|
});
|
||||||
|
|
||||||
for (const command of parsed.commands) {
|
for (const command of parsed.commands) {
|
||||||
this.assertBinaryAllowed(command, toolClearance, input.context);
|
this.assertBinaryAllowed(command, toolClearance);
|
||||||
this.assertAssignmentsAllowed(command);
|
this.assertAssignmentsAllowed(command);
|
||||||
this.assertArgumentPaths(command, resolvedCwd);
|
this.assertArgumentPaths(command, resolvedCwd);
|
||||||
}
|
}
|
||||||
|
|
||||||
this.emit({
|
this.emit({
|
||||||
...toAuditContext(input.context),
|
|
||||||
type: "shell.command_allowed",
|
type: "shell.command_allowed",
|
||||||
|
timestamp: toNow(),
|
||||||
command: input.command,
|
command: input.command,
|
||||||
cwd: resolvedCwd,
|
cwd: resolvedCwd,
|
||||||
commandCount: parsed.commandCount,
|
commandCount: parsed.commandCount,
|
||||||
@@ -204,8 +175,8 @@ export class SecurityRulesEngine {
|
|||||||
} catch (error) {
|
} catch (error) {
|
||||||
if (error instanceof SecurityViolationError) {
|
if (error instanceof SecurityViolationError) {
|
||||||
this.emit({
|
this.emit({
|
||||||
...toAuditContext(input.context),
|
|
||||||
type: "shell.command_blocked",
|
type: "shell.command_blocked",
|
||||||
|
timestamp: toNow(),
|
||||||
command: input.command,
|
command: input.command,
|
||||||
cwd: resolvedCwd,
|
cwd: resolvedCwd,
|
||||||
reason: error.message,
|
reason: error.message,
|
||||||
@@ -225,18 +196,13 @@ export class SecurityRulesEngine {
|
|||||||
assertToolInvocationAllowed(input: {
|
assertToolInvocationAllowed(input: {
|
||||||
tool: string;
|
tool: string;
|
||||||
toolClearance: ToolClearancePolicy;
|
toolClearance: ToolClearancePolicy;
|
||||||
context?: {
|
|
||||||
sessionId?: string;
|
|
||||||
nodeId?: string;
|
|
||||||
attempt?: number;
|
|
||||||
};
|
|
||||||
}): void {
|
}): void {
|
||||||
const policy = parseToolClearancePolicy(input.toolClearance);
|
const policy = parseToolClearancePolicy(input.toolClearance);
|
||||||
|
|
||||||
if (policy.banlist.includes(input.tool)) {
|
if (policy.banlist.includes(input.tool)) {
|
||||||
this.emit({
|
this.emit({
|
||||||
...toAuditContext(input.context),
|
|
||||||
type: "tool.invocation_blocked",
|
type: "tool.invocation_blocked",
|
||||||
|
timestamp: toNow(),
|
||||||
tool: input.tool,
|
tool: input.tool,
|
||||||
reason: `Tool "${input.tool}" is explicitly banned by policy.`,
|
reason: `Tool "${input.tool}" is explicitly banned by policy.`,
|
||||||
code: "TOOL_BANNED",
|
code: "TOOL_BANNED",
|
||||||
@@ -254,8 +220,8 @@ export class SecurityRulesEngine {
|
|||||||
|
|
||||||
if (policy.allowlist.length > 0 && !policy.allowlist.includes(input.tool)) {
|
if (policy.allowlist.length > 0 && !policy.allowlist.includes(input.tool)) {
|
||||||
this.emit({
|
this.emit({
|
||||||
...toAuditContext(input.context),
|
|
||||||
type: "tool.invocation_blocked",
|
type: "tool.invocation_blocked",
|
||||||
|
timestamp: toNow(),
|
||||||
tool: input.tool,
|
tool: input.tool,
|
||||||
reason: `Tool "${input.tool}" is not present in allowlist.`,
|
reason: `Tool "${input.tool}" is not present in allowlist.`,
|
||||||
code: "TOOL_NOT_ALLOWED",
|
code: "TOOL_NOT_ALLOWED",
|
||||||
@@ -272,8 +238,8 @@ export class SecurityRulesEngine {
|
|||||||
}
|
}
|
||||||
|
|
||||||
this.emit({
|
this.emit({
|
||||||
...toAuditContext(input.context),
|
|
||||||
type: "tool.invocation_allowed",
|
type: "tool.invocation_allowed",
|
||||||
|
timestamp: toNow(),
|
||||||
tool: input.tool,
|
tool: input.tool,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -324,11 +290,6 @@ export class SecurityRulesEngine {
|
|||||||
private assertBinaryAllowed(
|
private assertBinaryAllowed(
|
||||||
command: ParsedShellCommand,
|
command: ParsedShellCommand,
|
||||||
toolClearance?: ToolClearancePolicy,
|
toolClearance?: ToolClearancePolicy,
|
||||||
context?: {
|
|
||||||
sessionId?: string;
|
|
||||||
nodeId?: string;
|
|
||||||
attempt?: number;
|
|
||||||
},
|
|
||||||
): void {
|
): void {
|
||||||
const binaryToken = normalizeToken(command.binary);
|
const binaryToken = normalizeToken(command.binary);
|
||||||
const binaryName = basename(binaryToken);
|
const binaryName = basename(binaryToken);
|
||||||
@@ -352,7 +313,6 @@ export class SecurityRulesEngine {
|
|||||||
this.assertToolInvocationAllowed({
|
this.assertToolInvocationAllowed({
|
||||||
tool: binaryName,
|
tool: binaryName,
|
||||||
toolClearance,
|
toolClearance,
|
||||||
context,
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,821 +0,0 @@
|
|||||||
import { randomUUID } from "node:crypto";
|
|
||||||
import { appendFile, mkdir } from "node:fs/promises";
|
|
||||||
import { dirname, resolve } from "node:path";
|
|
||||||
import type { Options, SDKMessage } from "@anthropic-ai/claude-agent-sdk";
|
|
||||||
import type {
|
|
||||||
ClaudeObservabilityMode,
|
|
||||||
ClaudeObservabilityRuntimeConfig,
|
|
||||||
ClaudeObservabilityVerbosity,
|
|
||||||
} from "../config.js";
|
|
||||||
import type { JsonObject, JsonValue } from "../agents/types.js";
|
|
||||||
|
|
||||||
const MAX_STRING_LENGTH = 320;
|
|
||||||
const MAX_ARRAY_ITEMS = 20;
|
|
||||||
const MAX_OBJECT_KEYS = 60;
|
|
||||||
const MAX_DEPTH = 6;
|
|
||||||
|
|
||||||
const NON_SECRET_TOKEN_KEYS = new Set([
|
|
||||||
"input_tokens",
|
|
||||||
"output_tokens",
|
|
||||||
"total_tokens",
|
|
||||||
"cache_creation_input_tokens",
|
|
||||||
"cache_read_input_tokens",
|
|
||||||
"ephemeral_1h_input_tokens",
|
|
||||||
"ephemeral_5m_input_tokens",
|
|
||||||
"token_input",
|
|
||||||
"token_output",
|
|
||||||
"token_total",
|
|
||||||
"tokencount",
|
|
||||||
"token_count",
|
|
||||||
"tool_use_id",
|
|
||||||
"parent_tool_use_id",
|
|
||||||
"task_id",
|
|
||||||
"session_id",
|
|
||||||
]);
|
|
||||||
|
|
||||||
type ClaudeTraceContext = {
|
|
||||||
sessionId: string;
|
|
||||||
nodeId: string;
|
|
||||||
attempt: number;
|
|
||||||
depth: number;
|
|
||||||
};
|
|
||||||
|
|
||||||
type ClaudeTraceRecord = {
|
|
||||||
id: string;
|
|
||||||
timestamp: string;
|
|
||||||
source: "claude_sdk";
|
|
||||||
stage:
|
|
||||||
| "query.started"
|
|
||||||
| "query.message"
|
|
||||||
| "query.stderr"
|
|
||||||
| "query.completed"
|
|
||||||
| "query.error";
|
|
||||||
message: string;
|
|
||||||
sessionId: string;
|
|
||||||
nodeId: string;
|
|
||||||
attempt: number;
|
|
||||||
depth: number;
|
|
||||||
sdkSessionId?: string;
|
|
||||||
sdkMessageType?: string;
|
|
||||||
sdkMessageSubtype?: string;
|
|
||||||
data?: JsonObject;
|
|
||||||
};
|
|
||||||
|
|
||||||
function truncate(value: string, maxLength = MAX_STRING_LENGTH): string {
|
|
||||||
if (value.length <= maxLength) {
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
return `${value.slice(0, maxLength)}...`;
|
|
||||||
}
|
|
||||||
|
|
||||||
function isSensitiveKey(key: string): boolean {
|
|
||||||
const normalized = key.trim().toLowerCase();
|
|
||||||
if (!normalized) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (NON_SECRET_TOKEN_KEYS.has(normalized)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (/(api[_-]?key|secret|password|authorization|cookie)/i.test(key)) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (/(auth[_-]?token|access[_-]?token|refresh[_-]?token|id[_-]?token|oauth)/i.test(key)) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
return normalized === "token";
|
|
||||||
}
|
|
||||||
|
|
||||||
function toJsonPrimitive(value: unknown): JsonValue {
|
|
||||||
if (value === null) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
if (typeof value === "string") {
|
|
||||||
return truncate(value);
|
|
||||||
}
|
|
||||||
if (typeof value === "number") {
|
|
||||||
return Number.isFinite(value) ? value : String(value);
|
|
||||||
}
|
|
||||||
if (typeof value === "boolean") {
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
if (typeof value === "bigint") {
|
|
||||||
return String(value);
|
|
||||||
}
|
|
||||||
if (typeof value === "undefined") {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return truncate(String(value));
|
|
||||||
}
|
|
||||||
|
|
||||||
function sanitizeJsonValue(value: unknown, depth = 0): JsonValue {
|
|
||||||
if (depth >= MAX_DEPTH) {
|
|
||||||
return "[depth_limit]";
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
value === null ||
|
|
||||||
typeof value === "string" ||
|
|
||||||
typeof value === "number" ||
|
|
||||||
typeof value === "boolean" ||
|
|
||||||
typeof value === "bigint" ||
|
|
||||||
typeof value === "undefined"
|
|
||||||
) {
|
|
||||||
return toJsonPrimitive(value);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (Array.isArray(value)) {
|
|
||||||
const output = value.slice(0, MAX_ARRAY_ITEMS).map((entry) => sanitizeJsonValue(entry, depth + 1));
|
|
||||||
if (value.length > MAX_ARRAY_ITEMS) {
|
|
||||||
output.push(`[+${String(value.length - MAX_ARRAY_ITEMS)} more]`);
|
|
||||||
}
|
|
||||||
return output;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (typeof value === "object") {
|
|
||||||
const output: JsonObject = {};
|
|
||||||
const entries = Object.entries(value as Record<string, unknown>);
|
|
||||||
const limited = entries.slice(0, MAX_OBJECT_KEYS);
|
|
||||||
for (const [key, entryValue] of limited) {
|
|
||||||
if (isSensitiveKey(key)) {
|
|
||||||
output[key] = "[redacted]";
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
output[key] = sanitizeJsonValue(entryValue, depth + 1);
|
|
||||||
}
|
|
||||||
if (entries.length > MAX_OBJECT_KEYS) {
|
|
||||||
output.__truncated_keys = entries.length - MAX_OBJECT_KEYS;
|
|
||||||
}
|
|
||||||
return output;
|
|
||||||
}
|
|
||||||
|
|
||||||
return truncate(String(value));
|
|
||||||
}
|
|
||||||
|
|
||||||
function readString(value: unknown): string | undefined {
|
|
||||||
if (typeof value !== "string") {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
const trimmed = value.trim();
|
|
||||||
return trimmed.length > 0 ? trimmed : undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
function readNumber(value: unknown): number | undefined {
|
|
||||||
return typeof value === "number" && Number.isFinite(value) ? value : undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
function readBoolean(value: unknown): boolean | undefined {
|
|
||||||
return typeof value === "boolean" ? value : undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
function toMessageRecord(message: SDKMessage): Record<string, unknown> {
|
|
||||||
return message as unknown as Record<string, unknown>;
|
|
||||||
}
|
|
||||||
|
|
||||||
function toMessageSubtype(message: SDKMessage): string | undefined {
|
|
||||||
return readString(toMessageRecord(message).subtype);
|
|
||||||
}
|
|
||||||
|
|
||||||
function toMessageSessionId(message: SDKMessage): string | undefined {
|
|
||||||
return readString(toMessageRecord(message).session_id);
|
|
||||||
}
|
|
||||||
|
|
||||||
function toTaskNotificationSummary(message: SDKMessage): {
|
|
||||||
summary: string;
|
|
||||||
data?: JsonObject;
|
|
||||||
} {
|
|
||||||
const raw = toMessageRecord(message);
|
|
||||||
const status = readString(raw.status) ?? "unknown";
|
|
||||||
const data: JsonObject = {
|
|
||||||
status,
|
|
||||||
};
|
|
||||||
|
|
||||||
const taskId = readString(raw.task_id);
|
|
||||||
if (taskId) {
|
|
||||||
data.taskId = taskId;
|
|
||||||
}
|
|
||||||
|
|
||||||
const summaryText = readString(raw.summary);
|
|
||||||
if (summaryText) {
|
|
||||||
data.summary = truncate(summaryText);
|
|
||||||
}
|
|
||||||
|
|
||||||
const outputFile = readString(raw.output_file);
|
|
||||||
if (outputFile) {
|
|
||||||
data.outputFile = outputFile;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (raw.usage !== undefined) {
|
|
||||||
data.usage = sanitizeJsonValue(raw.usage);
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
summary: `Task notification: ${status}.`,
|
|
||||||
data,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function toTaskStartedSummary(message: SDKMessage): {
|
|
||||||
summary: string;
|
|
||||||
data?: JsonObject;
|
|
||||||
} {
|
|
||||||
const raw = toMessageRecord(message);
|
|
||||||
const data: JsonObject = {};
|
|
||||||
|
|
||||||
const taskId = readString(raw.task_id);
|
|
||||||
if (taskId) {
|
|
||||||
data.taskId = taskId;
|
|
||||||
}
|
|
||||||
|
|
||||||
const description = readString(raw.description);
|
|
||||||
if (description) {
|
|
||||||
data.description = truncate(description);
|
|
||||||
}
|
|
||||||
|
|
||||||
const taskType = readString(raw.task_type);
|
|
||||||
if (taskType) {
|
|
||||||
data.taskType = taskType;
|
|
||||||
}
|
|
||||||
|
|
||||||
const toolUseId = readString(raw.tool_use_id);
|
|
||||||
if (toolUseId) {
|
|
||||||
data.toolUseId = toolUseId;
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
summary: "Task started.",
|
|
||||||
...(Object.keys(data).length > 0 ? { data } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function toMessageSummary(message: SDKMessage): {
|
|
||||||
summary: string;
|
|
||||||
data?: JsonObject;
|
|
||||||
} {
|
|
||||||
const subtype = toMessageSubtype(message);
|
|
||||||
const raw = toMessageRecord(message);
|
|
||||||
|
|
||||||
if (message.type === "result") {
|
|
||||||
if (message.subtype === "success") {
|
|
||||||
return {
|
|
||||||
summary: "Claude query result success.",
|
|
||||||
data: {
|
|
||||||
stopReason: message.stop_reason ?? null,
|
|
||||||
numTurns: message.num_turns,
|
|
||||||
usage: sanitizeJsonValue(message.usage) as JsonObject,
|
|
||||||
totalCostUsd: message.total_cost_usd,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
summary: `Claude query result ${message.subtype}.`,
|
|
||||||
data: {
|
|
||||||
stopReason: message.stop_reason ?? null,
|
|
||||||
numTurns: message.num_turns,
|
|
||||||
usage: sanitizeJsonValue(message.usage) as JsonObject,
|
|
||||||
totalCostUsd: message.total_cost_usd,
|
|
||||||
errors: sanitizeJsonValue(message.errors),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (message.type === "tool_progress") {
|
|
||||||
return {
|
|
||||||
summary: `Tool progress: ${message.tool_name}.`,
|
|
||||||
data: {
|
|
||||||
toolName: message.tool_name,
|
|
||||||
toolUseId: message.tool_use_id,
|
|
||||||
elapsedTimeSeconds: message.elapsed_time_seconds,
|
|
||||||
parentToolUseId: message.parent_tool_use_id ?? null,
|
|
||||||
...(message.task_id ? { taskId: message.task_id } : {}),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (message.type === "tool_use_summary") {
|
|
||||||
return {
|
|
||||||
summary: "Tool use summary emitted.",
|
|
||||||
data: {
|
|
||||||
summary: truncate(message.summary),
|
|
||||||
precedingToolUseIds: sanitizeJsonValue(message.preceding_tool_use_ids),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (message.type === "stream_event") {
|
|
||||||
const data: JsonObject = {};
|
|
||||||
const eventType = readString((raw.event as Record<string, unknown> | undefined)?.type);
|
|
||||||
if (eventType) {
|
|
||||||
data.eventType = eventType;
|
|
||||||
}
|
|
||||||
const parentToolUseId = readString(raw.parent_tool_use_id);
|
|
||||||
if (parentToolUseId) {
|
|
||||||
data.parentToolUseId = parentToolUseId;
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
summary: "Partial assistant stream event emitted.",
|
|
||||||
...(Object.keys(data).length > 0 ? { data } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (message.type === "auth_status") {
|
|
||||||
return {
|
|
||||||
summary: message.isAuthenticating ? "Authentication in progress." : "Authentication status update.",
|
|
||||||
data: {
|
|
||||||
isAuthenticating: message.isAuthenticating,
|
|
||||||
output: sanitizeJsonValue(message.output),
|
|
||||||
...(message.error ? { error: truncate(message.error) } : {}),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (message.type === "assistant") {
|
|
||||||
return {
|
|
||||||
summary: "Assistant message emitted.",
|
|
||||||
data: {
|
|
||||||
parentToolUseId: message.parent_tool_use_id ?? null,
|
|
||||||
...(message.error ? { error: message.error } : {}),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (message.type === "user") {
|
|
||||||
const data: JsonObject = {
|
|
||||||
parentToolUseId: (message as { parent_tool_use_id?: string | null }).parent_tool_use_id ?? null,
|
|
||||||
};
|
|
||||||
const isSynthetic = readBoolean(raw.isSynthetic);
|
|
||||||
if (isSynthetic !== undefined) {
|
|
||||||
data.isSynthetic = isSynthetic;
|
|
||||||
}
|
|
||||||
const isReplay = readBoolean(raw.isReplay);
|
|
||||||
if (isReplay !== undefined) {
|
|
||||||
data.isReplay = isReplay;
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
summary: "User message emitted.",
|
|
||||||
data,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (subtype === "task_notification") {
|
|
||||||
return toTaskNotificationSummary(message);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (subtype === "task_started") {
|
|
||||||
return toTaskStartedSummary(message);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (message.type === "system" && subtype === "files_persisted") {
|
|
||||||
const files = Array.isArray(raw.files) ? raw.files : [];
|
|
||||||
const failed = Array.isArray(raw.failed) ? raw.failed : [];
|
|
||||||
return {
|
|
||||||
summary: "System event: files_persisted.",
|
|
||||||
data: {
|
|
||||||
persistedFileCount: files.length,
|
|
||||||
failedFileCount: failed.length,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (message.type === "system" && subtype === "compact_boundary") {
|
|
||||||
return {
|
|
||||||
summary: "System event: compact_boundary.",
|
|
||||||
data: {
|
|
||||||
compactMetadata: sanitizeJsonValue(raw.compact_metadata),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (message.type === "system" && subtype === "status") {
|
|
||||||
const data: JsonObject = {
|
|
||||||
status: readString(raw.status) ?? "none",
|
|
||||||
};
|
|
||||||
const permissionMode = readString(raw.permissionMode);
|
|
||||||
if (permissionMode) {
|
|
||||||
data.permissionMode = permissionMode;
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
summary: "System event: status.",
|
|
||||||
data,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (message.type === "system" && (subtype === "hook_started" || subtype === "hook_progress" || subtype === "hook_response")) {
|
|
||||||
const data: JsonObject = {
|
|
||||||
...(subtype ? { subtype } : {}),
|
|
||||||
...(readString(raw.hook_id) ? { hookId: readString(raw.hook_id) } : {}),
|
|
||||||
...(readString(raw.hook_name) ? { hookName: readString(raw.hook_name) } : {}),
|
|
||||||
...(readString(raw.hook_event) ? { hookEvent: readString(raw.hook_event) } : {}),
|
|
||||||
...(readString(raw.outcome) ? { outcome: readString(raw.outcome) } : {}),
|
|
||||||
};
|
|
||||||
if (raw.exit_code !== undefined) {
|
|
||||||
data.exitCode = sanitizeJsonValue(raw.exit_code);
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
summary: `System event: ${subtype}.`,
|
|
||||||
data,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (message.type === "system") {
|
|
||||||
return {
|
|
||||||
summary: subtype ? `System event: ${subtype}.` : "System event emitted.",
|
|
||||||
data: subtype ? { subtype } : undefined,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (message.type === "rate_limit") {
|
|
||||||
return {
|
|
||||||
summary: "Rate limit event emitted.",
|
|
||||||
data: sanitizeJsonValue(raw) as JsonObject,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (message.type === "prompt_suggestion") {
|
|
||||||
const data: JsonObject = {
|
|
||||||
...(readString(raw.prompt) ? { prompt: truncate(readString(raw.prompt) as string) } : {}),
|
|
||||||
...(readString(raw.suggestion) ? { suggestion: truncate(readString(raw.suggestion) as string) } : {}),
|
|
||||||
};
|
|
||||||
return {
|
|
||||||
summary: "Prompt suggestion emitted.",
|
|
||||||
...(Object.keys(data).length > 0 ? { data } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
summary: `Claude SDK message received (${message.type}).`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function toRecord(input: {
|
|
||||||
stage: ClaudeTraceRecord["stage"];
|
|
||||||
message: string;
|
|
||||||
context: ClaudeTraceContext;
|
|
||||||
sdkMessageType?: string;
|
|
||||||
sdkMessageSubtype?: string;
|
|
||||||
sdkSessionId?: string;
|
|
||||||
data?: JsonObject;
|
|
||||||
}): ClaudeTraceRecord {
|
|
||||||
return {
|
|
||||||
id: randomUUID(),
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
source: "claude_sdk",
|
|
||||||
stage: input.stage,
|
|
||||||
message: input.message,
|
|
||||||
sessionId: input.context.sessionId,
|
|
||||||
nodeId: input.context.nodeId,
|
|
||||||
attempt: input.context.attempt,
|
|
||||||
depth: input.context.depth,
|
|
||||||
...(input.sdkMessageType ? { sdkMessageType: input.sdkMessageType } : {}),
|
|
||||||
...(input.sdkMessageSubtype ? { sdkMessageSubtype: input.sdkMessageSubtype } : {}),
|
|
||||||
...(input.sdkSessionId ? { sdkSessionId: input.sdkSessionId } : {}),
|
|
||||||
...(input.data ? { data: input.data } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export function summarizeClaudeMessage(
|
|
||||||
message: SDKMessage,
|
|
||||||
verbosity: ClaudeObservabilityVerbosity,
|
|
||||||
): {
|
|
||||||
messageType: string;
|
|
||||||
messageSubtype?: string;
|
|
||||||
sdkSessionId?: string;
|
|
||||||
summary: string;
|
|
||||||
data?: JsonObject;
|
|
||||||
} {
|
|
||||||
const messageSubtype = toMessageSubtype(message);
|
|
||||||
const sdkSessionId = toMessageSessionId(message);
|
|
||||||
const summary = toMessageSummary(message);
|
|
||||||
if (verbosity === "full") {
|
|
||||||
return {
|
|
||||||
messageType: message.type,
|
|
||||||
...(messageSubtype ? { messageSubtype } : {}),
|
|
||||||
...(sdkSessionId ? { sdkSessionId } : {}),
|
|
||||||
summary: summary.summary,
|
|
||||||
data: {
|
|
||||||
message: sanitizeJsonValue(message) as JsonObject,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
messageType: message.type,
|
|
||||||
...(messageSubtype ? { messageSubtype } : {}),
|
|
||||||
...(sdkSessionId ? { sdkSessionId } : {}),
|
|
||||||
summary: summary.summary,
|
|
||||||
...(summary.data ? { data: summary.data } : {}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Emits structured trace records for Claude SDK activity to stdout, an
 * NDJSON file, or both, depending on the configured mode ("off" disables
 * recording entirely). High-frequency message kinds (tool progress and
 * partial stream events) are rate-limited per destination so the trace
 * stays readable, with a suppression counter folded into the next
 * emitted record.
 */
export class ClaudeObservabilityLogger {
  // Destination selector: "off" | "stdout" | "file" | "both" (see record()).
  private readonly mode: ClaudeObservabilityMode;
  // Passed through to summarizeClaudeMessage() for per-message detail level.
  private readonly verbosity: ClaudeObservabilityVerbosity;
  // Absolute path of the NDJSON trace log, resolved against workspaceRoot.
  private readonly logPath: string;
  // When false, "stream_event" messages are dropped from both destinations.
  private readonly includePartialMessages: boolean;
  private readonly debug: boolean;
  private readonly debugLogPath?: string;
  // In-flight appendFile promises; close() awaits them before returning.
  private readonly pendingWrites = new Set<Promise<void>>();
  // Sampling state for tool_progress records bound for stdout, keyed by
  // destination:sessionId:nodeId:toolUseId.
  private readonly stdoutProgressByKey = new Map<string, {
    lastEmittedAt: number;
    suppressed: number;
  }>();
  // Sampling state for tool_progress records bound for the log file.
  private readonly fileProgressByKey = new Map<string, {
    lastEmittedAt: number;
    suppressed: number;
  }>();
  // Sampling state for stream_event records bound for stdout.
  private readonly stdoutStreamByKey = new Map<string, {
    lastEmittedAt: number;
    suppressed: number;
  }>();
  // Sampling state for stream_event records bound for the log file.
  private readonly fileStreamByKey = new Map<string, {
    lastEmittedAt: number;
    suppressed: number;
  }>();
  // Counts append failures; warnings are capped to avoid log spam.
  private fileWriteFailureCount = 0;

  /**
   * Resolves the configured (relative) log paths against the workspace
   * root and captures the runtime observability configuration.
   */
  constructor(input: {
    workspaceRoot: string;
    config: ClaudeObservabilityRuntimeConfig;
  }) {
    this.mode = input.config.mode;
    this.verbosity = input.config.verbosity;
    this.logPath = resolve(input.workspaceRoot, input.config.logPath);
    this.includePartialMessages = input.config.includePartialMessages;
    this.debug = input.config.debug;
    this.debugLogPath = input.config.debugLogPath
      ? resolve(input.workspaceRoot, input.config.debugLogPath)
      : undefined;
  }

  /** True unless the configured mode is "off". */
  isEnabled(): boolean {
    return this.mode !== "off";
  }

  /**
   * Builds the SDK Options overrides that wire observability into a query:
   * partial-message inclusion, debug flags/file, and a stderr callback that
   * records each stderr chunk as a "query.stderr" trace record.
   */
  toOptionOverrides(input: {
    context: ClaudeTraceContext;
  }): Pick<Options, "includePartialMessages" | "debug" | "debugFile" | "stderr"> {
    return {
      includePartialMessages: this.includePartialMessages,
      // A configured debug file implies debug output even if the debug
      // flag itself is off.
      debug: this.debug || this.debugLogPath !== undefined,
      ...(this.debugLogPath ? { debugFile: this.debugLogPath } : {}),
      stderr: (data: string): void => {
        this.record({
          stage: "query.stderr",
          message: "Claude SDK stderr output.",
          context: input.context,
          data: {
            stderr: sanitizeJsonValue(data),
          },
        });
      },
    };
  }

  /** Records a "query.started" trace record with optional extra data. */
  recordQueryStarted(input: {
    context: ClaudeTraceContext;
    data?: JsonObject;
  }): void {
    this.record({
      stage: "query.started",
      message: "Claude query started.",
      context: input.context,
      ...(input.data ? { data: input.data } : {}),
    });
  }

  /**
   * Summarizes an SDK message at the configured verbosity and records it
   * as a "query.message" trace record.
   */
  recordMessage(input: {
    context: ClaudeTraceContext;
    message: SDKMessage;
  }): void {
    const summarized = summarizeClaudeMessage(input.message, this.verbosity);
    this.record({
      stage: "query.message",
      message: summarized.summary,
      context: input.context,
      sdkMessageType: summarized.messageType,
      sdkMessageSubtype: summarized.messageSubtype,
      sdkSessionId: summarized.sdkSessionId,
      ...(summarized.data ? { data: summarized.data } : {}),
    });
  }

  /** Records a "query.completed" trace record with optional extra data. */
  recordQueryCompleted(input: {
    context: ClaudeTraceContext;
    data?: JsonObject;
  }): void {
    this.record({
      stage: "query.completed",
      message: "Claude query completed.",
      context: input.context,
      ...(input.data ? { data: input.data } : {}),
    });
  }

  /**
   * Records a "query.error" trace record. Non-Error values are stringified;
   * the message is truncated before being stored.
   */
  recordQueryError(input: {
    context: ClaudeTraceContext;
    error: unknown;
  }): void {
    const errorMessage = input.error instanceof Error ? input.error.message : String(input.error);
    this.record({
      stage: "query.error",
      message: "Claude query failed.",
      context: input.context,
      data: {
        error: truncate(errorMessage),
      },
    });
  }

  /** Waits for all in-flight file appends to settle. */
  async close(): Promise<void> {
    await Promise.all([...this.pendingWrites]);
  }

  /**
   * Core sink: builds a trace record and dispatches it to stdout and/or
   * the NDJSON log file per the configured mode, applying per-destination
   * filtering/sampling. File writes are fire-and-forget but tracked in
   * pendingWrites so close() can await them; failures are reported with a
   * capped warning count rather than thrown.
   */
  private record(input: {
    stage: ClaudeTraceRecord["stage"];
    message: string;
    context: ClaudeTraceContext;
    sdkMessageType?: string;
    sdkMessageSubtype?: string;
    sdkSessionId?: string;
    data?: JsonObject;
  }): void {
    if (!this.isEnabled()) {
      return;
    }

    const record = toRecord(input);

    if (this.mode === "stdout" || this.mode === "both") {
      const stdoutRecord = this.toStdoutRecord(record);
      if (stdoutRecord) {
        console.log(`[claude-trace] ${JSON.stringify(stdoutRecord)}`);
      }
    }

    if (this.mode === "file" || this.mode === "both") {
      const fileRecord = this.toFileRecord(record);
      if (!fileRecord) {
        return;
      }
      const line = JSON.stringify(fileRecord);
      // Ensure the log directory exists, then append one NDJSON line.
      const write = mkdir(dirname(this.logPath), { recursive: true })
        .then(() => appendFile(this.logPath, `${line}\n`, "utf8"))
        .catch((error: unknown) => {
          this.reportFileWriteFailure(error);
        })
        .finally(() => {
          this.pendingWrites.delete(write);
        });
      this.pendingWrites.add(write);
    }
  }

  /** Applies stdout-destination filtering/sampling to a record. */
  private toStdoutRecord(record: ClaudeTraceRecord): ClaudeTraceRecord | undefined {
    return this.toFilteredMessageRecord(record, "stdout");
  }

  /** Applies file-destination filtering/sampling to a record. */
  private toFileRecord(record: ClaudeTraceRecord): ClaudeTraceRecord | undefined {
    return this.toFilteredMessageRecord(record, "file");
  }

  /**
   * Filters "query.message" records by SDK message type:
   * - tool_progress: rate-limited per tool-use (see sampling helper);
   * - stream_event: dropped unless partial messages are enabled, then
   *   rate-limited;
   * - auth_status: intermediate "authenticating" updates without an error
   *   are suppressed;
   * - everything else (and all non-message stages) passes through.
   */
  private toFilteredMessageRecord(
    record: ClaudeTraceRecord,
    destination: "stdout" | "file",
  ): ClaudeTraceRecord | undefined {
    if (record.stage !== "query.message") {
      return record;
    }

    if (!record.sdkMessageType) {
      return record;
    }

    if (record.sdkMessageType === "tool_progress") {
      return this.toSampledToolProgressRecord(record, destination);
    }

    if (record.sdkMessageType === "stream_event") {
      if (!this.includePartialMessages) {
        return undefined;
      }
      return this.toSampledStreamEventRecord(record, destination);
    }

    if (record.sdkMessageType === "auth_status") {
      const data = record.data;
      const isAuthenticating = data?.isAuthenticating === true;
      const hasError = typeof data?.error === "string" && data.error.trim().length > 0;
      if (hasError || !isAuthenticating) {
        return record;
      }
      return undefined;
    }

    return record;
  }

  /**
   * Rate-limits tool_progress records per destination/session/node/tool-use
   * key (min interval: 1s stdout, 2s file). The first record for a key is
   * always emitted; suppressed counts since the last emission are attached
   * as `suppressedSinceLastEmit` on the next emitted record.
   */
  private toSampledToolProgressRecord(
    record: ClaudeTraceRecord,
    destination: "stdout" | "file",
  ): ClaudeTraceRecord | undefined {
    const now = Date.now();
    const minIntervalMs = destination === "stdout" ? 1000 : 2000;
    const rawToolUseId = record.data?.toolUseId;
    const toolUseId = typeof rawToolUseId === "string" ? rawToolUseId : "unknown";
    const key = `${destination}:${record.sessionId}:${record.nodeId}:${toolUseId}`;
    const progressByKey = destination === "stdout" ? this.stdoutProgressByKey : this.fileProgressByKey;
    const state = progressByKey.get(key);

    if (!state) {
      // First sighting of this key: emit immediately and start tracking.
      progressByKey.set(key, {
        lastEmittedAt: now,
        suppressed: 0,
      });
      return record;
    }

    if (now - state.lastEmittedAt < minIntervalMs) {
      state.suppressed += 1;
      return undefined;
    }

    state.lastEmittedAt = now;
    const suppressed = state.suppressed;
    state.suppressed = 0;

    if (suppressed < 1) {
      return record;
    }

    // Surface how many records were dropped since the last emission.
    const nextData: JsonObject = {
      ...(record.data ?? {}),
      suppressedSinceLastEmit: suppressed,
    };

    return {
      ...record,
      data: nextData,
    };
  }

  /**
   * Rate-limits stream_event records per destination/session/node (min
   * interval: 700ms stdout, 1.2s file). Mirrors the tool-progress sampler,
   * but reports drops as `suppressedStreamEventsSinceLastEmit`.
   */
  private toSampledStreamEventRecord(
    record: ClaudeTraceRecord,
    destination: "stdout" | "file",
  ): ClaudeTraceRecord | undefined {
    const now = Date.now();
    const minIntervalMs = destination === "stdout" ? 700 : 1200;
    const key = `${destination}:${record.sessionId}:${record.nodeId}:stream`;
    const streamByKey = destination === "stdout" ? this.stdoutStreamByKey : this.fileStreamByKey;
    const state = streamByKey.get(key);

    if (!state) {
      streamByKey.set(key, {
        lastEmittedAt: now,
        suppressed: 0,
      });
      return record;
    }

    if (now - state.lastEmittedAt < minIntervalMs) {
      state.suppressed += 1;
      return undefined;
    }

    state.lastEmittedAt = now;
    const suppressed = state.suppressed;
    state.suppressed = 0;

    if (suppressed < 1) {
      return record;
    }

    const nextData: JsonObject = {
      ...(record.data ?? {}),
      suppressedStreamEventsSinceLastEmit: suppressed,
    };

    return {
      ...record,
      data: nextData,
    };
  }

  /**
   * Warns about a failed trace-log append. Only the first 5 failures are
   * reported individually; the 6th emits a one-time suppression notice and
   * later failures are silent.
   */
  private reportFileWriteFailure(error: unknown): void {
    this.fileWriteFailureCount += 1;
    if (this.fileWriteFailureCount <= 5) {
      const message = error instanceof Error ? error.message : String(error);
      console.warn(
        `[claude-trace] failed to append trace log to ${this.logPath}: ${truncate(message, 180)}`,
      );
      return;
    }

    if (this.fileWriteFailureCount === 6) {
      console.warn("[claude-trace] additional trace-log write failures suppressed.");
    }
  }
}
|
|
||||||
@@ -1,85 +0,0 @@
|
|||||||
import { readFile } from "node:fs/promises";
|
|
||||||
import { resolve } from "node:path";
|
|
||||||
|
|
||||||
/**
 * One parsed line of the Claude trace NDJSON log.
 * `timestamp` and `message` are the only required fields; any additional
 * keys present on the log line are preserved via the index signature.
 */
export type ClaudeTraceEvent = {
  timestamp: string;
  message: string;
  stage?: string;
  sessionId?: string;
  sdkMessageType?: string;
  sdkMessageSubtype?: string;
  data?: unknown;
} & Record<string, unknown>;
|
|
||||||
|
|
||||||
/**
 * Filtering criteria for filterClaudeTraceEvents(): an optional session
 * to match and an optional cap on the number of events returned.
 */
type ClaudeTraceFilter = {
  sessionId?: string;
  limit?: number;
};
|
|
||||||
|
|
||||||
function safeParseLine(line: string): ClaudeTraceEvent | undefined {
|
|
||||||
const trimmed = line.trim();
|
|
||||||
if (!trimmed) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const parsed = JSON.parse(trimmed) as unknown;
|
|
||||||
if (!parsed || typeof parsed !== "object") {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
const record = parsed as Record<string, unknown>;
|
|
||||||
if (typeof record.timestamp !== "string" || typeof record.message !== "string") {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
return record as ClaudeTraceEvent;
|
|
||||||
} catch {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function readClaudeTraceEvents(logPath: string): Promise<ClaudeTraceEvent[]> {
|
|
||||||
const absolutePath = resolve(logPath);
|
|
||||||
let content = "";
|
|
||||||
|
|
||||||
try {
|
|
||||||
content = await readFile(absolutePath, "utf8");
|
|
||||||
} catch (error) {
|
|
||||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
|
|
||||||
const parsed: ClaudeTraceEvent[] = [];
|
|
||||||
for (const line of content.split(/\r?\n/)) {
|
|
||||||
const event = safeParseLine(line);
|
|
||||||
if (event) {
|
|
||||||
parsed.push(event);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
parsed.sort((left, right) => left.timestamp.localeCompare(right.timestamp));
|
|
||||||
return parsed;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function filterClaudeTraceEvents(
|
|
||||||
events: readonly ClaudeTraceEvent[],
|
|
||||||
filter: ClaudeTraceFilter,
|
|
||||||
): ClaudeTraceEvent[] {
|
|
||||||
const filtered: ClaudeTraceEvent[] = [];
|
|
||||||
|
|
||||||
for (const event of events) {
|
|
||||||
if (filter.sessionId && event.sessionId !== filter.sessionId) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
filtered.push(event);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!filter.limit || filter.limit < 1 || filtered.length <= filter.limit) {
|
|
||||||
return filtered;
|
|
||||||
}
|
|
||||||
|
|
||||||
return filtered.slice(-filter.limit);
|
|
||||||
}
|
|
||||||
@@ -23,7 +23,6 @@ export type LimitSettings = {
|
|||||||
topologyMaxDepth: number;
|
topologyMaxDepth: number;
|
||||||
topologyMaxRetries: number;
|
topologyMaxRetries: number;
|
||||||
relationshipMaxChildren: number;
|
relationshipMaxChildren: number;
|
||||||
mergeConflictMaxAttempts: number;
|
|
||||||
portBase: number;
|
portBase: number;
|
||||||
portBlockSize: number;
|
portBlockSize: number;
|
||||||
portBlockCount: number;
|
portBlockCount: number;
|
||||||
@@ -39,7 +38,6 @@ export type UiConfigSnapshot = {
|
|||||||
stateRoot: string;
|
stateRoot: string;
|
||||||
projectContextPath: string;
|
projectContextPath: string;
|
||||||
runtimeEventLogPath: string;
|
runtimeEventLogPath: string;
|
||||||
claudeTraceLogPath: string;
|
|
||||||
securityAuditLogPath: string;
|
securityAuditLogPath: string;
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
@@ -90,7 +88,6 @@ function toLimits(config: Readonly<AppConfig>): LimitSettings {
|
|||||||
topologyMaxDepth: config.orchestration.maxDepth,
|
topologyMaxDepth: config.orchestration.maxDepth,
|
||||||
topologyMaxRetries: config.orchestration.maxRetries,
|
topologyMaxRetries: config.orchestration.maxRetries,
|
||||||
relationshipMaxChildren: config.orchestration.maxChildren,
|
relationshipMaxChildren: config.orchestration.maxChildren,
|
||||||
mergeConflictMaxAttempts: config.orchestration.mergeConflictMaxAttempts,
|
|
||||||
portBase: config.provisioning.portRange.basePort,
|
portBase: config.provisioning.portRange.basePort,
|
||||||
portBlockSize: config.provisioning.portRange.blockSize,
|
portBlockSize: config.provisioning.portRange.blockSize,
|
||||||
portBlockCount: config.provisioning.portRange.blockCount,
|
portBlockCount: config.provisioning.portRange.blockCount,
|
||||||
@@ -108,7 +105,6 @@ function toSnapshot(config: Readonly<AppConfig>, envFilePath: string): UiConfigS
|
|||||||
stateRoot: config.orchestration.stateRoot,
|
stateRoot: config.orchestration.stateRoot,
|
||||||
projectContextPath: config.orchestration.projectContextPath,
|
projectContextPath: config.orchestration.projectContextPath,
|
||||||
runtimeEventLogPath: config.runtimeEvents.logPath,
|
runtimeEventLogPath: config.runtimeEvents.logPath,
|
||||||
claudeTraceLogPath: config.provider.claudeObservability.logPath,
|
|
||||||
securityAuditLogPath: config.security.auditLogPath,
|
securityAuditLogPath: config.security.auditLogPath,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
@@ -174,7 +170,6 @@ export class UiConfigStore {
|
|||||||
AGENT_TOPOLOGY_MAX_DEPTH: String(input.topologyMaxDepth),
|
AGENT_TOPOLOGY_MAX_DEPTH: String(input.topologyMaxDepth),
|
||||||
AGENT_TOPOLOGY_MAX_RETRIES: String(input.topologyMaxRetries),
|
AGENT_TOPOLOGY_MAX_RETRIES: String(input.topologyMaxRetries),
|
||||||
AGENT_RELATIONSHIP_MAX_CHILDREN: String(input.relationshipMaxChildren),
|
AGENT_RELATIONSHIP_MAX_CHILDREN: String(input.relationshipMaxChildren),
|
||||||
AGENT_MERGE_CONFLICT_MAX_ATTEMPTS: String(input.mergeConflictMaxAttempts),
|
|
||||||
AGENT_PORT_BASE: String(input.portBase),
|
AGENT_PORT_BASE: String(input.portBase),
|
||||||
AGENT_PORT_BLOCK_SIZE: String(input.portBlockSize),
|
AGENT_PORT_BLOCK_SIZE: String(input.portBlockSize),
|
||||||
AGENT_PORT_BLOCK_COUNT: String(input.portBlockCount),
|
AGENT_PORT_BLOCK_COUNT: String(input.portBlockCount),
|
||||||
|
|||||||
@@ -10,7 +10,6 @@ import { isDomainEventType, type DomainEventEmission } from "../agents/domain-ev
|
|||||||
import type { ActorExecutionInput, ActorExecutionResult, ActorExecutor } from "../agents/pipeline.js";
|
import type { ActorExecutionInput, ActorExecutionResult, ActorExecutor } from "../agents/pipeline.js";
|
||||||
import { isRecord, type JsonObject, type JsonValue } from "../agents/types.js";
|
import { isRecord, type JsonObject, type JsonValue } from "../agents/types.js";
|
||||||
import { createSessionContext, type SessionContext } from "../examples/session-context.js";
|
import { createSessionContext, type SessionContext } from "../examples/session-context.js";
|
||||||
import { ClaudeObservabilityLogger } from "./claude-observability.js";
|
|
||||||
|
|
||||||
export type RunProvider = "codex" | "claude";
|
export type RunProvider = "codex" | "claude";
|
||||||
|
|
||||||
@@ -18,7 +17,6 @@ export type ProviderRunRuntime = {
|
|||||||
provider: RunProvider;
|
provider: RunProvider;
|
||||||
config: Readonly<AppConfig>;
|
config: Readonly<AppConfig>;
|
||||||
sessionContext: SessionContext;
|
sessionContext: SessionContext;
|
||||||
claudeObservability: ClaudeObservabilityLogger;
|
|
||||||
close: () => Promise<void>;
|
close: () => Promise<void>;
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -335,7 +333,7 @@ function buildActorPrompt(input: ActorExecutionInput): string {
|
|||||||
},
|
},
|
||||||
events: [
|
events: [
|
||||||
{
|
{
|
||||||
type: "requirements_defined | tasks_planned | code_committed | task_ready_for_review | task_blocked | validation_passed | validation_failed | branch_merged | merge_conflict_detected | merge_conflict_resolved | merge_conflict_unresolved | merge_retry_started",
|
type: "requirements_defined | tasks_planned | code_committed | task_blocked | validation_passed | validation_failed | branch_merged",
|
||||||
payload: {
|
payload: {
|
||||||
summary: "optional",
|
summary: "optional",
|
||||||
details: {},
|
details: {},
|
||||||
@@ -418,40 +416,6 @@ type ClaudeTurnResult = {
|
|||||||
usage: ProviderUsage;
|
usage: ProviderUsage;
|
||||||
};
|
};
|
||||||
|
|
||||||
function toClaudeTraceContext(actorInput: ActorExecutionInput): {
|
|
||||||
sessionId: string;
|
|
||||||
nodeId: string;
|
|
||||||
attempt: number;
|
|
||||||
depth: number;
|
|
||||||
} {
|
|
||||||
return {
|
|
||||||
sessionId: actorInput.sessionId,
|
|
||||||
nodeId: actorInput.node.id,
|
|
||||||
attempt: actorInput.attempt,
|
|
||||||
depth: actorInput.depth,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function toProviderUsageJson(usage: ProviderUsage): JsonObject {
|
|
||||||
const output: JsonObject = {};
|
|
||||||
if (typeof usage.tokenInput === "number") {
|
|
||||||
output.tokenInput = usage.tokenInput;
|
|
||||||
}
|
|
||||||
if (typeof usage.tokenOutput === "number") {
|
|
||||||
output.tokenOutput = usage.tokenOutput;
|
|
||||||
}
|
|
||||||
if (typeof usage.tokenTotal === "number") {
|
|
||||||
output.tokenTotal = usage.tokenTotal;
|
|
||||||
}
|
|
||||||
if (typeof usage.durationMs === "number") {
|
|
||||||
output.durationMs = usage.durationMs;
|
|
||||||
}
|
|
||||||
if (typeof usage.costUsd === "number") {
|
|
||||||
output.costUsd = usage.costUsd;
|
|
||||||
}
|
|
||||||
return output;
|
|
||||||
}
|
|
||||||
|
|
||||||
function buildClaudeOptions(input: {
|
function buildClaudeOptions(input: {
|
||||||
runtime: ProviderRunRuntime;
|
runtime: ProviderRunRuntime;
|
||||||
actorInput: ActorExecutionInput;
|
actorInput: ActorExecutionInput;
|
||||||
@@ -469,7 +433,6 @@ function buildClaudeOptions(input: {
|
|||||||
...runtime.sessionContext.runtimeInjection.env,
|
...runtime.sessionContext.runtimeInjection.env,
|
||||||
...buildClaudeAuthEnv(runtime.config.provider),
|
...buildClaudeAuthEnv(runtime.config.provider),
|
||||||
};
|
};
|
||||||
const traceContext = toClaudeTraceContext(actorInput);
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
maxTurns: CLAUDE_PROVIDER_MAX_TURNS,
|
maxTurns: CLAUDE_PROVIDER_MAX_TURNS,
|
||||||
@@ -486,9 +449,6 @@ function buildClaudeOptions(input: {
|
|||||||
canUseTool: actorInput.mcp.createClaudeCanUseTool(),
|
canUseTool: actorInput.mcp.createClaudeCanUseTool(),
|
||||||
cwd: runtime.sessionContext.runtimeInjection.workingDirectory,
|
cwd: runtime.sessionContext.runtimeInjection.workingDirectory,
|
||||||
env: runtimeEnv,
|
env: runtimeEnv,
|
||||||
...runtime.claudeObservability.toOptionOverrides({
|
|
||||||
context: traceContext,
|
|
||||||
}),
|
|
||||||
outputFormat: CLAUDE_OUTPUT_FORMAT,
|
outputFormat: CLAUDE_OUTPUT_FORMAT,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@@ -498,19 +458,10 @@ async function runClaudeTurn(input: {
|
|||||||
actorInput: ActorExecutionInput;
|
actorInput: ActorExecutionInput;
|
||||||
prompt: string;
|
prompt: string;
|
||||||
}): Promise<ClaudeTurnResult> {
|
}): Promise<ClaudeTurnResult> {
|
||||||
const traceContext = toClaudeTraceContext(input.actorInput);
|
|
||||||
const options = buildClaudeOptions({
|
const options = buildClaudeOptions({
|
||||||
runtime: input.runtime,
|
runtime: input.runtime,
|
||||||
actorInput: input.actorInput,
|
actorInput: input.actorInput,
|
||||||
});
|
});
|
||||||
input.runtime.claudeObservability.recordQueryStarted({
|
|
||||||
context: traceContext,
|
|
||||||
data: {
|
|
||||||
...(options.model ? { model: options.model } : {}),
|
|
||||||
maxTurns: options.maxTurns ?? CLAUDE_PROVIDER_MAX_TURNS,
|
|
||||||
cwd: input.runtime.sessionContext.runtimeInjection.workingDirectory,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const startedAt = Date.now();
|
const startedAt = Date.now();
|
||||||
const stream = query({
|
const stream = query({
|
||||||
@@ -521,7 +472,6 @@ async function runClaudeTurn(input: {
|
|||||||
let resultText = "";
|
let resultText = "";
|
||||||
let structuredOutput: unknown;
|
let structuredOutput: unknown;
|
||||||
let usage: ProviderUsage = {};
|
let usage: ProviderUsage = {};
|
||||||
let messageCount = 0;
|
|
||||||
|
|
||||||
const onAbort = (): void => {
|
const onAbort = (): void => {
|
||||||
stream.close();
|
stream.close();
|
||||||
@@ -531,12 +481,6 @@ async function runClaudeTurn(input: {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
for await (const message of stream as AsyncIterable<SDKMessage>) {
|
for await (const message of stream as AsyncIterable<SDKMessage>) {
|
||||||
messageCount += 1;
|
|
||||||
input.runtime.claudeObservability.recordMessage({
|
|
||||||
context: traceContext,
|
|
||||||
message,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (message.type !== "result") {
|
if (message.type !== "result") {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
@@ -558,12 +502,6 @@ async function runClaudeTurn(input: {
|
|||||||
costUsd: message.total_cost_usd,
|
costUsd: message.total_cost_usd,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
} catch (error) {
|
|
||||||
input.runtime.claudeObservability.recordQueryError({
|
|
||||||
context: traceContext,
|
|
||||||
error,
|
|
||||||
});
|
|
||||||
throw error;
|
|
||||||
} finally {
|
} finally {
|
||||||
input.actorInput.signal.removeEventListener("abort", onAbort);
|
input.actorInput.signal.removeEventListener("abort", onAbort);
|
||||||
stream.close();
|
stream.close();
|
||||||
@@ -574,22 +512,9 @@ async function runClaudeTurn(input: {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (!resultText) {
|
if (!resultText) {
|
||||||
const error = new Error("Claude run completed without a final result.");
|
throw new Error("Claude run completed without a final result.");
|
||||||
input.runtime.claudeObservability.recordQueryError({
|
|
||||||
context: traceContext,
|
|
||||||
error,
|
|
||||||
});
|
|
||||||
throw error;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
input.runtime.claudeObservability.recordQueryCompleted({
|
|
||||||
context: traceContext,
|
|
||||||
data: {
|
|
||||||
messageCount,
|
|
||||||
usage: toProviderUsageJson(usage),
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
text: resultText,
|
text: resultText,
|
||||||
structuredOutput,
|
structuredOutput,
|
||||||
@@ -628,30 +553,18 @@ export async function createProviderRunRuntime(input: {
|
|||||||
provider: RunProvider;
|
provider: RunProvider;
|
||||||
initialPrompt: string;
|
initialPrompt: string;
|
||||||
config: Readonly<AppConfig>;
|
config: Readonly<AppConfig>;
|
||||||
projectPath: string;
|
|
||||||
observabilityRootPath?: string;
|
|
||||||
}): Promise<ProviderRunRuntime> {
|
}): Promise<ProviderRunRuntime> {
|
||||||
const sessionContext = await createSessionContext(input.provider, {
|
const sessionContext = await createSessionContext(input.provider, {
|
||||||
prompt: input.initialPrompt,
|
prompt: input.initialPrompt,
|
||||||
config: input.config,
|
config: input.config,
|
||||||
workspaceRoot: input.projectPath,
|
|
||||||
});
|
|
||||||
const claudeObservability = new ClaudeObservabilityLogger({
|
|
||||||
workspaceRoot: input.observabilityRootPath ?? input.projectPath,
|
|
||||||
config: input.config.provider.claudeObservability,
|
|
||||||
});
|
});
|
||||||
|
|
||||||
return {
|
return {
|
||||||
provider: input.provider,
|
provider: input.provider,
|
||||||
config: input.config,
|
config: input.config,
|
||||||
sessionContext,
|
sessionContext,
|
||||||
claudeObservability,
|
|
||||||
close: async () => {
|
close: async () => {
|
||||||
try {
|
await sessionContext.close();
|
||||||
await sessionContext.close();
|
|
||||||
} finally {
|
|
||||||
await claudeObservability.close();
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,7 +2,6 @@ const state = {
|
|||||||
config: null,
|
config: null,
|
||||||
manifests: [],
|
manifests: [],
|
||||||
sessions: [],
|
sessions: [],
|
||||||
sessionMetadata: [],
|
|
||||||
runs: [],
|
runs: [],
|
||||||
selectedSessionId: "",
|
selectedSessionId: "",
|
||||||
selectedManifestPath: "",
|
selectedManifestPath: "",
|
||||||
@@ -26,22 +25,13 @@ const dom = {
|
|||||||
runProvider: document.querySelector("#run-provider"),
|
runProvider: document.querySelector("#run-provider"),
|
||||||
runTopologyHint: document.querySelector("#run-topology-hint"),
|
runTopologyHint: document.querySelector("#run-topology-hint"),
|
||||||
runFlags: document.querySelector("#run-flags"),
|
runFlags: document.querySelector("#run-flags"),
|
||||||
runRuntimeContext: document.querySelector("#run-runtime-context"),
|
|
||||||
runValidationNodes: document.querySelector("#run-validation-nodes"),
|
runValidationNodes: document.querySelector("#run-validation-nodes"),
|
||||||
killRun: document.querySelector("#kill-run"),
|
killRun: document.querySelector("#kill-run"),
|
||||||
runStatus: document.querySelector("#run-status"),
|
runStatus: document.querySelector("#run-status"),
|
||||||
sessionForm: document.querySelector("#session-form"),
|
|
||||||
sessionProjectPath: document.querySelector("#session-project-path"),
|
|
||||||
sessionCreate: document.querySelector("#session-create"),
|
|
||||||
sessionClose: document.querySelector("#session-close"),
|
|
||||||
sessionCloseMerge: document.querySelector("#session-close-merge"),
|
|
||||||
nodeInspector: document.querySelector("#node-inspector"),
|
nodeInspector: document.querySelector("#node-inspector"),
|
||||||
eventsLimit: document.querySelector("#events-limit"),
|
eventsLimit: document.querySelector("#events-limit"),
|
||||||
eventsRefresh: document.querySelector("#events-refresh"),
|
eventsRefresh: document.querySelector("#events-refresh"),
|
||||||
eventFeed: document.querySelector("#event-feed"),
|
eventFeed: document.querySelector("#event-feed"),
|
||||||
claudeEventsLimit: document.querySelector("#claude-events-limit"),
|
|
||||||
claudeEventsRefresh: document.querySelector("#claude-events-refresh"),
|
|
||||||
claudeEventFeed: document.querySelector("#claude-event-feed"),
|
|
||||||
historyRefresh: document.querySelector("#history-refresh"),
|
historyRefresh: document.querySelector("#history-refresh"),
|
||||||
historyBody: document.querySelector("#history-body"),
|
historyBody: document.querySelector("#history-body"),
|
||||||
notificationsForm: document.querySelector("#notifications-form"),
|
notificationsForm: document.querySelector("#notifications-form"),
|
||||||
@@ -87,7 +77,6 @@ const dom = {
|
|||||||
cfgTopologyDepth: document.querySelector("#cfg-topology-depth"),
|
cfgTopologyDepth: document.querySelector("#cfg-topology-depth"),
|
||||||
cfgTopologyRetries: document.querySelector("#cfg-topology-retries"),
|
cfgTopologyRetries: document.querySelector("#cfg-topology-retries"),
|
||||||
cfgRelationshipChildren: document.querySelector("#cfg-relationship-children"),
|
cfgRelationshipChildren: document.querySelector("#cfg-relationship-children"),
|
||||||
cfgMergeConflictAttempts: document.querySelector("#cfg-merge-conflict-attempts"),
|
|
||||||
cfgPortBase: document.querySelector("#cfg-port-base"),
|
cfgPortBase: document.querySelector("#cfg-port-base"),
|
||||||
cfgPortBlockSize: document.querySelector("#cfg-port-block-size"),
|
cfgPortBlockSize: document.querySelector("#cfg-port-block-size"),
|
||||||
cfgPortBlockCount: document.querySelector("#cfg-port-block-count"),
|
cfgPortBlockCount: document.querySelector("#cfg-port-block-count"),
|
||||||
@@ -122,15 +111,10 @@ const MANIFEST_EVENT_TRIGGERS = [
|
|||||||
"requirements_defined",
|
"requirements_defined",
|
||||||
"tasks_planned",
|
"tasks_planned",
|
||||||
"code_committed",
|
"code_committed",
|
||||||
"task_ready_for_review",
|
|
||||||
"task_blocked",
|
"task_blocked",
|
||||||
"validation_passed",
|
"validation_passed",
|
||||||
"validation_failed",
|
"validation_failed",
|
||||||
"branch_merged",
|
"branch_merged",
|
||||||
"merge_conflict_detected",
|
|
||||||
"merge_conflict_resolved",
|
|
||||||
"merge_conflict_unresolved",
|
|
||||||
"merge_retry_started",
|
|
||||||
];
|
];
|
||||||
|
|
||||||
const RUN_MANIFEST_EDITOR_VALUE = "__editor__";
|
const RUN_MANIFEST_EDITOR_VALUE = "__editor__";
|
||||||
@@ -145,12 +129,8 @@ const LABEL_HELP_BY_CONTROL = Object.freeze({
|
|||||||
"run-provider": "Choose which model provider backend handles provider-mode runs.",
|
"run-provider": "Choose which model provider backend handles provider-mode runs.",
|
||||||
"run-topology-hint": "Optional hint that nudges orchestration toward a topology strategy.",
|
"run-topology-hint": "Optional hint that nudges orchestration toward a topology strategy.",
|
||||||
"run-flags": "Optional JSON object passed in as initial run flags.",
|
"run-flags": "Optional JSON object passed in as initial run flags.",
|
||||||
"run-runtime-context": "Optional JSON object of template values injected into persona prompts (for example repo or ticket).",
|
|
||||||
"run-validation-nodes": "Optional comma-separated node IDs to simulate validation outcomes for.",
|
"run-validation-nodes": "Optional comma-separated node IDs to simulate validation outcomes for.",
|
||||||
"session-project-path": "Absolute project path used when creating an explicit managed session.",
|
|
||||||
"session-close-merge": "When enabled, close will merge the session base branch back into the project branch.",
|
|
||||||
"events-limit": "Set how many recent runtime events are loaded per refresh.",
|
"events-limit": "Set how many recent runtime events are loaded per refresh.",
|
||||||
"claude-events-limit": "Set how many Claude SDK trace records are loaded per refresh.",
|
|
||||||
"cfg-webhook-url": "Webhook endpoint that receives runtime event notifications.",
|
"cfg-webhook-url": "Webhook endpoint that receives runtime event notifications.",
|
||||||
"cfg-webhook-severity": "Minimum severity level that triggers webhook notifications.",
|
"cfg-webhook-severity": "Minimum severity level that triggers webhook notifications.",
|
||||||
"cfg-webhook-always": "Event types that should always notify, regardless of severity.",
|
"cfg-webhook-always": "Event types that should always notify, regardless of severity.",
|
||||||
@@ -165,7 +145,6 @@ const LABEL_HELP_BY_CONTROL = Object.freeze({
|
|||||||
"cfg-topology-depth": "Maximum orchestration graph depth permitted by topology rules.",
|
"cfg-topology-depth": "Maximum orchestration graph depth permitted by topology rules.",
|
||||||
"cfg-topology-retries": "Maximum retry expansions allowed by topology orchestration.",
|
"cfg-topology-retries": "Maximum retry expansions allowed by topology orchestration.",
|
||||||
"cfg-relationship-children": "Maximum children each persona relationship can spawn.",
|
"cfg-relationship-children": "Maximum children each persona relationship can spawn.",
|
||||||
"cfg-merge-conflict-attempts": "Maximum merge-conflict resolution attempts before emitting unresolved conflict events.",
|
|
||||||
"cfg-port-base": "Starting port number for provisioning port allocations.",
|
"cfg-port-base": "Starting port number for provisioning port allocations.",
|
||||||
"cfg-port-block-size": "Number of ports reserved per allocated block.",
|
"cfg-port-block-size": "Number of ports reserved per allocated block.",
|
||||||
"cfg-port-block-count": "Number of port blocks available for allocation.",
|
"cfg-port-block-count": "Number of port blocks available for allocation.",
|
||||||
@@ -1050,7 +1029,6 @@ async function loadConfig() {
|
|||||||
dom.cfgTopologyDepth.value = String(limits.topologyMaxDepth);
|
dom.cfgTopologyDepth.value = String(limits.topologyMaxDepth);
|
||||||
dom.cfgTopologyRetries.value = String(limits.topologyMaxRetries);
|
dom.cfgTopologyRetries.value = String(limits.topologyMaxRetries);
|
||||||
dom.cfgRelationshipChildren.value = String(limits.relationshipMaxChildren);
|
dom.cfgRelationshipChildren.value = String(limits.relationshipMaxChildren);
|
||||||
dom.cfgMergeConflictAttempts.value = String(limits.mergeConflictMaxAttempts);
|
|
||||||
dom.cfgPortBase.value = String(limits.portBase);
|
dom.cfgPortBase.value = String(limits.portBase);
|
||||||
dom.cfgPortBlockSize.value = String(limits.portBlockSize);
|
dom.cfgPortBlockSize.value = String(limits.portBlockSize);
|
||||||
dom.cfgPortBlockCount.value = String(limits.portBlockCount);
|
dom.cfgPortBlockCount.value = String(limits.portBlockCount);
|
||||||
@@ -1082,28 +1060,11 @@ function statusChipClass(status) {
|
|||||||
return `status-chip status-${status || "unknown"}`;
|
return `status-chip status-${status || "unknown"}`;
|
||||||
}
|
}
|
||||||
|
|
||||||
function getSessionLifecycleStatus(sessionId) {
|
|
||||||
const metadata = state.sessionMetadata.find((entry) => entry?.sessionId === sessionId);
|
|
||||||
if (!metadata) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
const status = metadata.sessionStatus;
|
|
||||||
if (status === "active" || status === "suspended" || status === "closed" || status === "closed_with_conflicts") {
|
|
||||||
return status;
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
function renderRunsAndSessionsTable() {
|
function renderRunsAndSessionsTable() {
|
||||||
const rows = [];
|
const rows = [];
|
||||||
|
|
||||||
for (const session of state.sessions) {
|
for (const session of state.sessions) {
|
||||||
const lifecycleStatus = getSessionLifecycleStatus(session.sessionId);
|
const sessionStatus = session.status || "unknown";
|
||||||
const sessionStatus =
|
|
||||||
lifecycleStatus === "closed" || lifecycleStatus === "closed_with_conflicts"
|
|
||||||
? lifecycleStatus
|
|
||||||
: session.status || lifecycleStatus || "unknown";
|
|
||||||
rows.push(`
|
rows.push(`
|
||||||
<tr data-session-id="${escapeHtml(session.sessionId)}">
|
<tr data-session-id="${escapeHtml(session.sessionId)}">
|
||||||
<td>${escapeHtml(session.sessionId)}</td>
|
<td>${escapeHtml(session.sessionId)}</td>
|
||||||
@@ -1131,7 +1092,6 @@ function renderRunsAndSessionsTable() {
|
|||||||
async function loadSessions() {
|
async function loadSessions() {
|
||||||
const payload = await apiRequest("/api/sessions");
|
const payload = await apiRequest("/api/sessions");
|
||||||
state.sessions = payload.sessions || [];
|
state.sessions = payload.sessions || [];
|
||||||
state.sessionMetadata = payload.sessionMetadata || [];
|
|
||||||
state.runs = payload.runs || [];
|
state.runs = payload.runs || [];
|
||||||
|
|
||||||
if (!state.selectedSessionId && state.sessions.length > 0) {
|
if (!state.selectedSessionId && state.sessions.length > 0) {
|
||||||
@@ -1497,43 +1457,6 @@ function renderEventFeed(events) {
|
|||||||
dom.eventFeed.innerHTML = rows || '<div class="event-row"><div class="event-time">-</div><div class="event-type">-</div><div>No runtime events.</div></div>';
|
dom.eventFeed.innerHTML = rows || '<div class="event-row"><div class="event-time">-</div><div class="event-type">-</div><div>No runtime events.</div></div>';
|
||||||
}
|
}
|
||||||
|
|
||||||
function toClaudeRowSeverity(event) {
|
|
||||||
const stage = String(event?.stage || "");
|
|
||||||
const type = String(event?.sdkMessageType || "");
|
|
||||||
if (stage === "query.error") {
|
|
||||||
return "critical";
|
|
||||||
}
|
|
||||||
if (stage === "query.stderr" || (type === "result" && String(event?.sdkMessageSubtype || "").startsWith("error_"))) {
|
|
||||||
return "warning";
|
|
||||||
}
|
|
||||||
return "info";
|
|
||||||
}
|
|
||||||
|
|
||||||
function renderClaudeTraceFeed(events) {
|
|
||||||
const rows = [...events]
|
|
||||||
.reverse()
|
|
||||||
.map((event) => {
|
|
||||||
const ts = new Date(event.timestamp).toLocaleTimeString();
|
|
||||||
const stage = String(event.stage || "query.message");
|
|
||||||
const sdkMessageType = String(event.sdkMessageType || "");
|
|
||||||
const sdkMessageSubtype = String(event.sdkMessageSubtype || "");
|
|
||||||
const typeLabel = sdkMessageType
|
|
||||||
? `${stage}/${sdkMessageType}${sdkMessageSubtype ? `:${sdkMessageSubtype}` : ""}`
|
|
||||||
: stage;
|
|
||||||
const message = typeof event.message === "string" ? event.message : JSON.stringify(event.message || "");
|
|
||||||
return `
|
|
||||||
<div class="event-row ${escapeHtml(toClaudeRowSeverity(event))}">
|
|
||||||
<div class="event-time">${escapeHtml(ts)}</div>
|
|
||||||
<div class="event-type">${escapeHtml(typeLabel)}</div>
|
|
||||||
<div>${escapeHtml(message)}</div>
|
|
||||||
</div>
|
|
||||||
`;
|
|
||||||
})
|
|
||||||
.join("");
|
|
||||||
|
|
||||||
dom.claudeEventFeed.innerHTML = rows || '<div class="event-row"><div class="event-time">-</div><div class="event-type">-</div><div>No Claude trace events.</div></div>';
|
|
||||||
}
|
|
||||||
|
|
||||||
async function refreshEvents() {
|
async function refreshEvents() {
|
||||||
const limit = Number(dom.eventsLimit.value || "150");
|
const limit = Number(dom.eventsLimit.value || "150");
|
||||||
const params = new URLSearchParams({
|
const params = new URLSearchParams({
|
||||||
@@ -1548,20 +1471,6 @@ async function refreshEvents() {
|
|||||||
renderEventFeed(payload.events || []);
|
renderEventFeed(payload.events || []);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function refreshClaudeTrace() {
|
|
||||||
const limit = Number(dom.claudeEventsLimit.value || "150");
|
|
||||||
const params = new URLSearchParams({
|
|
||||||
limit: String(limit),
|
|
||||||
});
|
|
||||||
|
|
||||||
if (state.selectedSessionId) {
|
|
||||||
params.set("sessionId", state.selectedSessionId);
|
|
||||||
}
|
|
||||||
|
|
||||||
const payload = await apiRequest(`/api/claude-trace?${params.toString()}`);
|
|
||||||
renderClaudeTraceFeed(payload.events || []);
|
|
||||||
}
|
|
||||||
|
|
||||||
async function startRun(event) {
|
async function startRun(event) {
|
||||||
event.preventDefault();
|
event.preventDefault();
|
||||||
|
|
||||||
@@ -1577,12 +1486,6 @@ async function startRun(event) {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const runtimeContext = parseJsonSafe(dom.runRuntimeContext.value, {});
|
|
||||||
if (typeof runtimeContext !== "object" || Array.isArray(runtimeContext) || !runtimeContext) {
|
|
||||||
showRunStatus("Runtime Context Overrides must be a JSON object.", true);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const manifestSelection = dom.runManifestSelect.value.trim();
|
const manifestSelection = dom.runManifestSelect.value.trim();
|
||||||
|
|
||||||
const payload = {
|
const payload = {
|
||||||
@@ -1591,21 +1494,9 @@ async function startRun(event) {
|
|||||||
provider: dom.runProvider.value,
|
provider: dom.runProvider.value,
|
||||||
topologyHint: dom.runTopologyHint.value.trim() || undefined,
|
topologyHint: dom.runTopologyHint.value.trim() || undefined,
|
||||||
initialFlags: flags,
|
initialFlags: flags,
|
||||||
runtimeContextOverrides: runtimeContext,
|
|
||||||
simulateValidationNodeIds: fromCsv(dom.runValidationNodes.value),
|
simulateValidationNodeIds: fromCsv(dom.runValidationNodes.value),
|
||||||
};
|
};
|
||||||
|
|
||||||
const selectedSessionMetadata = state.sessionMetadata.find(
|
|
||||||
(entry) => entry?.sessionId === state.selectedSessionId,
|
|
||||||
);
|
|
||||||
if (
|
|
||||||
selectedSessionMetadata &&
|
|
||||||
(selectedSessionMetadata.sessionStatus === "active" ||
|
|
||||||
selectedSessionMetadata.sessionStatus === "suspended")
|
|
||||||
) {
|
|
||||||
payload.sessionId = selectedSessionMetadata.sessionId;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (manifestSelection === RUN_MANIFEST_EDITOR_VALUE) {
|
if (manifestSelection === RUN_MANIFEST_EDITOR_VALUE) {
|
||||||
const manifestFromEditor = parseJsonSafe(dom.manifestEditor.value, null);
|
const manifestFromEditor = parseJsonSafe(dom.manifestEditor.value, null);
|
||||||
if (!manifestFromEditor) {
|
if (!manifestFromEditor) {
|
||||||
@@ -1636,7 +1527,6 @@ async function startRun(event) {
|
|||||||
dom.sessionSelect.value = run.sessionId;
|
dom.sessionSelect.value = run.sessionId;
|
||||||
await refreshGraph();
|
await refreshGraph();
|
||||||
await refreshEvents();
|
await refreshEvents();
|
||||||
await refreshClaudeTrace();
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
showRunStatus(error instanceof Error ? error.message : String(error), true);
|
showRunStatus(error instanceof Error ? error.message : String(error), true);
|
||||||
}
|
}
|
||||||
@@ -1657,67 +1547,6 @@ async function cancelActiveRun() {
|
|||||||
await loadSessions();
|
await loadSessions();
|
||||||
await refreshGraph();
|
await refreshGraph();
|
||||||
await refreshEvents();
|
await refreshEvents();
|
||||||
await refreshClaudeTrace();
|
|
||||||
} catch (error) {
|
|
||||||
showRunStatus(error instanceof Error ? error.message : String(error), true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function createSessionFromUi() {
|
|
||||||
const projectPath = dom.sessionProjectPath.value.trim();
|
|
||||||
if (!projectPath) {
|
|
||||||
showRunStatus("Project path is required to create a session.", true);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const payload = await apiRequest("/api/sessions", {
|
|
||||||
method: "POST",
|
|
||||||
body: JSON.stringify({
|
|
||||||
projectPath,
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
|
|
||||||
const created = payload.session;
|
|
||||||
if (created?.sessionId) {
|
|
||||||
state.selectedSessionId = created.sessionId;
|
|
||||||
showRunStatus(`Session ${created.sessionId} created.`);
|
|
||||||
} else {
|
|
||||||
showRunStatus("Session created.");
|
|
||||||
}
|
|
||||||
await loadSessions();
|
|
||||||
if (state.selectedSessionId) {
|
|
||||||
dom.sessionSelect.value = state.selectedSessionId;
|
|
||||||
await refreshGraph();
|
|
||||||
await refreshEvents();
|
|
||||||
await refreshClaudeTrace();
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
showRunStatus(error instanceof Error ? error.message : String(error), true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function closeSelectedSessionFromUi() {
|
|
||||||
const sessionId = state.selectedSessionId || dom.sessionSelect.value;
|
|
||||||
if (!sessionId) {
|
|
||||||
showRunStatus("Select a session before closing.", true);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const payload = await apiRequest(`/api/sessions/${encodeURIComponent(sessionId)}/close`, {
|
|
||||||
method: "POST",
|
|
||||||
body: JSON.stringify({
|
|
||||||
mergeToProject: dom.sessionCloseMerge.checked,
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
|
|
||||||
const nextStatus = payload?.session?.sessionStatus || "closed";
|
|
||||||
showRunStatus(`Session ${sessionId} closed with status ${nextStatus}.`);
|
|
||||||
await loadSessions();
|
|
||||||
await refreshGraph();
|
|
||||||
await refreshEvents();
|
|
||||||
await refreshClaudeTrace();
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
showRunStatus(error instanceof Error ? error.message : String(error), true);
|
showRunStatus(error instanceof Error ? error.message : String(error), true);
|
||||||
}
|
}
|
||||||
@@ -1768,7 +1597,6 @@ async function saveLimits(event) {
|
|||||||
topologyMaxDepth: Number(dom.cfgTopologyDepth.value),
|
topologyMaxDepth: Number(dom.cfgTopologyDepth.value),
|
||||||
topologyMaxRetries: Number(dom.cfgTopologyRetries.value),
|
topologyMaxRetries: Number(dom.cfgTopologyRetries.value),
|
||||||
relationshipMaxChildren: Number(dom.cfgRelationshipChildren.value),
|
relationshipMaxChildren: Number(dom.cfgRelationshipChildren.value),
|
||||||
mergeConflictMaxAttempts: Number(dom.cfgMergeConflictAttempts.value),
|
|
||||||
portBase: Number(dom.cfgPortBase.value),
|
portBase: Number(dom.cfgPortBase.value),
|
||||||
portBlockSize: Number(dom.cfgPortBlockSize.value),
|
portBlockSize: Number(dom.cfgPortBlockSize.value),
|
||||||
portBlockCount: Number(dom.cfgPortBlockCount.value),
|
portBlockCount: Number(dom.cfgPortBlockCount.value),
|
||||||
@@ -1867,7 +1695,6 @@ function bindUiEvents() {
|
|||||||
state.selectedSessionId = dom.sessionSelect.value;
|
state.selectedSessionId = dom.sessionSelect.value;
|
||||||
await refreshGraph();
|
await refreshGraph();
|
||||||
await refreshEvents();
|
await refreshEvents();
|
||||||
await refreshClaudeTrace();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
dom.graphManifestSelect.addEventListener("change", async () => {
|
dom.graphManifestSelect.addEventListener("change", async () => {
|
||||||
@@ -1887,26 +1714,15 @@ function bindUiEvents() {
|
|||||||
await refreshEvents();
|
await refreshEvents();
|
||||||
});
|
});
|
||||||
|
|
||||||
dom.claudeEventsRefresh.addEventListener("click", async () => {
|
|
||||||
await refreshClaudeTrace();
|
|
||||||
});
|
|
||||||
|
|
||||||
dom.historyRefresh.addEventListener("click", async () => {
|
dom.historyRefresh.addEventListener("click", async () => {
|
||||||
await loadSessions();
|
await loadSessions();
|
||||||
await refreshGraph();
|
await refreshGraph();
|
||||||
await refreshClaudeTrace();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
dom.runForm.addEventListener("submit", startRun);
|
dom.runForm.addEventListener("submit", startRun);
|
||||||
dom.killRun.addEventListener("click", () => {
|
dom.killRun.addEventListener("click", () => {
|
||||||
void cancelActiveRun();
|
void cancelActiveRun();
|
||||||
});
|
});
|
||||||
dom.sessionCreate.addEventListener("click", () => {
|
|
||||||
void createSessionFromUi();
|
|
||||||
});
|
|
||||||
dom.sessionClose.addEventListener("click", () => {
|
|
||||||
void closeSelectedSessionFromUi();
|
|
||||||
});
|
|
||||||
|
|
||||||
dom.notificationsForm.addEventListener("submit", (event) => {
|
dom.notificationsForm.addEventListener("submit", (event) => {
|
||||||
void saveNotifications(event);
|
void saveNotifications(event);
|
||||||
@@ -2014,7 +1830,6 @@ async function refreshAll() {
|
|||||||
|
|
||||||
await refreshGraph();
|
await refreshGraph();
|
||||||
await refreshEvents();
|
await refreshEvents();
|
||||||
await refreshClaudeTrace();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async function initialize() {
|
async function initialize() {
|
||||||
@@ -2045,10 +1860,6 @@ async function initialize() {
|
|||||||
void refreshEvents();
|
void refreshEvents();
|
||||||
}, 3000);
|
}, 3000);
|
||||||
|
|
||||||
setInterval(() => {
|
|
||||||
void refreshClaudeTrace();
|
|
||||||
}, 3000);
|
|
||||||
|
|
||||||
setInterval(() => {
|
setInterval(() => {
|
||||||
void refreshGraph();
|
void refreshGraph();
|
||||||
}, 7000);
|
}, 7000);
|
||||||
|
|||||||
@@ -75,10 +75,6 @@
|
|||||||
Initial Flags (JSON)
|
Initial Flags (JSON)
|
||||||
<textarea id="run-flags" rows="3" placeholder='{"needs_bootstrap": true}'></textarea>
|
<textarea id="run-flags" rows="3" placeholder='{"needs_bootstrap": true}'></textarea>
|
||||||
</label>
|
</label>
|
||||||
<label>
|
|
||||||
Runtime Context Overrides (JSON)
|
|
||||||
<textarea id="run-runtime-context" rows="3" placeholder='{"repo":"ai_ops","ticket":"AIOPS-123"}'></textarea>
|
|
||||||
</label>
|
|
||||||
<label>
|
<label>
|
||||||
Simulate Validation Nodes (CSV)
|
Simulate Validation Nodes (CSV)
|
||||||
<input id="run-validation-nodes" type="text" placeholder="coder-1,qa-1" />
|
<input id="run-validation-nodes" type="text" placeholder="coder-1,qa-1" />
|
||||||
@@ -90,23 +86,6 @@
|
|||||||
</form>
|
</form>
|
||||||
<div id="run-status" class="subtle"></div>
|
<div id="run-status" class="subtle"></div>
|
||||||
|
|
||||||
<div class="divider"></div>
|
|
||||||
<h3>Session Controls</h3>
|
|
||||||
<form id="session-form" class="stacked-form">
|
|
||||||
<label>
|
|
||||||
Project Path (absolute)
|
|
||||||
<input id="session-project-path" type="text" placeholder="/abs/path/to/project" />
|
|
||||||
</label>
|
|
||||||
<label class="inline-checkbox">
|
|
||||||
<input id="session-close-merge" type="checkbox" />
|
|
||||||
Merge base into project when closing selected session
|
|
||||||
</label>
|
|
||||||
<div class="inline-actions">
|
|
||||||
<button id="session-create" type="button">Create Session</button>
|
|
||||||
<button id="session-close" type="button">Close Selected Session</button>
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
|
|
||||||
<div class="divider"></div>
|
<div class="divider"></div>
|
||||||
<h3>Node Inspector</h3>
|
<h3>Node Inspector</h3>
|
||||||
<div id="node-inspector" class="inspector empty">Select a graph node.</div>
|
<div id="node-inspector" class="inspector empty">Select a graph node.</div>
|
||||||
@@ -130,24 +109,6 @@
|
|||||||
<div id="event-feed" class="event-feed"></div>
|
<div id="event-feed" class="event-feed"></div>
|
||||||
</section>
|
</section>
|
||||||
|
|
||||||
<section class="panel claude-panel">
|
|
||||||
<div class="panel-head">
|
|
||||||
<h2>Claude Trace</h2>
|
|
||||||
<div class="panel-actions">
|
|
||||||
<label>
|
|
||||||
Limit
|
|
||||||
<select id="claude-events-limit">
|
|
||||||
<option value="80">80</option>
|
|
||||||
<option value="150" selected>150</option>
|
|
||||||
<option value="300">300</option>
|
|
||||||
</select>
|
|
||||||
</label>
|
|
||||||
<button id="claude-events-refresh" type="button">Refresh</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="claude-event-feed" class="event-feed claude-event-feed"></div>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section class="panel history-panel">
|
<section class="panel history-panel">
|
||||||
<div class="panel-head">
|
<div class="panel-head">
|
||||||
<h2>Run History</h2>
|
<h2>Run History</h2>
|
||||||
@@ -231,7 +192,6 @@
|
|||||||
<label>AGENT_TOPOLOGY_MAX_DEPTH<input id="cfg-topology-depth" type="number" min="1" /></label>
|
<label>AGENT_TOPOLOGY_MAX_DEPTH<input id="cfg-topology-depth" type="number" min="1" /></label>
|
||||||
<label>AGENT_TOPOLOGY_MAX_RETRIES<input id="cfg-topology-retries" type="number" min="0" /></label>
|
<label>AGENT_TOPOLOGY_MAX_RETRIES<input id="cfg-topology-retries" type="number" min="0" /></label>
|
||||||
<label>AGENT_RELATIONSHIP_MAX_CHILDREN<input id="cfg-relationship-children" type="number" min="1" /></label>
|
<label>AGENT_RELATIONSHIP_MAX_CHILDREN<input id="cfg-relationship-children" type="number" min="1" /></label>
|
||||||
<label>AGENT_MERGE_CONFLICT_MAX_ATTEMPTS<input id="cfg-merge-conflict-attempts" type="number" min="1" /></label>
|
|
||||||
<label>AGENT_PORT_BASE<input id="cfg-port-base" type="number" min="1" /></label>
|
<label>AGENT_PORT_BASE<input id="cfg-port-base" type="number" min="1" /></label>
|
||||||
<label>AGENT_PORT_BLOCK_SIZE<input id="cfg-port-block-size" type="number" min="1" /></label>
|
<label>AGENT_PORT_BLOCK_SIZE<input id="cfg-port-block-size" type="number" min="1" /></label>
|
||||||
<label>AGENT_PORT_BLOCK_COUNT<input id="cfg-port-block-count" type="number" min="1" /></label>
|
<label>AGENT_PORT_BLOCK_COUNT<input id="cfg-port-block-count" type="number" min="1" /></label>
|
||||||
|
|||||||
@@ -79,8 +79,7 @@ p {
|
|||||||
grid-template-columns: minmax(0, 2fr) minmax(280px, 1fr);
|
grid-template-columns: minmax(0, 2fr) minmax(280px, 1fr);
|
||||||
grid-template-areas:
|
grid-template-areas:
|
||||||
"graph side"
|
"graph side"
|
||||||
"feed claude"
|
"feed history"
|
||||||
"history history"
|
|
||||||
"config config";
|
"config config";
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -130,10 +129,6 @@ p {
|
|||||||
grid-area: history;
|
grid-area: history;
|
||||||
}
|
}
|
||||||
|
|
||||||
.claude-panel {
|
|
||||||
grid-area: claude;
|
|
||||||
}
|
|
||||||
|
|
||||||
.config-panel {
|
.config-panel {
|
||||||
grid-area: config;
|
grid-area: config;
|
||||||
}
|
}
|
||||||
@@ -147,12 +142,6 @@ label {
|
|||||||
letter-spacing: 0.015em;
|
letter-spacing: 0.015em;
|
||||||
}
|
}
|
||||||
|
|
||||||
label.inline-checkbox {
|
|
||||||
flex-direction: row;
|
|
||||||
align-items: center;
|
|
||||||
gap: 0.45rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
input,
|
input,
|
||||||
select,
|
select,
|
||||||
textarea,
|
textarea,
|
||||||
@@ -319,14 +308,6 @@ button.danger {
|
|||||||
color: var(--critical);
|
color: var(--critical);
|
||||||
}
|
}
|
||||||
|
|
||||||
.claude-event-feed .event-row {
|
|
||||||
grid-template-columns: 110px 150px 1fr;
|
|
||||||
}
|
|
||||||
|
|
||||||
.claude-event-feed .event-type {
|
|
||||||
font-size: 0.7rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.history-table {
|
.history-table {
|
||||||
width: 100%;
|
width: 100%;
|
||||||
border-collapse: collapse;
|
border-collapse: collapse;
|
||||||
@@ -372,22 +353,6 @@ button.danger {
|
|||||||
border-color: rgba(255, 201, 74, 0.6);
|
border-color: rgba(255, 201, 74, 0.6);
|
||||||
}
|
}
|
||||||
|
|
||||||
.status-active {
|
|
||||||
color: var(--accent-cool);
|
|
||||||
border-color: rgba(86, 195, 255, 0.6);
|
|
||||||
}
|
|
||||||
|
|
||||||
.status-suspended,
|
|
||||||
.status-closed_with_conflicts {
|
|
||||||
color: var(--warn);
|
|
||||||
border-color: rgba(255, 201, 74, 0.6);
|
|
||||||
}
|
|
||||||
|
|
||||||
.status-closed {
|
|
||||||
color: var(--muted);
|
|
||||||
border-color: rgba(155, 184, 207, 0.45);
|
|
||||||
}
|
|
||||||
|
|
||||||
.status-unknown {
|
.status-unknown {
|
||||||
color: var(--muted);
|
color: var(--muted);
|
||||||
border-color: rgba(155, 184, 207, 0.45);
|
border-color: rgba(155, 184, 207, 0.45);
|
||||||
@@ -498,7 +463,6 @@ button.danger {
|
|||||||
"graph"
|
"graph"
|
||||||
"side"
|
"side"
|
||||||
"feed"
|
"feed"
|
||||||
"claude"
|
|
||||||
"history"
|
"history"
|
||||||
"config";
|
"config";
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,17 +3,11 @@ import { mkdir, readFile, writeFile } from "node:fs/promises";
|
|||||||
import { resolve } from "node:path";
|
import { resolve } from "node:path";
|
||||||
import { SchemaDrivenExecutionEngine } from "../agents/orchestration.js";
|
import { SchemaDrivenExecutionEngine } from "../agents/orchestration.js";
|
||||||
import { parseAgentManifest, type AgentManifest } from "../agents/manifest.js";
|
import { parseAgentManifest, type AgentManifest } from "../agents/manifest.js";
|
||||||
import { FileSystemProjectContextStore } from "../agents/project-context.js";
|
|
||||||
import type {
|
import type {
|
||||||
ActorExecutionResult,
|
ActorExecutionResult,
|
||||||
ActorExecutor,
|
ActorExecutor,
|
||||||
PipelineAggregateStatus,
|
PipelineAggregateStatus,
|
||||||
} from "../agents/pipeline.js";
|
} from "../agents/pipeline.js";
|
||||||
import {
|
|
||||||
FileSystemSessionMetadataStore,
|
|
||||||
SessionWorktreeManager,
|
|
||||||
type SessionMetadata,
|
|
||||||
} from "../agents/session-lifecycle.js";
|
|
||||||
import { loadConfig, type AppConfig } from "../config.js";
|
import { loadConfig, type AppConfig } from "../config.js";
|
||||||
import { parseEnvFile } from "./env-store.js";
|
import { parseEnvFile } from "./env-store.js";
|
||||||
import {
|
import {
|
||||||
@@ -246,19 +240,6 @@ async function loadRuntimeConfig(envPath: string): Promise<Readonly<AppConfig>>
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
function resolveRuntimePaths(input: {
|
|
||||||
workspaceRoot: string;
|
|
||||||
config: Readonly<AppConfig>;
|
|
||||||
}): {
|
|
||||||
stateRoot: string;
|
|
||||||
worktreeRoot: string;
|
|
||||||
} {
|
|
||||||
return {
|
|
||||||
stateRoot: resolve(input.workspaceRoot, input.config.orchestration.stateRoot),
|
|
||||||
worktreeRoot: resolve(input.workspaceRoot, input.config.provisioning.gitWorktree.rootDirectory),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async function writeRunMeta(input: {
|
async function writeRunMeta(input: {
|
||||||
stateRoot: string;
|
stateRoot: string;
|
||||||
sessionId: string;
|
sessionId: string;
|
||||||
@@ -338,102 +319,6 @@ export class UiRunService {
|
|||||||
this.envFilePath = resolve(this.workspaceRoot, input.envFilePath ?? ".env");
|
this.envFilePath = resolve(this.workspaceRoot, input.envFilePath ?? ".env");
|
||||||
}
|
}
|
||||||
|
|
||||||
private async loadRuntime(): Promise<{
|
|
||||||
config: Readonly<AppConfig>;
|
|
||||||
stateRoot: string;
|
|
||||||
sessionStore: FileSystemSessionMetadataStore;
|
|
||||||
worktreeManager: SessionWorktreeManager;
|
|
||||||
}> {
|
|
||||||
const config = await loadRuntimeConfig(this.envFilePath);
|
|
||||||
const paths = resolveRuntimePaths({
|
|
||||||
workspaceRoot: this.workspaceRoot,
|
|
||||||
config,
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
config,
|
|
||||||
stateRoot: paths.stateRoot,
|
|
||||||
sessionStore: new FileSystemSessionMetadataStore({
|
|
||||||
stateRoot: paths.stateRoot,
|
|
||||||
}),
|
|
||||||
worktreeManager: new SessionWorktreeManager({
|
|
||||||
worktreeRoot: paths.worktreeRoot,
|
|
||||||
baseRef: config.provisioning.gitWorktree.baseRef,
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async createSession(input: {
|
|
||||||
projectPath: string;
|
|
||||||
sessionId?: string;
|
|
||||||
}): Promise<SessionMetadata> {
|
|
||||||
const runtime = await this.loadRuntime();
|
|
||||||
const sessionId = input.sessionId?.trim() || toSessionId();
|
|
||||||
const baseWorkspacePath = runtime.worktreeManager.resolveBaseWorkspacePath(sessionId);
|
|
||||||
const session = await runtime.sessionStore.createSession({
|
|
||||||
sessionId,
|
|
||||||
projectPath: resolve(input.projectPath),
|
|
||||||
baseWorkspacePath,
|
|
||||||
});
|
|
||||||
|
|
||||||
await runtime.worktreeManager.initializeSessionBaseWorkspace({
|
|
||||||
sessionId: session.sessionId,
|
|
||||||
projectPath: session.projectPath,
|
|
||||||
baseWorkspacePath: session.baseWorkspacePath,
|
|
||||||
});
|
|
||||||
|
|
||||||
return session;
|
|
||||||
}
|
|
||||||
|
|
||||||
async listSessions(): Promise<SessionMetadata[]> {
|
|
||||||
const runtime = await this.loadRuntime();
|
|
||||||
return runtime.sessionStore.listSessions();
|
|
||||||
}
|
|
||||||
|
|
||||||
async readSession(sessionId: string): Promise<SessionMetadata | undefined> {
|
|
||||||
const runtime = await this.loadRuntime();
|
|
||||||
return runtime.sessionStore.readSession(sessionId);
|
|
||||||
}
|
|
||||||
|
|
||||||
async closeSession(input: {
|
|
||||||
sessionId: string;
|
|
||||||
mergeToProject?: boolean;
|
|
||||||
}): Promise<SessionMetadata> {
|
|
||||||
const runtime = await this.loadRuntime();
|
|
||||||
const session = await runtime.sessionStore.readSession(input.sessionId);
|
|
||||||
if (!session) {
|
|
||||||
throw new Error(`Session \"${input.sessionId}\" does not exist.`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const sessionProjectContextStore = new FileSystemProjectContextStore({
|
|
||||||
filePath: runtime.sessionStore.getSessionProjectContextPath(session.sessionId),
|
|
||||||
});
|
|
||||||
const projectContext = await sessionProjectContextStore.readState();
|
|
||||||
const taskWorktreePaths = projectContext.taskQueue
|
|
||||||
.map((task) => task.worktreePath)
|
|
||||||
.filter((path): path is string => typeof path === "string" && path.trim().length > 0);
|
|
||||||
|
|
||||||
const outcome = await runtime.worktreeManager.closeSession({
|
|
||||||
session,
|
|
||||||
taskWorktreePaths,
|
|
||||||
mergeBaseIntoProject: input.mergeToProject === true,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (outcome.kind === "fatal_error") {
|
|
||||||
throw new Error(`Session close failed: ${outcome.error}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (outcome.kind === "conflict") {
|
|
||||||
return runtime.sessionStore.updateSession(session.sessionId, {
|
|
||||||
sessionStatus: "closed_with_conflicts",
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
return runtime.sessionStore.updateSession(session.sessionId, {
|
|
||||||
sessionStatus: "closed",
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
listRuns(): RunRecord[] {
|
listRuns(): RunRecord[] {
|
||||||
const output = [...this.runHistory.values()].sort((left, right) => {
|
const output = [...this.runHistory.values()].sort((left, right) => {
|
||||||
return right.startedAt.localeCompare(left.startedAt);
|
return right.startedAt.localeCompare(left.startedAt);
|
||||||
@@ -446,24 +331,11 @@ export class UiRunService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async startRun(input: StartRunInput): Promise<RunRecord> {
|
async startRun(input: StartRunInput): Promise<RunRecord> {
|
||||||
const runtime = await this.loadRuntime();
|
const config = await loadRuntimeConfig(this.envFilePath);
|
||||||
const config = runtime.config;
|
|
||||||
const manifest = parseAgentManifest(input.manifest);
|
const manifest = parseAgentManifest(input.manifest);
|
||||||
const executionMode = input.executionMode ?? "mock";
|
const executionMode = input.executionMode ?? "mock";
|
||||||
const provider = input.provider ?? "codex";
|
const provider = input.provider ?? "codex";
|
||||||
const sessionId = input.sessionId?.trim() || toSessionId();
|
const sessionId = input.sessionId?.trim() || toSessionId();
|
||||||
const session = input.sessionId?.trim()
|
|
||||||
? await runtime.sessionStore.readSession(sessionId)
|
|
||||||
: undefined;
|
|
||||||
if (input.sessionId?.trim() && !session) {
|
|
||||||
throw new Error(`Session \"${sessionId}\" does not exist.`);
|
|
||||||
}
|
|
||||||
if (
|
|
||||||
session &&
|
|
||||||
(session.sessionStatus === "closed" || session.sessionStatus === "closed_with_conflicts")
|
|
||||||
) {
|
|
||||||
throw new Error(`Session \"${sessionId}\" is closed and cannot run new tasks.`);
|
|
||||||
}
|
|
||||||
const runId = randomUUID();
|
const runId = randomUUID();
|
||||||
const controller = new AbortController();
|
const controller = new AbortController();
|
||||||
|
|
||||||
@@ -487,8 +359,6 @@ export class UiRunService {
|
|||||||
provider,
|
provider,
|
||||||
initialPrompt: input.prompt,
|
initialPrompt: input.prompt,
|
||||||
config,
|
config,
|
||||||
projectPath: session?.baseWorkspacePath ?? this.workspaceRoot,
|
|
||||||
observabilityRootPath: this.workspaceRoot,
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -506,20 +376,11 @@ export class UiRunService {
|
|||||||
actorExecutors,
|
actorExecutors,
|
||||||
settings: {
|
settings: {
|
||||||
workspaceRoot: this.workspaceRoot,
|
workspaceRoot: this.workspaceRoot,
|
||||||
stateRoot: runtime.stateRoot,
|
stateRoot: config.orchestration.stateRoot,
|
||||||
projectContextPath: session
|
projectContextPath: config.orchestration.projectContextPath,
|
||||||
? runtime.sessionStore.getSessionProjectContextPath(sessionId)
|
|
||||||
: resolve(this.workspaceRoot, config.orchestration.projectContextPath),
|
|
||||||
runtimeContext: {
|
runtimeContext: {
|
||||||
ui_mode: executionMode,
|
ui_mode: executionMode,
|
||||||
run_provider: provider,
|
run_provider: provider,
|
||||||
...(session
|
|
||||||
? {
|
|
||||||
session_id: sessionId,
|
|
||||||
project_path: session.projectPath,
|
|
||||||
base_workspace_path: session.baseWorkspacePath,
|
|
||||||
}
|
|
||||||
: {}),
|
|
||||||
...(input.runtimeContextOverrides ?? {}),
|
...(input.runtimeContextOverrides ?? {}),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -527,7 +388,7 @@ export class UiRunService {
|
|||||||
});
|
});
|
||||||
|
|
||||||
await writeRunMeta({
|
await writeRunMeta({
|
||||||
stateRoot: runtime.stateRoot,
|
stateRoot: config.orchestration.stateRoot,
|
||||||
sessionId,
|
sessionId,
|
||||||
run: record,
|
run: record,
|
||||||
});
|
});
|
||||||
@@ -543,7 +404,6 @@ export class UiRunService {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
signal: controller.signal,
|
signal: controller.signal,
|
||||||
...(session ? { sessionMetadata: session } : {}),
|
|
||||||
});
|
});
|
||||||
|
|
||||||
const completedRecord = this.runHistory.get(runId);
|
const completedRecord = this.runHistory.get(runId);
|
||||||
@@ -559,7 +419,7 @@ export class UiRunService {
|
|||||||
this.runHistory.set(runId, next);
|
this.runHistory.set(runId, next);
|
||||||
|
|
||||||
await writeRunMeta({
|
await writeRunMeta({
|
||||||
stateRoot: runtime.stateRoot,
|
stateRoot: config.orchestration.stateRoot,
|
||||||
sessionId,
|
sessionId,
|
||||||
run: next,
|
run: next,
|
||||||
});
|
});
|
||||||
@@ -579,7 +439,7 @@ export class UiRunService {
|
|||||||
this.runHistory.set(runId, next);
|
this.runHistory.set(runId, next);
|
||||||
|
|
||||||
await writeRunMeta({
|
await writeRunMeta({
|
||||||
stateRoot: runtime.stateRoot,
|
stateRoot: config.orchestration.stateRoot,
|
||||||
sessionId,
|
sessionId,
|
||||||
run: next,
|
run: next,
|
||||||
});
|
});
|
||||||
|
|||||||
167
src/ui/server.ts
167
src/ui/server.ts
@@ -6,7 +6,6 @@ import { buildSessionGraphInsight, buildSessionSummaries } from "./session-insig
|
|||||||
import { UiConfigStore, type LimitSettings, type RuntimeNotificationSettings, type SecurityPolicySettings } from "./config-store.js";
|
import { UiConfigStore, type LimitSettings, type RuntimeNotificationSettings, type SecurityPolicySettings } from "./config-store.js";
|
||||||
import { ManifestStore } from "./manifest-store.js";
|
import { ManifestStore } from "./manifest-store.js";
|
||||||
import { filterRuntimeEvents, readRuntimeEvents } from "./runtime-events-store.js";
|
import { filterRuntimeEvents, readRuntimeEvents } from "./runtime-events-store.js";
|
||||||
import { filterClaudeTraceEvents, readClaudeTraceEvents } from "./claude-trace-store.js";
|
|
||||||
import { parseJsonBody, sendJson, methodNotAllowed, notFound, serveStaticFile } from "./http-utils.js";
|
import { parseJsonBody, sendJson, methodNotAllowed, notFound, serveStaticFile } from "./http-utils.js";
|
||||||
import { readRunMetaBySession, UiRunService, type RunExecutionMode } from "./run-service.js";
|
import { readRunMetaBySession, UiRunService, type RunExecutionMode } from "./run-service.js";
|
||||||
import type { RunProvider } from "./provider-executor.js";
|
import type { RunProvider } from "./provider-executor.js";
|
||||||
@@ -24,14 +23,6 @@ type StartRunRequest = {
|
|||||||
provider?: RunProvider;
|
provider?: RunProvider;
|
||||||
};
|
};
|
||||||
|
|
||||||
type CreateSessionRequest = {
|
|
||||||
projectPath: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
type CloseSessionRequest = {
|
|
||||||
mergeToProject?: boolean;
|
|
||||||
};
|
|
||||||
|
|
||||||
function parsePort(value: string | undefined): number {
|
function parsePort(value: string | undefined): number {
|
||||||
const parsed = Number(value ?? "4317");
|
const parsed = Number(value ?? "4317");
|
||||||
if (!Number.isInteger(parsed) || parsed < 1 || parsed > 65535) {
|
if (!Number.isInteger(parsed) || parsed < 1 || parsed > 65535) {
|
||||||
@@ -111,23 +102,14 @@ function ensureProvider(value: unknown): RunProvider {
|
|||||||
return value === "claude" ? "claude" : "codex";
|
return value === "claude" ? "claude" : "codex";
|
||||||
}
|
}
|
||||||
|
|
||||||
function ensureNonEmptyString(value: unknown, field: string): string {
|
|
||||||
if (typeof value !== "string" || value.trim().length === 0) {
|
|
||||||
throw new Error(`Field "${field}" is required.`);
|
|
||||||
}
|
|
||||||
return value.trim();
|
|
||||||
}
|
|
||||||
|
|
||||||
async function readRuntimePaths(configStore: UiConfigStore, workspaceRoot: string): Promise<{
|
async function readRuntimePaths(configStore: UiConfigStore, workspaceRoot: string): Promise<{
|
||||||
stateRoot: string;
|
stateRoot: string;
|
||||||
runtimeEventLogPath: string;
|
runtimeEventLogPath: string;
|
||||||
claudeTraceLogPath: string;
|
|
||||||
}> {
|
}> {
|
||||||
const snapshot = await configStore.readSnapshot();
|
const snapshot = await configStore.readSnapshot();
|
||||||
return {
|
return {
|
||||||
stateRoot: resolve(workspaceRoot, snapshot.paths.stateRoot),
|
stateRoot: resolve(workspaceRoot, snapshot.paths.stateRoot),
|
||||||
runtimeEventLogPath: resolve(workspaceRoot, snapshot.paths.runtimeEventLogPath),
|
runtimeEventLogPath: resolve(workspaceRoot, snapshot.paths.runtimeEventLogPath),
|
||||||
claudeTraceLogPath: resolve(workspaceRoot, snapshot.paths.claudeTraceLogPath),
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -316,42 +298,7 @@ async function handleApiRequest(input: {
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (pathname === "/api/claude-trace") {
|
|
||||||
if (method !== "GET") {
|
|
||||||
methodNotAllowed(response);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const { claudeTraceLogPath } = await readRuntimePaths(configStore, workspaceRoot);
|
|
||||||
const limit = parseLimit(requestUrl.searchParams.get("limit"), 200);
|
|
||||||
const sessionId = requestUrl.searchParams.get("sessionId") ?? undefined;
|
|
||||||
const events = filterClaudeTraceEvents(await readClaudeTraceEvents(claudeTraceLogPath), {
|
|
||||||
...(sessionId ? { sessionId } : {}),
|
|
||||||
limit,
|
|
||||||
});
|
|
||||||
|
|
||||||
sendJson(response, 200, {
|
|
||||||
ok: true,
|
|
||||||
events,
|
|
||||||
});
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (pathname === "/api/sessions") {
|
if (pathname === "/api/sessions") {
|
||||||
if (method === "POST") {
|
|
||||||
const body = await parseJsonBody<CreateSessionRequest>(request);
|
|
||||||
const projectPath = ensureNonEmptyString(body.projectPath, "projectPath");
|
|
||||||
const session = await runService.createSession({
|
|
||||||
projectPath,
|
|
||||||
});
|
|
||||||
|
|
||||||
sendJson(response, 201, {
|
|
||||||
ok: true,
|
|
||||||
session,
|
|
||||||
});
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (method !== "GET") {
|
if (method !== "GET") {
|
||||||
methodNotAllowed(response);
|
methodNotAllowed(response);
|
||||||
return true;
|
return true;
|
||||||
@@ -362,12 +309,10 @@ async function handleApiRequest(input: {
|
|||||||
stateRoot,
|
stateRoot,
|
||||||
runtimeEventLogPath,
|
runtimeEventLogPath,
|
||||||
});
|
});
|
||||||
const metadata = await runService.listSessions();
|
|
||||||
|
|
||||||
sendJson(response, 200, {
|
sendJson(response, 200, {
|
||||||
ok: true,
|
ok: true,
|
||||||
sessions,
|
sessions,
|
||||||
sessionMetadata: metadata,
|
|
||||||
runs: runService.listRuns(),
|
runs: runService.listRuns(),
|
||||||
});
|
});
|
||||||
return true;
|
return true;
|
||||||
@@ -417,118 +362,6 @@ async function handleApiRequest(input: {
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (pathname.startsWith("/api/sessions/") && pathname.endsWith("/run")) {
|
|
||||||
if (method !== "POST") {
|
|
||||||
methodNotAllowed(response);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const sessionId = toRelativePathFromApi(pathname.slice("/api/sessions/".length, -"/run".length));
|
|
||||||
if (!sessionId) {
|
|
||||||
sendJson(response, 400, {
|
|
||||||
ok: false,
|
|
||||||
error: "Session id is required.",
|
|
||||||
});
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = await parseJsonBody<StartRunRequest>(request);
|
|
||||||
if (typeof body.prompt !== "string" || body.prompt.trim().length === 0) {
|
|
||||||
sendJson(response, 400, {
|
|
||||||
ok: false,
|
|
||||||
error: 'Field "prompt" is required.',
|
|
||||||
});
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const manifestSource = (() => {
|
|
||||||
if (body.manifest !== undefined) {
|
|
||||||
return body.manifest;
|
|
||||||
}
|
|
||||||
if (typeof body.manifestPath === "string" && body.manifestPath.trim().length > 0) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
})();
|
|
||||||
|
|
||||||
const resolvedManifest = manifestSource ?? (() => {
|
|
||||||
if (!body.manifestPath) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
return body.manifestPath;
|
|
||||||
})();
|
|
||||||
|
|
||||||
let manifest: unknown;
|
|
||||||
if (typeof resolvedManifest === "string") {
|
|
||||||
manifest = (await manifestStore.read(resolvedManifest)).source;
|
|
||||||
} else if (resolvedManifest !== undefined) {
|
|
||||||
manifest = resolvedManifest;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!manifest) {
|
|
||||||
sendJson(response, 400, {
|
|
||||||
ok: false,
|
|
||||||
error: "A manifest or manifestPath is required to start a run.",
|
|
||||||
});
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const record = await runService.startRun({
|
|
||||||
prompt: body.prompt,
|
|
||||||
manifest,
|
|
||||||
manifestPath: body.manifestPath,
|
|
||||||
sessionId,
|
|
||||||
topologyHint: body.topologyHint,
|
|
||||||
initialFlags: ensureBooleanRecord(body.initialFlags),
|
|
||||||
runtimeContextOverrides: ensureRuntimeContext(body.runtimeContextOverrides),
|
|
||||||
simulateValidationNodeIds: ensureStringArray(body.simulateValidationNodeIds),
|
|
||||||
executionMode: ensureExecutionMode(body.executionMode),
|
|
||||||
provider: ensureProvider(body.provider),
|
|
||||||
});
|
|
||||||
|
|
||||||
sendJson(response, 202, {
|
|
||||||
ok: true,
|
|
||||||
run: record,
|
|
||||||
});
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (pathname.startsWith("/api/sessions/") && pathname.endsWith("/close")) {
|
|
||||||
if (method !== "POST") {
|
|
||||||
methodNotAllowed(response);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const sessionId = toRelativePathFromApi(pathname.slice("/api/sessions/".length, -"/close".length));
|
|
||||||
if (!sessionId) {
|
|
||||||
sendJson(response, 400, {
|
|
||||||
ok: false,
|
|
||||||
error: "Session id is required.",
|
|
||||||
});
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
let body: CloseSessionRequest = {};
|
|
||||||
try {
|
|
||||||
body = await parseJsonBody<CloseSessionRequest>(request);
|
|
||||||
} catch (error) {
|
|
||||||
const message = error instanceof Error ? error.message : String(error);
|
|
||||||
if (message !== "Request body is required.") {
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const session = await runService.closeSession({
|
|
||||||
sessionId,
|
|
||||||
mergeToProject: body.mergeToProject === true,
|
|
||||||
});
|
|
||||||
|
|
||||||
sendJson(response, 200, {
|
|
||||||
ok: true,
|
|
||||||
session,
|
|
||||||
});
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (pathname === "/api/runs") {
|
if (pathname === "/api/runs") {
|
||||||
if (method === "GET") {
|
if (method === "GET") {
|
||||||
sendJson(response, 200, {
|
sendJson(response, 200, {
|
||||||
|
|||||||
@@ -1,296 +0,0 @@
|
|||||||
import test from "node:test";
|
|
||||||
import assert from "node:assert/strict";
|
|
||||||
import { mkdtemp, readFile } from "node:fs/promises";
|
|
||||||
import { tmpdir } from "node:os";
|
|
||||||
import { join } from "node:path";
|
|
||||||
import type { SDKMessage } from "@anthropic-ai/claude-agent-sdk";
|
|
||||||
import { ClaudeObservabilityLogger, summarizeClaudeMessage } from "../src/ui/claude-observability.js";
|
|
||||||
|
|
||||||
test("summarizeClaudeMessage returns compact result metadata in summary mode", () => {
|
|
||||||
const message = {
|
|
||||||
type: "result",
|
|
||||||
subtype: "success",
|
|
||||||
stop_reason: "end_turn",
|
|
||||||
num_turns: 1,
|
|
||||||
total_cost_usd: 0.0012,
|
|
||||||
usage: {
|
|
||||||
input_tokens: 120,
|
|
||||||
output_tokens: 40,
|
|
||||||
},
|
|
||||||
result: "{\"status\":\"success\"}",
|
|
||||||
duration_ms: 40,
|
|
||||||
duration_api_ms: 32,
|
|
||||||
is_error: false,
|
|
||||||
modelUsage: {},
|
|
||||||
permission_denials: [],
|
|
||||||
uuid: "uuid-1",
|
|
||||||
session_id: "sdk-session-1",
|
|
||||||
} as unknown as SDKMessage;
|
|
||||||
|
|
||||||
const summary = summarizeClaudeMessage(message, "summary");
|
|
||||||
|
|
||||||
assert.equal(summary.messageType, "result");
|
|
||||||
assert.equal(summary.messageSubtype, "success");
|
|
||||||
assert.equal(summary.sdkSessionId, "sdk-session-1");
|
|
||||||
assert.equal(summary.summary, "Claude query result success.");
|
|
||||||
assert.equal(summary.data?.numTurns, 1);
|
|
||||||
const usage = summary.data?.usage as Record<string, unknown> | undefined;
|
|
||||||
assert.equal(usage?.input_tokens, 120);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("summarizeClaudeMessage redacts sensitive fields in full mode", () => {
|
|
||||||
const message = {
|
|
||||||
type: "system",
|
|
||||||
subtype: "init",
|
|
||||||
session_id: "sdk-session-2",
|
|
||||||
uuid: "uuid-2",
|
|
||||||
apiKey: "top-secret",
|
|
||||||
nested: {
|
|
||||||
authToken: "really-secret",
|
|
||||||
ok: true,
|
|
||||||
},
|
|
||||||
} as unknown as SDKMessage;
|
|
||||||
|
|
||||||
const summary = summarizeClaudeMessage(message, "full");
|
|
||||||
const payload = summary.data?.message as Record<string, unknown> | undefined;
|
|
||||||
const nested = payload?.nested as Record<string, unknown> | undefined;
|
|
||||||
|
|
||||||
assert.equal(summary.messageType, "system");
|
|
||||||
assert.equal(summary.messageSubtype, "init");
|
|
||||||
assert.equal(payload?.apiKey, "[redacted]");
|
|
||||||
assert.equal(nested?.authToken, "[redacted]");
|
|
||||||
assert.equal(nested?.ok, true);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("ClaudeObservabilityLogger samples tool_progress messages for stdout", () => {
|
|
||||||
const lines: string[] = [];
|
|
||||||
const originalLog = console.log;
|
|
||||||
const originalNow = Date.now;
|
|
||||||
let now = 1000;
|
|
||||||
|
|
||||||
console.log = (line?: unknown) => {
|
|
||||||
lines.push(String(line ?? ""));
|
|
||||||
};
|
|
||||||
Date.now = () => now;
|
|
||||||
|
|
||||||
try {
|
|
||||||
const logger = new ClaudeObservabilityLogger({
|
|
||||||
workspaceRoot: process.cwd(),
|
|
||||||
config: {
|
|
||||||
mode: "stdout",
|
|
||||||
verbosity: "summary",
|
|
||||||
logPath: ".ai_ops/events/claude-trace.ndjson",
|
|
||||||
includePartialMessages: false,
|
|
||||||
debug: false,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const context = {
|
|
||||||
sessionId: "session-a",
|
|
||||||
nodeId: "node-a",
|
|
||||||
attempt: 1,
|
|
||||||
depth: 0,
|
|
||||||
};
|
|
||||||
|
|
||||||
const makeMessage = (): SDKMessage =>
|
|
||||||
({
|
|
||||||
type: "tool_progress",
|
|
||||||
tool_name: "Bash",
|
|
||||||
tool_use_id: "tool-1",
|
|
||||||
parent_tool_use_id: null,
|
|
||||||
elapsed_time_seconds: 1,
|
|
||||||
uuid: "uuid-tool",
|
|
||||||
session_id: "sdk-session-tool",
|
|
||||||
}) as unknown as SDKMessage;
|
|
||||||
|
|
||||||
logger.recordMessage({
|
|
||||||
context,
|
|
||||||
message: makeMessage(),
|
|
||||||
});
|
|
||||||
|
|
||||||
now += 300;
|
|
||||||
logger.recordMessage({
|
|
||||||
context,
|
|
||||||
message: makeMessage(),
|
|
||||||
});
|
|
||||||
|
|
||||||
now += 1200;
|
|
||||||
logger.recordMessage({
|
|
||||||
context,
|
|
||||||
message: makeMessage(),
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.equal(lines.length, 2);
|
|
||||||
assert.match(lines[0] ?? "", /^\[claude-trace\] /);
|
|
||||||
assert.match(lines[1] ?? "", /"suppressedSinceLastEmit":1/);
|
|
||||||
} finally {
|
|
||||||
console.log = originalLog;
|
|
||||||
Date.now = originalNow;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
test("ClaudeObservabilityLogger keeps assistant/user message records in file output", async () => {
|
|
||||||
const workspace = await mkdtemp(join(tmpdir(), "claude-obsv-test-"));
|
|
||||||
const logPath = ".ai_ops/events/claude-trace.ndjson";
|
|
||||||
const logger = new ClaudeObservabilityLogger({
|
|
||||||
workspaceRoot: workspace,
|
|
||||||
config: {
|
|
||||||
mode: "file",
|
|
||||||
verbosity: "summary",
|
|
||||||
logPath,
|
|
||||||
includePartialMessages: false,
|
|
||||||
debug: false,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const context = {
|
|
||||||
sessionId: "session-file",
|
|
||||||
nodeId: "node-file",
|
|
||||||
attempt: 1,
|
|
||||||
depth: 0,
|
|
||||||
};
|
|
||||||
|
|
||||||
logger.recordQueryStarted({
|
|
||||||
context,
|
|
||||||
});
|
|
||||||
logger.recordMessage({
|
|
||||||
context,
|
|
||||||
message: {
|
|
||||||
type: "assistant",
|
|
||||||
uuid: "assistant-1",
|
|
||||||
session_id: "sdk-file-1",
|
|
||||||
parent_tool_use_id: null,
|
|
||||||
message: {} as never,
|
|
||||||
} as unknown as SDKMessage,
|
|
||||||
});
|
|
||||||
logger.recordMessage({
|
|
||||||
context,
|
|
||||||
message: {
|
|
||||||
type: "user",
|
|
||||||
uuid: "user-1",
|
|
||||||
session_id: "sdk-file-1",
|
|
||||||
parent_tool_use_id: null,
|
|
||||||
message: {} as never,
|
|
||||||
} as unknown as SDKMessage,
|
|
||||||
});
|
|
||||||
logger.recordMessage({
|
|
||||||
context,
|
|
||||||
message: {
|
|
||||||
type: "result",
|
|
||||||
subtype: "success",
|
|
||||||
stop_reason: "end_turn",
|
|
||||||
num_turns: 1,
|
|
||||||
total_cost_usd: 0.0012,
|
|
||||||
usage: {
|
|
||||||
input_tokens: 100,
|
|
||||||
output_tokens: 20,
|
|
||||||
},
|
|
||||||
result: "{}",
|
|
||||||
duration_ms: 10,
|
|
||||||
duration_api_ms: 9,
|
|
||||||
is_error: false,
|
|
||||||
modelUsage: {},
|
|
||||||
permission_denials: [],
|
|
||||||
uuid: "result-1",
|
|
||||||
session_id: "sdk-file-1",
|
|
||||||
} as unknown as SDKMessage,
|
|
||||||
});
|
|
||||||
logger.recordQueryCompleted({
|
|
||||||
context,
|
|
||||||
});
|
|
||||||
|
|
||||||
await logger.close();
|
|
||||||
|
|
||||||
const filePath = join(workspace, logPath);
|
|
||||||
const content = await readFile(filePath, "utf8");
|
|
||||||
const lines = content.split(/\r?\n/).filter((line) => line.trim().length > 0);
|
|
||||||
const records = lines.map((line) => JSON.parse(line) as Record<string, unknown>);
|
|
||||||
const messageTypes = records
|
|
||||||
.map((record) => record.sdkMessageType)
|
|
||||||
.filter((value) => typeof value === "string");
|
|
||||||
|
|
||||||
assert.equal(messageTypes.includes("assistant"), true);
|
|
||||||
assert.equal(messageTypes.includes("user"), true);
|
|
||||||
assert.equal(messageTypes.includes("result"), true);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("summarizeClaudeMessage maps task_notification system subtype", () => {
|
|
||||||
const message = {
|
|
||||||
type: "system",
|
|
||||||
subtype: "task_notification",
|
|
||||||
task_id: "task-1",
|
|
||||||
status: "completed",
|
|
||||||
output_file: "/tmp/out.txt",
|
|
||||||
summary: "Task complete",
|
|
||||||
uuid: "uuid-task",
|
|
||||||
session_id: "sdk-session-task",
|
|
||||||
} as unknown as SDKMessage;
|
|
||||||
|
|
||||||
const summary = summarizeClaudeMessage(message, "summary");
|
|
||||||
|
|
||||||
assert.equal(summary.messageType, "system");
|
|
||||||
assert.equal(summary.messageSubtype, "task_notification");
|
|
||||||
assert.equal(summary.summary, "Task notification: completed.");
|
|
||||||
assert.equal(summary.data?.taskId, "task-1");
|
|
||||||
});
|
|
||||||
|
|
||||||
test("ClaudeObservabilityLogger honors includePartialMessages for stream events", () => {
|
|
||||||
const lines: string[] = [];
|
|
||||||
const originalLog = console.log;
|
|
||||||
console.log = (line?: unknown) => {
|
|
||||||
lines.push(String(line ?? ""));
|
|
||||||
};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const context = {
|
|
||||||
sessionId: "session-stream",
|
|
||||||
nodeId: "node-stream",
|
|
||||||
attempt: 1,
|
|
||||||
depth: 0,
|
|
||||||
};
|
|
||||||
const streamMessage = {
|
|
||||||
type: "stream_event",
|
|
||||||
event: {
|
|
||||||
type: "content_block_delta",
|
|
||||||
},
|
|
||||||
parent_tool_use_id: null,
|
|
||||||
uuid: "stream-1",
|
|
||||||
session_id: "sdk-session-stream",
|
|
||||||
} as unknown as SDKMessage;
|
|
||||||
|
|
||||||
const withoutPartial = new ClaudeObservabilityLogger({
|
|
||||||
workspaceRoot: process.cwd(),
|
|
||||||
config: {
|
|
||||||
mode: "stdout",
|
|
||||||
verbosity: "summary",
|
|
||||||
logPath: ".ai_ops/events/claude-trace.ndjson",
|
|
||||||
includePartialMessages: false,
|
|
||||||
debug: false,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
withoutPartial.recordMessage({
|
|
||||||
context,
|
|
||||||
message: streamMessage,
|
|
||||||
});
|
|
||||||
|
|
||||||
const withPartial = new ClaudeObservabilityLogger({
|
|
||||||
workspaceRoot: process.cwd(),
|
|
||||||
config: {
|
|
||||||
mode: "stdout",
|
|
||||||
verbosity: "summary",
|
|
||||||
logPath: ".ai_ops/events/claude-trace.ndjson",
|
|
||||||
includePartialMessages: true,
|
|
||||||
debug: false,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
withPartial.recordMessage({
|
|
||||||
context,
|
|
||||||
message: streamMessage,
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.equal(lines.length, 1);
|
|
||||||
assert.match(lines[0] ?? "", /\"sdkMessageType\":\"stream_event\"/);
|
|
||||||
} finally {
|
|
||||||
console.log = originalLog;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
import test from "node:test";
|
|
||||||
import assert from "node:assert/strict";
|
|
||||||
import { mkdtemp, writeFile } from "node:fs/promises";
|
|
||||||
import { tmpdir } from "node:os";
|
|
||||||
import { join } from "node:path";
|
|
||||||
import { filterClaudeTraceEvents, readClaudeTraceEvents } from "../src/ui/claude-trace-store.js";
|
|
||||||
|
|
||||||
test("readClaudeTraceEvents parses and sorts ndjson records", async () => {
|
|
||||||
const workspace = await mkdtemp(join(tmpdir(), "claude-trace-store-"));
|
|
||||||
const logPath = join(workspace, "claude-trace.ndjson");
|
|
||||||
await writeFile(
|
|
||||||
logPath,
|
|
||||||
[
|
|
||||||
'{"timestamp":"2026-02-24T17:27:05.000Z","message":"later","sessionId":"s1"}',
|
|
||||||
'not-json',
|
|
||||||
'{"timestamp":"2026-02-24T17:26:00.000Z","message":"earlier","sessionId":"s1"}',
|
|
||||||
'{"message":"missing timestamp"}',
|
|
||||||
].join("\n"),
|
|
||||||
"utf8",
|
|
||||||
);
|
|
||||||
|
|
||||||
const events = await readClaudeTraceEvents(logPath);
|
|
||||||
assert.equal(events.length, 2);
|
|
||||||
assert.equal(events[0]?.message, "earlier");
|
|
||||||
assert.equal(events[1]?.message, "later");
|
|
||||||
});
|
|
||||||
|
|
||||||
test("filterClaudeTraceEvents filters by session and limit", () => {
|
|
||||||
const events = [
|
|
||||||
{ timestamp: "2026-02-24T17:00:00.000Z", message: "a", sessionId: "s1" },
|
|
||||||
{ timestamp: "2026-02-24T17:01:00.000Z", message: "b", sessionId: "s2" },
|
|
||||||
{ timestamp: "2026-02-24T17:02:00.000Z", message: "c", sessionId: "s1" },
|
|
||||||
];
|
|
||||||
|
|
||||||
const filtered = filterClaudeTraceEvents(events, {
|
|
||||||
sessionId: "s1",
|
|
||||||
limit: 1,
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.equal(filtered.length, 1);
|
|
||||||
assert.equal(filtered[0]?.message, "c");
|
|
||||||
});
|
|
||||||
@@ -12,7 +12,6 @@ test("loads defaults and freezes config", () => {
|
|||||||
|
|
||||||
assert.equal(config.agentManager.maxConcurrentAgents, 4);
|
assert.equal(config.agentManager.maxConcurrentAgents, 4);
|
||||||
assert.equal(config.orchestration.maxDepth, 4);
|
assert.equal(config.orchestration.maxDepth, 4);
|
||||||
assert.equal(config.orchestration.mergeConflictMaxAttempts, 2);
|
|
||||||
assert.equal(config.provisioning.portRange.basePort, 36000);
|
assert.equal(config.provisioning.portRange.basePort, 36000);
|
||||||
assert.equal(config.discovery.fileRelativePath, ".agent-context/resources.json");
|
assert.equal(config.discovery.fileRelativePath, ".agent-context/resources.json");
|
||||||
assert.equal(config.security.violationHandling, "hard_abort");
|
assert.equal(config.security.violationHandling, "hard_abort");
|
||||||
@@ -25,11 +24,6 @@ test("loads defaults and freezes config", () => {
|
|||||||
"session.failed",
|
"session.failed",
|
||||||
]);
|
]);
|
||||||
assert.equal(config.provider.openAiAuthMode, "auto");
|
assert.equal(config.provider.openAiAuthMode, "auto");
|
||||||
assert.equal(config.provider.claudeObservability.mode, "off");
|
|
||||||
assert.equal(config.provider.claudeObservability.verbosity, "summary");
|
|
||||||
assert.equal(config.provider.claudeObservability.logPath, ".ai_ops/events/claude-trace.ndjson");
|
|
||||||
assert.equal(config.provider.claudeObservability.includePartialMessages, false);
|
|
||||||
assert.equal(config.provider.claudeObservability.debug, false);
|
|
||||||
assert.equal(Object.isFrozen(config), true);
|
assert.equal(Object.isFrozen(config), true);
|
||||||
assert.equal(Object.isFrozen(config.orchestration), true);
|
assert.equal(Object.isFrozen(config.orchestration), true);
|
||||||
});
|
});
|
||||||
@@ -62,38 +56,6 @@ test("validates runtime discord severity mode", () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("validates claude observability mode", () => {
|
|
||||||
assert.throws(
|
|
||||||
() => loadConfig({ CLAUDE_OBSERVABILITY_MODE: "stream" }),
|
|
||||||
/CLAUDE_OBSERVABILITY_MODE must be one of/,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("validates claude observability verbosity", () => {
|
|
||||||
assert.throws(
|
|
||||||
() => loadConfig({ CLAUDE_OBSERVABILITY_VERBOSITY: "verbose" }),
|
|
||||||
/CLAUDE_OBSERVABILITY_VERBOSITY must be one of/,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("loads claude observability settings", () => {
|
|
||||||
const config = loadConfig({
|
|
||||||
CLAUDE_OBSERVABILITY_MODE: "both",
|
|
||||||
CLAUDE_OBSERVABILITY_VERBOSITY: "full",
|
|
||||||
CLAUDE_OBSERVABILITY_LOG_PATH: ".ai_ops/debug/claude.ndjson",
|
|
||||||
CLAUDE_OBSERVABILITY_INCLUDE_PARTIAL: "true",
|
|
||||||
CLAUDE_OBSERVABILITY_DEBUG: "true",
|
|
||||||
CLAUDE_OBSERVABILITY_DEBUG_LOG_PATH: ".ai_ops/debug/claude-sdk.log",
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.equal(config.provider.claudeObservability.mode, "both");
|
|
||||||
assert.equal(config.provider.claudeObservability.verbosity, "full");
|
|
||||||
assert.equal(config.provider.claudeObservability.logPath, ".ai_ops/debug/claude.ndjson");
|
|
||||||
assert.equal(config.provider.claudeObservability.includePartialMessages, true);
|
|
||||||
assert.equal(config.provider.claudeObservability.debug, true);
|
|
||||||
assert.equal(config.provider.claudeObservability.debugLogPath, ".ai_ops/debug/claude-sdk.log");
|
|
||||||
});
|
|
||||||
|
|
||||||
test("prefers CLAUDE_CODE_OAUTH_TOKEN over ANTHROPIC_API_KEY", () => {
|
test("prefers CLAUDE_CODE_OAUTH_TOKEN over ANTHROPIC_API_KEY", () => {
|
||||||
const config = loadConfig({
|
const config = loadConfig({
|
||||||
CLAUDE_CODE_OAUTH_TOKEN: "oauth-token",
|
CLAUDE_CODE_OAUTH_TOKEN: "oauth-token",
|
||||||
@@ -165,10 +127,3 @@ test("validates AGENT_WORKTREE_TARGET_PATH against parent traversal", () => {
|
|||||||
/must not contain "\.\." path segments/,
|
/must not contain "\.\." path segments/,
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("validates AGENT_MERGE_CONFLICT_MAX_ATTEMPTS bounds", () => {
|
|
||||||
assert.throws(
|
|
||||||
() => loadConfig({ AGENT_MERGE_CONFLICT_MAX_ATTEMPTS: "0" }),
|
|
||||||
/AGENT_MERGE_CONFLICT_MAX_ATTEMPTS must be an integer >= 1/,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|||||||
@@ -614,7 +614,6 @@ test("runs parallel topology blocks concurrently and routes via domain-event edg
|
|||||||
projectContextPatch: {
|
projectContextPatch: {
|
||||||
enqueueTasks: [
|
enqueueTasks: [
|
||||||
{
|
{
|
||||||
taskId: "task-integrate",
|
|
||||||
id: "task-integrate",
|
id: "task-integrate",
|
||||||
title: "Integrate feature branches",
|
title: "Integrate feature branches",
|
||||||
status: "pending",
|
status: "pending",
|
||||||
@@ -940,6 +939,86 @@ test("propagates abort signal into actor execution and stops the run", async ()
|
|||||||
assert.equal(observedAbort, true);
|
assert.equal(observedAbort, true);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("createClaudeCanUseTool accepts tool casing differences from providers", async () => {
|
||||||
|
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
|
||||||
|
const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));
|
||||||
|
const projectContextPath = resolve(stateRoot, "project-context.json");
|
||||||
|
|
||||||
|
const manifest = {
|
||||||
|
schemaVersion: "1",
|
||||||
|
topologies: ["sequential"],
|
||||||
|
personas: [
|
||||||
|
{
|
||||||
|
id: "coder",
|
||||||
|
displayName: "Coder",
|
||||||
|
systemPromptTemplate: "Coder",
|
||||||
|
toolClearance: {
|
||||||
|
allowlist: ["bash"],
|
||||||
|
banlist: [],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
relationships: [],
|
||||||
|
topologyConstraints: {
|
||||||
|
maxDepth: 2,
|
||||||
|
maxRetries: 0,
|
||||||
|
},
|
||||||
|
pipeline: {
|
||||||
|
entryNodeId: "case-node",
|
||||||
|
nodes: [
|
||||||
|
{
|
||||||
|
id: "case-node",
|
||||||
|
actorId: "case_actor",
|
||||||
|
personaId: "coder",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
edges: [],
|
||||||
|
},
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
const engine = new SchemaDrivenExecutionEngine({
|
||||||
|
manifest,
|
||||||
|
settings: {
|
||||||
|
workspaceRoot,
|
||||||
|
stateRoot,
|
||||||
|
projectContextPath,
|
||||||
|
maxChildren: 1,
|
||||||
|
maxDepth: 2,
|
||||||
|
maxRetries: 0,
|
||||||
|
runtimeContext: {},
|
||||||
|
},
|
||||||
|
actorExecutors: {
|
||||||
|
case_actor: async (input) => {
|
||||||
|
const canUseTool = input.mcp.createClaudeCanUseTool();
|
||||||
|
const allow = await canUseTool("Bash", {}, {
|
||||||
|
signal: new AbortController().signal,
|
||||||
|
toolUseID: "allow-bash",
|
||||||
|
});
|
||||||
|
assert.deepEqual(allow, {
|
||||||
|
behavior: "allow",
|
||||||
|
toolUseID: "allow-bash",
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: "success",
|
||||||
|
payload: {
|
||||||
|
ok: true,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await engine.runSession({
|
||||||
|
sessionId: "session-claude-tool-casing",
|
||||||
|
initialPayload: {
|
||||||
|
task: "verify tool casing",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(result.status, "success");
|
||||||
|
});
|
||||||
|
|
||||||
test("hard-aborts pipeline on security violations by default", async () => {
|
test("hard-aborts pipeline on security violations by default", async () => {
|
||||||
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
|
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
|
||||||
const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));
|
const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));
|
||||||
|
|||||||
@@ -28,7 +28,6 @@ test("project context store reads defaults and applies domain patches", async ()
|
|||||||
},
|
},
|
||||||
enqueueTasks: [
|
enqueueTasks: [
|
||||||
{
|
{
|
||||||
taskId: "task-1",
|
|
||||||
id: "task-1",
|
id: "task-1",
|
||||||
title: "Build parser",
|
title: "Build parser",
|
||||||
status: "pending",
|
status: "pending",
|
||||||
@@ -39,13 +38,11 @@ test("project context store reads defaults and applies domain patches", async ()
|
|||||||
const updated = await store.patchState({
|
const updated = await store.patchState({
|
||||||
upsertTasks: [
|
upsertTasks: [
|
||||||
{
|
{
|
||||||
taskId: "task-1",
|
|
||||||
id: "task-1",
|
id: "task-1",
|
||||||
title: "Build parser",
|
title: "Build parser",
|
||||||
status: "in_progress",
|
status: "in_progress",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
taskId: "task-2",
|
|
||||||
id: "task-2",
|
id: "task-2",
|
||||||
title: "Add tests",
|
title: "Add tests",
|
||||||
status: "pending",
|
status: "pending",
|
||||||
@@ -62,35 +59,6 @@ test("project context store reads defaults and applies domain patches", async ()
|
|||||||
assert.equal(updated.schemaVersion, 1);
|
assert.equal(updated.schemaVersion, 1);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("project context accepts conflict-aware task statuses", async () => {
|
|
||||||
const root = await mkdtemp(resolve(tmpdir(), "ai-ops-project-context-conflict-"));
|
|
||||||
const store = new FileSystemProjectContextStore({
|
|
||||||
filePath: resolve(root, "project-context.json"),
|
|
||||||
});
|
|
||||||
|
|
||||||
const updated = await store.patchState({
|
|
||||||
upsertTasks: [
|
|
||||||
{
|
|
||||||
taskId: "task-conflict",
|
|
||||||
id: "task-conflict",
|
|
||||||
title: "Resolve merge conflict",
|
|
||||||
status: "conflict",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
taskId: "task-resolving",
|
|
||||||
id: "task-resolving",
|
|
||||||
title: "Retry merge",
|
|
||||||
status: "resolving_conflict",
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.deepEqual(
|
|
||||||
updated.taskQueue.map((task) => `${task.taskId}:${task.status}`),
|
|
||||||
["task-conflict:conflict", "task-resolving:resolving_conflict"],
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("project context parser merges missing root keys with defaults", async () => {
|
test("project context parser merges missing root keys with defaults", async () => {
|
||||||
const root = await mkdtemp(resolve(tmpdir(), "ai-ops-project-context-"));
|
const root = await mkdtemp(resolve(tmpdir(), "ai-ops-project-context-"));
|
||||||
const filePath = resolve(root, "project-context.json");
|
const filePath = resolve(root, "project-context.json");
|
||||||
@@ -102,7 +70,6 @@ test("project context parser merges missing root keys with defaults", async () =
|
|||||||
{
|
{
|
||||||
taskQueue: [
|
taskQueue: [
|
||||||
{
|
{
|
||||||
taskId: "task-1",
|
|
||||||
id: "task-1",
|
id: "task-1",
|
||||||
title: "Migrate",
|
title: "Migrate",
|
||||||
status: "pending",
|
status: "pending",
|
||||||
|
|||||||
@@ -1,14 +1,10 @@
|
|||||||
import test from "node:test";
|
import test from "node:test";
|
||||||
import assert from "node:assert/strict";
|
import assert from "node:assert/strict";
|
||||||
import { execFile } from "node:child_process";
|
import { mkdtemp, writeFile } from "node:fs/promises";
|
||||||
import { mkdtemp, mkdir, stat, writeFile } from "node:fs/promises";
|
|
||||||
import { tmpdir } from "node:os";
|
import { tmpdir } from "node:os";
|
||||||
import { resolve } from "node:path";
|
import { resolve } from "node:path";
|
||||||
import { promisify } from "node:util";
|
|
||||||
import { UiRunService, readRunMetaBySession } from "../src/ui/run-service.js";
|
import { UiRunService, readRunMetaBySession } from "../src/ui/run-service.js";
|
||||||
|
|
||||||
const execFileAsync = promisify(execFile);
|
|
||||||
|
|
||||||
async function waitForTerminalRun(
|
async function waitForTerminalRun(
|
||||||
runService: UiRunService,
|
runService: UiRunService,
|
||||||
runId: string,
|
runId: string,
|
||||||
@@ -98,140 +94,3 @@ test("run service persists failure when pipeline summary is failure", async () =
|
|||||||
});
|
});
|
||||||
assert.equal(persisted?.status, "failure");
|
assert.equal(persisted?.status, "failure");
|
||||||
});
|
});
|
||||||
|
|
||||||
test("run service creates, runs, and closes explicit sessions", async () => {
|
|
||||||
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-run-service-session-"));
|
|
||||||
const stateRoot = resolve(workspaceRoot, "state");
|
|
||||||
const envPath = resolve(workspaceRoot, ".env");
|
|
||||||
const projectPath = resolve(workspaceRoot, "project");
|
|
||||||
|
|
||||||
await mkdir(projectPath, { recursive: true });
|
|
||||||
await execFileAsync("git", ["init", projectPath], { encoding: "utf8" });
|
|
||||||
await execFileAsync("git", ["-C", projectPath, "config", "user.name", "AI Ops"], { encoding: "utf8" });
|
|
||||||
await execFileAsync("git", ["-C", projectPath, "config", "user.email", "ai-ops@example.local"], { encoding: "utf8" });
|
|
||||||
await writeFile(resolve(projectPath, "README.md"), "# project\n", "utf8");
|
|
||||||
await execFileAsync("git", ["-C", projectPath, "add", "README.md"], { encoding: "utf8" });
|
|
||||||
await execFileAsync("git", ["-C", projectPath, "commit", "-m", "initial"], { encoding: "utf8" });
|
|
||||||
|
|
||||||
await writeFile(
|
|
||||||
envPath,
|
|
||||||
[
|
|
||||||
`AGENT_STATE_ROOT=${stateRoot}`,
|
|
||||||
"AGENT_WORKTREE_ROOT=.ai_ops/worktrees",
|
|
||||||
"AGENT_WORKTREE_BASE_REF=HEAD",
|
|
||||||
].join("\n"),
|
|
||||||
"utf8",
|
|
||||||
);
|
|
||||||
|
|
||||||
const runService = new UiRunService({
|
|
||||||
workspaceRoot,
|
|
||||||
envFilePath: ".env",
|
|
||||||
});
|
|
||||||
|
|
||||||
const createdSession = await runService.createSession({
|
|
||||||
projectPath,
|
|
||||||
});
|
|
||||||
assert.equal(createdSession.sessionStatus, "active");
|
|
||||||
|
|
||||||
const manifest = {
|
|
||||||
schemaVersion: "1",
|
|
||||||
topologies: ["sequential"],
|
|
||||||
personas: [
|
|
||||||
{
|
|
||||||
id: "writer",
|
|
||||||
displayName: "Writer",
|
|
||||||
systemPromptTemplate: "Write draft",
|
|
||||||
toolClearance: {
|
|
||||||
allowlist: ["read_file", "write_file"],
|
|
||||||
banlist: [],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
relationships: [],
|
|
||||||
topologyConstraints: {
|
|
||||||
maxDepth: 1,
|
|
||||||
maxRetries: 0,
|
|
||||||
},
|
|
||||||
pipeline: {
|
|
||||||
entryNodeId: "write-node",
|
|
||||||
nodes: [
|
|
||||||
{
|
|
||||||
id: "write-node",
|
|
||||||
actorId: "writer-actor",
|
|
||||||
personaId: "writer",
|
|
||||||
},
|
|
||||||
],
|
|
||||||
edges: [],
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
const started = await runService.startRun({
|
|
||||||
prompt: "complete task",
|
|
||||||
manifest,
|
|
||||||
sessionId: createdSession.sessionId,
|
|
||||||
executionMode: "mock",
|
|
||||||
});
|
|
||||||
|
|
||||||
const terminalStatus = await waitForTerminalRun(runService, started.runId);
|
|
||||||
assert.equal(terminalStatus, "success");
|
|
||||||
|
|
||||||
const closed = await runService.closeSession({
|
|
||||||
sessionId: createdSession.sessionId,
|
|
||||||
});
|
|
||||||
assert.equal(closed.sessionStatus, "closed");
|
|
||||||
|
|
||||||
await assert.rejects(() => stat(createdSession.baseWorkspacePath), {
|
|
||||||
code: "ENOENT",
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
test("run service marks session closed_with_conflicts when close merge conflicts", async () => {
|
|
||||||
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-run-service-close-conflict-"));
|
|
||||||
const stateRoot = resolve(workspaceRoot, "state");
|
|
||||||
const envPath = resolve(workspaceRoot, ".env");
|
|
||||||
const projectPath = resolve(workspaceRoot, "project");
|
|
||||||
|
|
||||||
await mkdir(projectPath, { recursive: true });
|
|
||||||
await execFileAsync("git", ["init", projectPath], { encoding: "utf8" });
|
|
||||||
await execFileAsync("git", ["-C", projectPath, "config", "user.name", "AI Ops"], { encoding: "utf8" });
|
|
||||||
await execFileAsync("git", ["-C", projectPath, "config", "user.email", "ai-ops@example.local"], { encoding: "utf8" });
|
|
||||||
await writeFile(resolve(projectPath, "README.md"), "base\n", "utf8");
|
|
||||||
await execFileAsync("git", ["-C", projectPath, "add", "README.md"], { encoding: "utf8" });
|
|
||||||
await execFileAsync("git", ["-C", projectPath, "commit", "-m", "initial"], { encoding: "utf8" });
|
|
||||||
|
|
||||||
await writeFile(
|
|
||||||
envPath,
|
|
||||||
[
|
|
||||||
`AGENT_STATE_ROOT=${stateRoot}`,
|
|
||||||
"AGENT_WORKTREE_ROOT=.ai_ops/worktrees",
|
|
||||||
"AGENT_WORKTREE_BASE_REF=HEAD",
|
|
||||||
].join("\n"),
|
|
||||||
"utf8",
|
|
||||||
);
|
|
||||||
|
|
||||||
const runService = new UiRunService({
|
|
||||||
workspaceRoot,
|
|
||||||
envFilePath: ".env",
|
|
||||||
});
|
|
||||||
|
|
||||||
const createdSession = await runService.createSession({
|
|
||||||
projectPath,
|
|
||||||
});
|
|
||||||
|
|
||||||
await writeFile(resolve(createdSession.baseWorkspacePath, "README.md"), "base branch update\n", "utf8");
|
|
||||||
await execFileAsync("git", ["-C", createdSession.baseWorkspacePath, "add", "README.md"], { encoding: "utf8" });
|
|
||||||
await execFileAsync("git", ["-C", createdSession.baseWorkspacePath, "commit", "-m", "base update"], { encoding: "utf8" });
|
|
||||||
|
|
||||||
await writeFile(resolve(projectPath, "README.md"), "project branch update\n", "utf8");
|
|
||||||
await execFileAsync("git", ["-C", projectPath, "add", "README.md"], { encoding: "utf8" });
|
|
||||||
await execFileAsync("git", ["-C", projectPath, "commit", "-m", "project update"], { encoding: "utf8" });
|
|
||||||
|
|
||||||
const closed = await runService.closeSession({
|
|
||||||
sessionId: createdSession.sessionId,
|
|
||||||
mergeToProject: true,
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.equal(closed.sessionStatus, "closed_with_conflicts");
|
|
||||||
const baseWorkspaceStats = await stat(createdSession.baseWorkspacePath);
|
|
||||||
assert.equal(baseWorkspaceStats.isDirectory(), true);
|
|
||||||
});
|
|
||||||
|
|||||||
@@ -155,41 +155,3 @@ test("secure executor runs with explicit env policy", async () => {
|
|||||||
assert.equal(result.stdout, "ok|\n");
|
assert.equal(result.stdout, "ok|\n");
|
||||||
assert.equal(streamedStdout, result.stdout);
|
assert.equal(streamedStdout, result.stdout);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("rules engine carries session context in tool audit events", () => {
|
|
||||||
const events: Array<Record<string, unknown>> = [];
|
|
||||||
const rules = new SecurityRulesEngine(
|
|
||||||
{
|
|
||||||
allowedBinaries: ["git"],
|
|
||||||
worktreeRoot: "/tmp",
|
|
||||||
protectedPaths: [],
|
|
||||||
requireCwdWithinWorktree: true,
|
|
||||||
rejectRelativePathTraversal: true,
|
|
||||||
enforcePathBoundaryOnArguments: true,
|
|
||||||
allowedEnvAssignments: [],
|
|
||||||
blockedEnvAssignments: [],
|
|
||||||
},
|
|
||||||
(event) => {
|
|
||||||
events.push(event as unknown as Record<string, unknown>);
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
rules.assertToolInvocationAllowed({
|
|
||||||
tool: "git",
|
|
||||||
toolClearance: {
|
|
||||||
allowlist: ["git"],
|
|
||||||
banlist: [],
|
|
||||||
},
|
|
||||||
context: {
|
|
||||||
sessionId: "session-ctx",
|
|
||||||
nodeId: "node-ctx",
|
|
||||||
attempt: 2,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const allowedEvent = events.find((event) => event.type === "tool.invocation_allowed");
|
|
||||||
assert.ok(allowedEvent);
|
|
||||||
assert.equal(allowedEvent.sessionId, "session-ctx");
|
|
||||||
assert.equal(allowedEvent.nodeId, "node-ctx");
|
|
||||||
assert.equal(allowedEvent.attempt, 2);
|
|
||||||
});
|
|
||||||
|
|||||||
@@ -1,230 +0,0 @@
|
|||||||
import test from "node:test";
|
|
||||||
import assert from "node:assert/strict";
|
|
||||||
import { execFile } from "node:child_process";
|
|
||||||
import { mkdtemp, mkdir, readFile, rm, stat, writeFile } from "node:fs/promises";
|
|
||||||
import { tmpdir } from "node:os";
|
|
||||||
import { resolve } from "node:path";
|
|
||||||
import { promisify } from "node:util";
|
|
||||||
import {
|
|
||||||
FileSystemSessionMetadataStore,
|
|
||||||
SessionWorktreeManager,
|
|
||||||
type SessionMetadata,
|
|
||||||
} from "../src/agents/session-lifecycle.js";
|
|
||||||
|
|
||||||
const execFileAsync = promisify(execFile);
|
|
||||||
|
|
||||||
async function git(args: string[]): Promise<string> {
|
|
||||||
const { stdout } = await execFileAsync("git", args, {
|
|
||||||
encoding: "utf8",
|
|
||||||
});
|
|
||||||
return stdout.trim();
|
|
||||||
}
|
|
||||||
|
|
||||||
test("session metadata store persists and updates session metadata", async () => {
|
|
||||||
const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-store-"));
|
|
||||||
const store = new FileSystemSessionMetadataStore({ stateRoot });
|
|
||||||
|
|
||||||
const created = await store.createSession({
|
|
||||||
sessionId: "session-abc",
|
|
||||||
projectPath: resolve(stateRoot, "project"),
|
|
||||||
baseWorkspacePath: resolve(stateRoot, "worktrees", "session-abc", "base"),
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.equal(created.sessionStatus, "active");
|
|
||||||
assert.equal(created.sessionId, "session-abc");
|
|
||||||
|
|
||||||
const listed = await store.listSessions();
|
|
||||||
assert.equal(listed.length, 1);
|
|
||||||
assert.equal(listed[0]?.sessionId, "session-abc");
|
|
||||||
|
|
||||||
const updated = await store.updateSession("session-abc", {
|
|
||||||
sessionStatus: "closed",
|
|
||||||
});
|
|
||||||
assert.equal(updated.sessionStatus, "closed");
|
|
||||||
|
|
||||||
const readBack = await store.readSession("session-abc");
|
|
||||||
assert.equal(readBack?.sessionStatus, "closed");
|
|
||||||
|
|
||||||
const closedWithConflicts = await store.updateSession("session-abc", {
|
|
||||||
sessionStatus: "closed_with_conflicts",
|
|
||||||
});
|
|
||||||
assert.equal(closedWithConflicts.sessionStatus, "closed_with_conflicts");
|
|
||||||
});
|
|
||||||
|
|
||||||
test("session worktree manager provisions and merges task worktrees", async () => {
|
|
||||||
const root = await mkdtemp(resolve(tmpdir(), "ai-ops-session-worktree-"));
|
|
||||||
const projectPath = resolve(root, "project");
|
|
||||||
const worktreeRoot = resolve(root, "worktrees");
|
|
||||||
|
|
||||||
await mkdir(projectPath, { recursive: true });
|
|
||||||
await git(["init", projectPath]);
|
|
||||||
await git(["-C", projectPath, "config", "user.name", "AI Ops"]);
|
|
||||||
await git(["-C", projectPath, "config", "user.email", "ai-ops@example.local"]);
|
|
||||||
await writeFile(resolve(projectPath, "README.md"), "# project\n", "utf8");
|
|
||||||
await git(["-C", projectPath, "add", "README.md"]);
|
|
||||||
await git(["-C", projectPath, "commit", "-m", "initial commit"]);
|
|
||||||
|
|
||||||
const manager = new SessionWorktreeManager({
|
|
||||||
worktreeRoot,
|
|
||||||
baseRef: "HEAD",
|
|
||||||
});
|
|
||||||
|
|
||||||
const sessionId = "session-1";
|
|
||||||
const baseWorkspacePath = manager.resolveBaseWorkspacePath(sessionId);
|
|
||||||
|
|
||||||
await manager.initializeSessionBaseWorkspace({
|
|
||||||
sessionId,
|
|
||||||
projectPath,
|
|
||||||
baseWorkspacePath,
|
|
||||||
});
|
|
||||||
|
|
||||||
const baseStats = await stat(baseWorkspacePath);
|
|
||||||
assert.equal(baseStats.isDirectory(), true);
|
|
||||||
|
|
||||||
const taskWorktreePath = (
|
|
||||||
await manager.ensureTaskWorktree({
|
|
||||||
sessionId,
|
|
||||||
taskId: "task-1",
|
|
||||||
baseWorkspacePath,
|
|
||||||
})
|
|
||||||
).taskWorktreePath;
|
|
||||||
|
|
||||||
await writeFile(resolve(taskWorktreePath, "feature.txt"), "task output\n", "utf8");
|
|
||||||
|
|
||||||
const mergeOutcome = await manager.mergeTaskIntoBase({
|
|
||||||
taskId: "task-1",
|
|
||||||
baseWorkspacePath,
|
|
||||||
taskWorktreePath,
|
|
||||||
});
|
|
||||||
assert.equal(mergeOutcome.kind, "success");
|
|
||||||
|
|
||||||
const mergedFile = await readFile(resolve(baseWorkspacePath, "feature.txt"), "utf8");
|
|
||||||
assert.equal(mergedFile, "task output\n");
|
|
||||||
|
|
||||||
const session: SessionMetadata = {
|
|
||||||
sessionId,
|
|
||||||
projectPath,
|
|
||||||
baseWorkspacePath,
|
|
||||||
sessionStatus: "active",
|
|
||||||
createdAt: new Date().toISOString(),
|
|
||||||
updatedAt: new Date().toISOString(),
|
|
||||||
};
|
|
||||||
|
|
||||||
const closeOutcome = await manager.closeSession({
|
|
||||||
session,
|
|
||||||
taskWorktreePaths: [],
|
|
||||||
mergeBaseIntoProject: false,
|
|
||||||
});
|
|
||||||
assert.equal(closeOutcome.kind, "success");
|
|
||||||
|
|
||||||
await assert.rejects(() => stat(baseWorkspacePath), {
|
|
||||||
code: "ENOENT",
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
test("session worktree manager returns conflict outcome instead of throwing", async () => {
|
|
||||||
const root = await mkdtemp(resolve(tmpdir(), "ai-ops-session-worktree-conflict-"));
|
|
||||||
const projectPath = resolve(root, "project");
|
|
||||||
const worktreeRoot = resolve(root, "worktrees");
|
|
||||||
|
|
||||||
await mkdir(projectPath, { recursive: true });
|
|
||||||
await git(["init", projectPath]);
|
|
||||||
await git(["-C", projectPath, "config", "user.name", "AI Ops"]);
|
|
||||||
await git(["-C", projectPath, "config", "user.email", "ai-ops@example.local"]);
|
|
||||||
await writeFile(resolve(projectPath, "README.md"), "base\n", "utf8");
|
|
||||||
await git(["-C", projectPath, "add", "README.md"]);
|
|
||||||
await git(["-C", projectPath, "commit", "-m", "initial commit"]);
|
|
||||||
|
|
||||||
const manager = new SessionWorktreeManager({
|
|
||||||
worktreeRoot,
|
|
||||||
baseRef: "HEAD",
|
|
||||||
});
|
|
||||||
|
|
||||||
const sessionId = "session-conflict-1";
|
|
||||||
const baseWorkspacePath = manager.resolveBaseWorkspacePath(sessionId);
|
|
||||||
|
|
||||||
await manager.initializeSessionBaseWorkspace({
|
|
||||||
sessionId,
|
|
||||||
projectPath,
|
|
||||||
baseWorkspacePath,
|
|
||||||
});
|
|
||||||
|
|
||||||
const taskWorktreePath = (
|
|
||||||
await manager.ensureTaskWorktree({
|
|
||||||
sessionId,
|
|
||||||
taskId: "task-conflict",
|
|
||||||
baseWorkspacePath,
|
|
||||||
})
|
|
||||||
).taskWorktreePath;
|
|
||||||
|
|
||||||
await writeFile(resolve(baseWorkspacePath, "README.md"), "base branch change\n", "utf8");
|
|
||||||
await git(["-C", baseWorkspacePath, "add", "README.md"]);
|
|
||||||
await git(["-C", baseWorkspacePath, "commit", "-m", "base update"]);
|
|
||||||
|
|
||||||
await writeFile(resolve(taskWorktreePath, "README.md"), "task branch change\n", "utf8");
|
|
||||||
|
|
||||||
const mergeOutcome = await manager.mergeTaskIntoBase({
|
|
||||||
taskId: "task-conflict",
|
|
||||||
baseWorkspacePath,
|
|
||||||
taskWorktreePath,
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.equal(mergeOutcome.kind, "conflict");
|
|
||||||
if (mergeOutcome.kind !== "conflict") {
|
|
||||||
throw new Error("Expected merge conflict outcome.");
|
|
||||||
}
|
|
||||||
assert.equal(mergeOutcome.taskId, "task-conflict");
|
|
||||||
assert.equal(mergeOutcome.worktreePath, taskWorktreePath);
|
|
||||||
assert.ok(mergeOutcome.conflictFiles.includes("README.md"));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("session worktree manager recreates a task worktree after stale metadata prune", async () => {
|
|
||||||
const root = await mkdtemp(resolve(tmpdir(), "ai-ops-session-worktree-prune-"));
|
|
||||||
const projectPath = resolve(root, "project");
|
|
||||||
const worktreeRoot = resolve(root, "worktrees");
|
|
||||||
|
|
||||||
await mkdir(projectPath, { recursive: true });
|
|
||||||
await git(["init", projectPath]);
|
|
||||||
await git(["-C", projectPath, "config", "user.name", "AI Ops"]);
|
|
||||||
await git(["-C", projectPath, "config", "user.email", "ai-ops@example.local"]);
|
|
||||||
await writeFile(resolve(projectPath, "README.md"), "# project\n", "utf8");
|
|
||||||
await git(["-C", projectPath, "add", "README.md"]);
|
|
||||||
await git(["-C", projectPath, "commit", "-m", "initial commit"]);
|
|
||||||
|
|
||||||
const manager = new SessionWorktreeManager({
|
|
||||||
worktreeRoot,
|
|
||||||
baseRef: "HEAD",
|
|
||||||
});
|
|
||||||
|
|
||||||
const sessionId = "session-prune-1";
|
|
||||||
const taskId = "task-prune-1";
|
|
||||||
const baseWorkspacePath = manager.resolveBaseWorkspacePath(sessionId);
|
|
||||||
|
|
||||||
await manager.initializeSessionBaseWorkspace({
|
|
||||||
sessionId,
|
|
||||||
projectPath,
|
|
||||||
baseWorkspacePath,
|
|
||||||
});
|
|
||||||
|
|
||||||
const initialTaskWorktreePath = (
|
|
||||||
await manager.ensureTaskWorktree({
|
|
||||||
sessionId,
|
|
||||||
taskId,
|
|
||||||
baseWorkspacePath,
|
|
||||||
})
|
|
||||||
).taskWorktreePath;
|
|
||||||
|
|
||||||
await rm(initialTaskWorktreePath, { recursive: true, force: true });
|
|
||||||
|
|
||||||
const recreatedTaskWorktreePath = (
|
|
||||||
await manager.ensureTaskWorktree({
|
|
||||||
sessionId,
|
|
||||||
taskId,
|
|
||||||
baseWorkspacePath,
|
|
||||||
})
|
|
||||||
).taskWorktreePath;
|
|
||||||
|
|
||||||
assert.equal(recreatedTaskWorktreePath, initialTaskWorktreePath);
|
|
||||||
const stats = await stat(recreatedTaskWorktreePath);
|
|
||||||
assert.equal(stats.isDirectory(), true);
|
|
||||||
});
|
|
||||||
0
workspace/.gitkeep
Normal file
0
workspace/.gitkeep
Normal file
Reference in New Issue
Block a user