Compare commits
13 Commits
9b4ef8fed8
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 35e3f81327 | |||
| 659f3edcee | |||
| 90725eaae8 | |||
| 7727612ce9 | |||
| 45374a033b | |||
| 691591d279 | |||
| 422e8fe5a5 | |||
| 6863c1da0b | |||
| 9f032d9b14 | |||
| ca5fd3f096 | |||
| 23ad28ad12 | |||
| 47e20d8ec6 | |||
| 83bbf1a9ce |
15
.env.example
15
.env.example
@@ -16,6 +16,16 @@ CLAUDE_CODE_OAUTH_TOKEN=
|
||||
ANTHROPIC_API_KEY=
|
||||
CLAUDE_MODEL=
|
||||
CLAUDE_CODE_PATH=
|
||||
CLAUDE_MAX_TURNS=2
|
||||
# Claude binary observability: off | stdout | file | both
|
||||
CLAUDE_OBSERVABILITY_MODE=off
|
||||
# CLAUDE_OBSERVABILITY_VERBOSITY: summary | full
|
||||
CLAUDE_OBSERVABILITY_VERBOSITY=summary
|
||||
# Relative to repository workspace root in UI/provider runs.
|
||||
CLAUDE_OBSERVABILITY_LOG_PATH=.ai_ops/events/claude-trace.ndjson
|
||||
CLAUDE_OBSERVABILITY_INCLUDE_PARTIAL=false
|
||||
CLAUDE_OBSERVABILITY_DEBUG=false
|
||||
CLAUDE_OBSERVABILITY_DEBUG_LOG_PATH=
|
||||
|
||||
# Agent management limits
|
||||
AGENT_MAX_CONCURRENT=4
|
||||
@@ -28,10 +38,13 @@ AGENT_PROJECT_CONTEXT_PATH=.ai_ops/project-context.json
|
||||
AGENT_TOPOLOGY_MAX_DEPTH=4
|
||||
AGENT_TOPOLOGY_MAX_RETRIES=2
|
||||
AGENT_RELATIONSHIP_MAX_CHILDREN=4
|
||||
AGENT_MERGE_CONFLICT_MAX_ATTEMPTS=2
|
||||
|
||||
# Resource provisioning (hard + soft constraints)
|
||||
AGENT_WORKTREE_ROOT=.ai_ops/worktrees
|
||||
AGENT_WORKTREE_BASE_REF=HEAD
|
||||
# Optional relative path inside each worktree; enables sparse-checkout and sets working directory there.
|
||||
AGENT_WORKTREE_TARGET_PATH=
|
||||
AGENT_PORT_BASE=36000
|
||||
AGENT_PORT_BLOCK_SIZE=32
|
||||
AGENT_PORT_BLOCK_COUNT=512
|
||||
@@ -40,7 +53,7 @@ AGENT_PORT_LOCK_DIR=.ai_ops/locks/ports
|
||||
AGENT_DISCOVERY_FILE_RELATIVE_PATH=.agent-context/resources.json
|
||||
|
||||
# Security middleware
|
||||
# AGENT_SECURITY_VIOLATION_MODE: hard_abort | validation_fail
|
||||
# AGENT_SECURITY_VIOLATION_MODE: hard_abort | validation_fail | dangerous_warn_only
|
||||
AGENT_SECURITY_VIOLATION_MODE=hard_abort
|
||||
AGENT_SECURITY_ALLOWED_BINARIES=git,npm,node,cat,ls,pwd,echo,bash,sh
|
||||
AGENT_SECURITY_COMMAND_TIMEOUT_MS=120000
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -4,3 +4,4 @@ dist
|
||||
mcp.config.json
|
||||
.ai_ops
|
||||
.agent-context
|
||||
.workspace
|
||||
@@ -29,9 +29,11 @@
|
||||
- `AGENT_TOPOLOGY_MAX_DEPTH`
|
||||
- `AGENT_TOPOLOGY_MAX_RETRIES`
|
||||
- `AGENT_RELATIONSHIP_MAX_CHILDREN`
|
||||
- `AGENT_MERGE_CONFLICT_MAX_ATTEMPTS`
|
||||
- Provisioning/resource controls:
|
||||
- `AGENT_WORKTREE_ROOT`
|
||||
- `AGENT_WORKTREE_BASE_REF`
|
||||
- `AGENT_WORKTREE_TARGET_PATH`
|
||||
- `AGENT_PORT_BASE`
|
||||
- `AGENT_PORT_BLOCK_SIZE`
|
||||
- `AGENT_PORT_BLOCK_COUNT`
|
||||
|
||||
60
README.md
60
README.md
@@ -20,10 +20,19 @@ TypeScript runtime for deterministic multi-agent execution with:
|
||||
- Runtime events are emitted as best-effort side-channel telemetry and do not affect orchestration control flow.
|
||||
- `AgentManager` is an internal utility used by the pipeline when fan-out/retry-unrolled behavior is required.
|
||||
- Session state is persisted under `AGENT_STATE_ROOT`.
|
||||
- Project state is persisted under `AGENT_PROJECT_CONTEXT_PATH` with schema-versioned JSON (`schemaVersion`) and domains:
|
||||
- Session lifecycle is explicit (`POST /api/sessions`, `POST /api/sessions/:id/run`, `POST /api/sessions/:id/close`) and each session is bound to a target project path.
|
||||
- Session project context is persisted as schema-versioned JSON (`schemaVersion`) with domains:
|
||||
- `globalFlags`
|
||||
- `artifactPointers`
|
||||
- `taskQueue`
|
||||
- each task record stores `taskId`, status, and optional `worktreePath` for task-scoped workspace ownership
|
||||
- conflict-aware statuses are supported (`conflict`, `resolving_conflict`)
|
||||
|
||||
## Deep Dives
|
||||
|
||||
- Session walkthrough with concrete artifacts from a successful provider run: `docs/session-walkthrough.md`
|
||||
- Orchestration engine internals: `docs/orchestration-engine.md`
|
||||
- Runtime event model and sinks: `docs/runtime-events.md`
|
||||
|
||||
## Repository Layout
|
||||
|
||||
@@ -51,6 +60,7 @@ TypeScript runtime for deterministic multi-agent execution with:
|
||||
|
||||
```bash
|
||||
npm install
|
||||
npm --prefix ui install
|
||||
cp .env.example .env
|
||||
cp mcp.config.example.json mcp.config.json
|
||||
```
|
||||
@@ -77,6 +87,8 @@ Start the local UI server:
|
||||
npm run ui
|
||||
```
|
||||
|
||||
This script builds the React frontend from `ui/` before serving.
|
||||
|
||||
Then open:
|
||||
|
||||
- `http://127.0.0.1:4317` (default)
|
||||
@@ -86,6 +98,7 @@ The UI provides:
|
||||
- graph visualizer with topology/retry rendering, edge trigger labels, node economics (duration/cost/tokens), and critical-path highlighting
|
||||
- node inspector with attempt metadata and injected `ResolvedExecutionContext` sandbox payload
|
||||
- live runtime event feed from `AGENT_RUNTIME_EVENT_LOG_PATH` with severity coloring (including security mirror events)
|
||||
- Claude trace feed from `CLAUDE_OBSERVABILITY_LOG_PATH` (query lifecycle, SDK message types/subtypes, and errors)
|
||||
- run trigger + kill switch backed by `SchemaDrivenExecutionEngine.runSession(...)`
|
||||
- run mode selector: `provider` (real Codex/Claude execution) or `mock` (deterministic dry-run executor)
|
||||
- provider selector: `codex` or `claude`
|
||||
@@ -98,6 +111,10 @@ Provider mode notes:
|
||||
|
||||
- `provider=codex` uses existing OpenAI/Codex auth settings (`OPENAI_AUTH_MODE`, `CODEX_API_KEY`, `OPENAI_API_KEY`).
|
||||
- `provider=claude` uses Claude auth resolution (`CLAUDE_CODE_OAUTH_TOKEN` preferred, otherwise `ANTHROPIC_API_KEY`, or existing Claude Code login state).
|
||||
- `CLAUDE_MODEL` should be a Claude model id/alias recognized by Claude Code (for example `claude-sonnet-4-6`); `anthropic/...` prefixes are normalized automatically.
|
||||
- `CLAUDE_MAX_TURNS` controls the per-query Claude turn budget (default `2`).
|
||||
- Claude provider runs can emit Claude SDK/CLI internals to stdout and/or NDJSON with `CLAUDE_OBSERVABILITY_*` settings.
|
||||
- UI session-mode provider runs execute directly in orchestration-assigned task/base worktrees; provider adapters do not allocate additional nested worktrees.
|
||||
|
||||
## Manifest Semantics
|
||||
|
||||
@@ -121,9 +138,9 @@ Pipeline edges can route via:
|
||||
Domain events are typed and can trigger edges directly:
|
||||
|
||||
- planning: `requirements_defined`, `tasks_planned`
|
||||
- execution: `code_committed`, `task_blocked`
|
||||
- execution: `code_committed`, `task_ready_for_review`, `task_blocked`
|
||||
- validation: `validation_passed`, `validation_failed`
|
||||
- integration: `branch_merged`
|
||||
- integration: `branch_merged`, `merge_conflict_detected`, `merge_conflict_resolved`, `merge_conflict_unresolved`, `merge_retry_started`
|
||||
|
||||
Actors can emit events in `ActorExecutionResult.events`. Pipeline status also emits default validation/execution events.
|
||||
|
||||
@@ -192,6 +209,30 @@ Notes:
|
||||
- `security.tool.invocation_allowed`
|
||||
- `security.tool.invocation_blocked`
|
||||
|
||||
## Claude Observability
|
||||
|
||||
- `CLAUDE_OBSERVABILITY_MODE=stdout` prints structured Claude query internals (tool progress, system events, stderr, result lifecycle) to stdout as JSON lines prefixed with `[claude-trace]`.
|
||||
- `CLAUDE_OBSERVABILITY_MODE=file` appends the same records to `CLAUDE_OBSERVABILITY_LOG_PATH`.
|
||||
- `CLAUDE_OBSERVABILITY_MODE=both` enables both outputs.
|
||||
- Output samples high-frequency `tool_progress` events to avoid log flooding while retaining suppression counters.
|
||||
- `assistant` and `user` message records are retained so turn flow is inspectable end-to-end.
|
||||
- `CLAUDE_OBSERVABILITY_VERBOSITY=summary` stores compact metadata; `full` stores redacted full SDK message payloads.
|
||||
- `CLAUDE_OBSERVABILITY_INCLUDE_PARTIAL=true` enables emission of sampled partial assistant stream events from the SDK.
|
||||
- `CLAUDE_OBSERVABILITY_DEBUG=true` enables Claude SDK debug mode.
|
||||
- `CLAUDE_OBSERVABILITY_DEBUG_LOG_PATH` writes Claude SDK debug output to a file (also enables debug mode).
|
||||
- In UI/provider mode, `CLAUDE_OBSERVABILITY_LOG_PATH` resolves relative to the repo workspace root.
|
||||
- UI API: `GET /api/claude-trace?limit=<n>&sessionId=<id>` reads filtered Claude trace records.
|
||||
|
||||
Example:
|
||||
|
||||
```bash
|
||||
CLAUDE_OBSERVABILITY_MODE=both
|
||||
CLAUDE_OBSERVABILITY_VERBOSITY=summary
|
||||
CLAUDE_OBSERVABILITY_LOG_PATH=.ai_ops/events/claude-trace.ndjson
|
||||
CLAUDE_OBSERVABILITY_INCLUDE_PARTIAL=false
|
||||
CLAUDE_OBSERVABILITY_DEBUG=false
|
||||
```
|
||||
|
||||
### Analytics Quick Start
|
||||
|
||||
Inspect latest events:
|
||||
@@ -231,9 +272,11 @@ jq -c 'select(.severity=="critical")' .ai_ops/events/runtime-events.ndjson
|
||||
- Every actor execution input now includes `security` helpers (`rulesEngine`, `createCommandExecutor(...)`) so executors can enforce shell/tool policy at the execution boundary.
|
||||
- Every actor execution input now includes `mcp` helpers (`resolvedConfig`, `resolveConfig(...)`, `filterToolsForProvider(...)`, `createClaudeCanUseTool()`) so provider adapters are filtered against `executionContext.allowedTools` before SDK calls.
|
||||
- For Claude-based executors, pass `input.mcp.filterToolsForProvider(...)` and `input.mcp.createClaudeCanUseTool()` into the SDK call path so unauthorized tools are never exposed and runtime bypass attempts trigger security violations.
|
||||
- Claude `canUseTool` permission checks normalize provider casing (`Bash` vs `bash`) before enforcing persona allowlists.
|
||||
- Pipeline behavior on `SecurityViolationError` is configurable:
|
||||
- `hard_abort` (default)
|
||||
- `validation_fail` (retry-unrolled remediation)
|
||||
- `dangerous_warn_only` (logs violations and continues execution; high risk)
|
||||
|
||||
## Environment Variables
|
||||
|
||||
@@ -248,6 +291,13 @@ jq -c 'select(.severity=="critical")' .ai_ops/events/runtime-events.ndjson
|
||||
- `ANTHROPIC_API_KEY` (used when `CLAUDE_CODE_OAUTH_TOKEN` is unset)
|
||||
- `CLAUDE_MODEL`
|
||||
- `CLAUDE_CODE_PATH`
|
||||
- `CLAUDE_MAX_TURNS` (integer >= 1, defaults to `2`)
|
||||
- `CLAUDE_OBSERVABILITY_MODE` (`off`, `stdout`, `file`, or `both`)
|
||||
- `CLAUDE_OBSERVABILITY_VERBOSITY` (`summary` or `full`)
|
||||
- `CLAUDE_OBSERVABILITY_LOG_PATH`
|
||||
- `CLAUDE_OBSERVABILITY_INCLUDE_PARTIAL` (`true` or `false`)
|
||||
- `CLAUDE_OBSERVABILITY_DEBUG` (`true` or `false`)
|
||||
- `CLAUDE_OBSERVABILITY_DEBUG_LOG_PATH`
|
||||
- `MCP_CONFIG_PATH`
|
||||
|
||||
### Agent Manager Limits
|
||||
@@ -263,11 +313,13 @@ jq -c 'select(.severity=="critical")' .ai_ops/events/runtime-events.ndjson
|
||||
- `AGENT_TOPOLOGY_MAX_DEPTH`
|
||||
- `AGENT_TOPOLOGY_MAX_RETRIES`
|
||||
- `AGENT_RELATIONSHIP_MAX_CHILDREN`
|
||||
- `AGENT_MERGE_CONFLICT_MAX_ATTEMPTS`
|
||||
|
||||
### Provisioning / Resource Controls
|
||||
|
||||
- `AGENT_WORKTREE_ROOT`
|
||||
- `AGENT_WORKTREE_BASE_REF`
|
||||
- `AGENT_WORKTREE_TARGET_PATH` (optional relative path; enables sparse checkout and sets session working directory to that subfolder)
|
||||
- `AGENT_PORT_BASE`
|
||||
- `AGENT_PORT_BLOCK_SIZE`
|
||||
- `AGENT_PORT_BLOCK_COUNT`
|
||||
@@ -277,7 +329,7 @@ jq -c 'select(.severity=="critical")' .ai_ops/events/runtime-events.ndjson
|
||||
|
||||
### Security Middleware
|
||||
|
||||
- `AGENT_SECURITY_VIOLATION_MODE` (`hard_abort` or `validation_fail`)
|
||||
- `AGENT_SECURITY_VIOLATION_MODE` (`hard_abort`, `validation_fail`, or `dangerous_warn_only`)
|
||||
- `AGENT_SECURITY_ALLOWED_BINARIES`
|
||||
- `AGENT_SECURITY_COMMAND_TIMEOUT_MS`
|
||||
- `AGENT_SECURITY_AUDIT_LOG_PATH`
|
||||
|
||||
73
demo-manifest.json
Normal file
73
demo-manifest.json
Normal file
@@ -0,0 +1,73 @@
|
||||
{
|
||||
"schemaVersion": "1",
|
||||
"topologies": [
|
||||
"sequential"
|
||||
],
|
||||
"personas": [
|
||||
{
|
||||
"id": "researcher",
|
||||
"displayName": "Researcher",
|
||||
"systemPromptTemplate": "You are a researcher. Read the README.md file in the repository to understand the core architecture of the AI Ops platform. Once you understand it, output your summary.",
|
||||
"toolClearance": {
|
||||
"allowlist": [
|
||||
"read_file",
|
||||
"list_directory"
|
||||
],
|
||||
"banlist": []
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "writer",
|
||||
"displayName": "Writer",
|
||||
"systemPromptTemplate": "You are a writer. Take the summary provided by the researcher and write it to a new file called 'demo-summary.txt' in the root directory.",
|
||||
"toolClearance": {
|
||||
"allowlist": [
|
||||
"write_file"
|
||||
],
|
||||
"banlist": []
|
||||
}
|
||||
}
|
||||
],
|
||||
"relationships": [
|
||||
{
|
||||
"parentPersonaId": "researcher",
|
||||
"childPersonaId": "writer",
|
||||
"constraints": {
|
||||
"maxDepth": 1,
|
||||
"maxChildren": 1
|
||||
}
|
||||
}
|
||||
],
|
||||
"topologyConstraints": {
|
||||
"maxDepth": 5,
|
||||
"maxRetries": 2
|
||||
},
|
||||
"pipeline": {
|
||||
"entryNodeId": "research-node",
|
||||
"nodes": [
|
||||
{
|
||||
"id": "research-node",
|
||||
"actorId": "researcher_actor",
|
||||
"personaId": "researcher",
|
||||
"topology": {
|
||||
"kind": "sequential"
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "write-node",
|
||||
"actorId": "writer_actor",
|
||||
"personaId": "writer",
|
||||
"topology": {
|
||||
"kind": "sequential"
|
||||
}
|
||||
}
|
||||
],
|
||||
"edges": [
|
||||
{
|
||||
"from": "research-node",
|
||||
"to": "write-node",
|
||||
"on": "success"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -13,6 +13,7 @@ The orchestration runtime introduces explicit schema validation and deterministi
|
||||
- Project context store (`src/agents/project-context.ts`): project-scoped global flags, artifact pointers, and task queue persisted across sessions.
|
||||
- Orchestration facade (`src/agents/orchestration.ts`): wires manifest + registry + pipeline + state manager + project context with env-driven limits.
|
||||
- Hierarchical resource suballocation (`src/agents/provisioning.ts`): builds child `git-worktree` and child `port-range` requests from parent allocation data.
|
||||
- Optional `AGENT_WORKTREE_TARGET_PATH` enables sparse-checkout for a subdirectory and sets per-session working directory to that target path.
|
||||
- Recursive manager runtime (`src/agents/manager.ts`): utility invoked by the pipeline engine for fan-out/retry-unrolled execution.
|
||||
|
||||
## Constraint model
|
||||
@@ -36,6 +37,11 @@ Before each actor invocation, orchestration resolves an immutable `ResolvedExecu
|
||||
|
||||
This keeps orchestration policy resolution separate from executor enforcement. Executors do not need to parse manifests or MCP registry internals.
|
||||
|
||||
Worktree ownership invariant:
|
||||
|
||||
- In UI session mode, orchestration/session lifecycle is the single owner of git worktree allocation.
|
||||
- Provider adapters (Codex/Claude runtime wrappers) must execute inside `ResolvedExecutionContext.security.worktreePath` and must not provision independent worktrees.
|
||||
|
||||
## Execution topology model
|
||||
|
||||
- Pipeline graph execution is DAG-based with ready-node frontiers.
|
||||
@@ -51,10 +57,16 @@ This keeps orchestration policy resolution separate from executor enforcement. E
|
||||
- planning: `requirements_defined`, `tasks_planned`
|
||||
- execution: `code_committed`, `task_blocked`
|
||||
- validation: `validation_passed`, `validation_failed`
|
||||
- integration: `branch_merged`
|
||||
- integration: `branch_merged`, `merge_conflict_detected`, `merge_conflict_resolved`, `merge_conflict_unresolved`, `merge_retry_started`
|
||||
- Pipeline edges can trigger on domain events (`edge.event`) in addition to legacy status triggers (`edge.on`).
|
||||
- `history_has_event` route conditions evaluate persisted domain event history entries (`validation_failed`, `task_blocked`, etc.).
|
||||
|
||||
## Merge conflict orchestration
|
||||
|
||||
- Task merge/close merge operations return structured outcomes (`success`, `conflict`, `fatal_error`) instead of throwing for conflicts.
|
||||
- Task state supports conflict workflows (`conflict`, `resolving_conflict`) and conflict metadata is persisted under `task.metadata.mergeConflict`.
|
||||
- Conflict retries are bounded by `AGENT_MERGE_CONFLICT_MAX_ATTEMPTS`; exhaustion emits `merge_conflict_unresolved` and the session continues without crashing.
|
||||
|
||||
## Security note
|
||||
|
||||
Security enforcement now lives in `src/security`:
|
||||
|
||||
@@ -30,6 +30,7 @@ This middleware provides a first-pass hardening layer for agent-executed shell c
|
||||
|
||||
- `hard_abort` (default): fail fast and stop the pipeline.
|
||||
- `validation_fail`: map violation to retry-unrolled behavior so the actor can attempt a compliant alternative.
|
||||
- `dangerous_warn_only`: emit security audit/runtime events but continue execution. This is intentionally unsafe and should only be used for temporary unblock/debug workflows.
|
||||
|
||||
## MCP integration
|
||||
|
||||
@@ -40,6 +41,7 @@ This middleware provides a first-pass hardening layer for agent-executed shell c
|
||||
- `registry`: resolved runtime `McpRegistry`
|
||||
- `resolveConfig(...)`: centralized MCP config resolution with persona tool-clearance applied
|
||||
- `createClaudeCanUseTool()`: helper for Claude SDK `canUseTool` callback so each tool invocation is allowlist/banlist-enforced before execution
|
||||
- Tool matching is case-insensitive at invocation time to handle provider-emitted names like `Bash` versus allowlist entries like `bash`.
|
||||
|
||||
## Known limits and TODOs
|
||||
|
||||
|
||||
160
docs/session-walkthrough.md
Normal file
160
docs/session-walkthrough.md
Normal file
@@ -0,0 +1,160 @@
|
||||
# Session Walkthrough (Concrete Example)
|
||||
|
||||
This document walks through one successful provider run end-to-end using:
|
||||
|
||||
- session id: `ui-session-mlzw94bv-cb753677`
|
||||
- run id: `9287775f-a507-492a-9afa-347ed3f3a6b3`
|
||||
- execution mode: `provider`
|
||||
- provider: `claude`
|
||||
- manifest: `.ai_ops/manifests/test.json`
|
||||
|
||||
Use this as a mental model and as a debugging template for future sessions.
|
||||
|
||||
## 1) What happened in this run
|
||||
|
||||
The manifest defines two sequential nodes:
|
||||
|
||||
1. `write-node` (persona: writer)
|
||||
2. `copy-node` (persona: copy-editor)
|
||||
|
||||
Edge routing is `write-node -> copy-node` on `success`.
|
||||
|
||||
In this run:
|
||||
|
||||
1. `write-node` succeeded on attempt 1 and emitted `validation_passed` and `tasks_planned`.
|
||||
2. `copy-node` succeeded on attempt 1 and emitted `validation_passed`.
|
||||
3. Session aggregate status was `success`.
|
||||
|
||||
## 2) Timeline from runtime events
|
||||
|
||||
From `.ai_ops/events/runtime-events.ndjson`:
|
||||
|
||||
1. `2026-02-24T00:55:28.632Z` `session.started`
|
||||
2. `2026-02-24T00:55:48.705Z` `node.attempt.completed` for `write-node` with `status=success`
|
||||
3. `2026-02-24T00:55:48.706Z` `domain.validation_passed` for `write-node`
|
||||
4. `2026-02-24T00:55:48.706Z` `domain.tasks_planned` for `write-node`
|
||||
5. `2026-02-24T00:56:14.237Z` `node.attempt.completed` for `copy-node` with `status=success`
|
||||
6. `2026-02-24T00:56:14.238Z` `domain.validation_passed` for `copy-node`
|
||||
7. `2026-02-24T00:56:14.242Z` `session.completed` with `status=success`
|
||||
|
||||
## 3) How artifacts map to runtime behavior
|
||||
|
||||
### Run metadata (UI-level)
|
||||
|
||||
`state/<session>/ui-run-meta.json` stores run summary fields:
|
||||
|
||||
- run/provider/mode
|
||||
- status (`running`, `success`, `failure`, `cancelled`)
|
||||
- start/end timestamps
|
||||
|
||||
For this run:
|
||||
|
||||
```json
|
||||
{
|
||||
"sessionId": "ui-session-mlzw94bv-cb753677",
|
||||
"status": "success",
|
||||
"executionMode": "provider",
|
||||
"provider": "claude"
|
||||
}
|
||||
```
|
||||
|
||||
### Handoffs (node input payloads)
|
||||
|
||||
`state/<session>/handoffs/*.json` stores payload handoffs per node.
|
||||
|
||||
`write-node.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"nodeId": "write-node",
|
||||
"payload": { "prompt": "be yourself" }
|
||||
}
|
||||
```
|
||||
|
||||
`copy-node.json` includes `fromNodeId: "write-node"` and carries the story generated by the writer node.
|
||||
|
||||
Important: this file captures the payload transferred along the pipeline edge. If a downstream node output looks strange, inspect this file first.
|
||||
|
||||
### Session state (flags + metadata + history)
|
||||
|
||||
`state/<session>/state.json` is cumulative session state:
|
||||
|
||||
- `flags`: merged boolean flags from node results
|
||||
- `metadata`: merged metadata from node results/behavior patches
|
||||
- `history`: domain-event history entries
|
||||
|
||||
For this run, state includes:
|
||||
|
||||
- flags: `story_written=true`, `copy_edited=true`
|
||||
- history events:
|
||||
- `write-node: validation_passed`
|
||||
- `write-node: tasks_planned`
|
||||
- `copy-node: validation_passed`
|
||||
|
||||
### Project context pointer
|
||||
|
||||
`.ai_ops/project-context.json` tracks cross-session pointers like:
|
||||
|
||||
- `sessions/<session>/last_completed_node`
|
||||
- `sessions/<session>/last_attempt`
|
||||
- `sessions/<session>/final_state`
|
||||
|
||||
This lets operators and tooling locate the final state file for any completed session.
|
||||
|
||||
## 4) Code path (from button click to persisted state)
|
||||
|
||||
1. UI starts run via `UiRunService.startRun(...)`.
|
||||
2. Service loads config, parses manifest, creates engine, writes initial run meta.
|
||||
3. Engine `runSession(...)` initializes state and writes entry handoff.
|
||||
4. Pipeline executes ready nodes:
|
||||
- builds fresh node context (`handoff + state`)
|
||||
- renders persona system prompt
|
||||
- invokes provider executor
|
||||
- receives actor result
|
||||
5. Lifecycle observer persists:
|
||||
- state flags/metadata/history
|
||||
- runtime events (`node.attempt.completed`, `domain.*`)
|
||||
- project context pointers (`last_completed_node`, `last_attempt`)
|
||||
6. Pipeline evaluates edges and writes downstream handoffs.
|
||||
7. Pipeline computes aggregate status and emits `session.completed`.
|
||||
8. UI run service writes final `ui-run-meta.json` status from pipeline summary.
|
||||
|
||||
Primary entrypoints:
|
||||
|
||||
- `src/ui/run-service.ts`
|
||||
- `src/agents/orchestration.ts`
|
||||
- `src/agents/pipeline.ts`
|
||||
- `src/agents/lifecycle-observer.ts`
|
||||
- `src/agents/state-context.ts`
|
||||
- `src/ui/provider-executor.ts`
|
||||
|
||||
## 5) Mental model that keeps this manageable
|
||||
|
||||
Think of one session as five stores and one loop:
|
||||
|
||||
1. Manifest (static plan): node graph + routing rules.
|
||||
2. Handoffs (per-node input payload snapshots).
|
||||
3. State (session memory): flags + metadata + domain history.
|
||||
4. Runtime events (timeline/audit side channel).
|
||||
5. Project context (cross-session pointers and shared context).
|
||||
6. Loop: dequeue ready node -> execute -> persist result/events -> enqueue next nodes.
|
||||
|
||||
If you track those six things, behavior becomes deterministic and explainable.
|
||||
|
||||
## 6) Debug checklist for any future session id
|
||||
|
||||
Given `<sid>`, inspect in this order:
|
||||
|
||||
1. `state/<sid>/ui-run-meta.json`
|
||||
2. `.ai_ops/events/runtime-events.ndjson` filtered by `<sid>`
|
||||
3. `state/<sid>/handoffs/*.json`
|
||||
4. `state/<sid>/state.json`
|
||||
5. `.ai_ops/project-context.json` pointer entries for `<sid>`
|
||||
|
||||
Interpretation:
|
||||
|
||||
1. No `session.started`: run failed before pipeline began.
|
||||
2. `node.attempt.completed` with `failureCode=provider_*`: provider/runtime issue.
|
||||
3. Missing downstream handoff file: edge condition did not pass.
|
||||
4. `history` has `validation_failed`: retry/unrolled path or remediation branch likely triggered.
|
||||
5. `ui-run-meta` disagrees with runtime events: check run-service status mapping and restart server on new code.
|
||||
262
human_only_TODO
262
human_only_TODO
@@ -10,21 +10,137 @@
|
||||
|
||||
# in progress
|
||||
|
||||
there is some major ui issue. there is app/provider logic wrapped up in the ui which i didnt know about or understand and it has gotten out of hand. we need to rip it out and clean it up. additionally the work trees are still not working as intended after like 5 attempts to fix it so that has got to be officially spaghetti at this point
|
||||
|
||||
here is the takeaway from the ui app logic issue
|
||||
|
||||
- Keep orchestration core in src/agents.
|
||||
- Move backend run/session/provider code out of src/ui into src/control-plane (or src/backend).
|
||||
- Keep src/ui as static/frontend + API client only.
|
||||
- Treat provider prompt shaping as an adapter concern (src/providers), not UI concern.
|
||||
|
||||
|
||||
test results
|
||||
session itself has a dir in worktrees that is a worktree
|
||||
then there is a base dir and a tasks dir
|
||||
base is also a worktree
|
||||
inside of base, there is ANOTHER WORKTREE
|
||||
inside of tasks is a product-intake??? directory
|
||||
code is being written in both product-intake and the worktree in the base/worktrees/d3e411... directory
|
||||
|
||||
i dont think that the product guy is writing any files
|
||||
fwiw, the dev agents are definitely making the app
|
||||
|
||||
log activity of claude code binary
|
||||
WHY IS IT STILL NOT LOGGING WHAT IS ACTUALLY HAPPENING
|
||||
it will not explain it, it just keeps adding different logs
|
||||
test run
|
||||
|
||||
they are writing files!
|
||||
|
||||
# problem 1 - logging
|
||||
logging is still fucking dog dick fuck ass shit
|
||||
|
||||
# problem 2 - worktree
|
||||
the worktree shit is fucking insanity
|
||||
they are getting confused because they see some of the orchestration infrastructure
|
||||
they legit need to be in a clean room and know nothing about the world outside of their project going forward
|
||||
|
||||
# problem 3 - task management/product context being passed in its entirety
|
||||
the dev agents for some reason have the entire fucking task list in their context
|
||||
|
||||
|
||||
|
||||
# Scheduled
|
||||
|
||||
|
||||
So yes, the UI growing into “its own project” increases risk because orchestration logic leaks into UI-layer
|
||||
services.
|
||||
|
||||
Best refactor target:
|
||||
|
||||
1. Make UI a thin transport layer (HTTP in/out, no resource ownership decisions).
|
||||
2. Move run/session orchestration into one app-service module with a strict interface.
|
||||
3. Enforce single-owner invariants in code (worktree owner = session lifecycle only).
|
||||
4. Add contract tests around ownership boundaries (like the regression we just added).
|
||||
|
||||
what even is src/examples ????
|
||||
|
||||
|
||||
clean up events/locks/ports (may not be needed with new session work?)
|
||||
|
||||
|
||||
|
||||
|
||||
ui is gargantuan - needs a full rewrite in a different dir or something holy
|
||||
|
||||
the ais arent actually writing to the directory
|
||||
the ui is fucking bad
|
||||
it kinda slow
|
||||
i think the banned command thing is kind of restrictive, idk if they will really be able to do anything
|
||||
codex isnt working?
|
||||
i dont even know if this runs on linux at all
|
||||
wtf is even happening in the backend i dont see any logs for it anywhere
|
||||
|
||||
# identify potential design conflict dag vs things that happen not on the dag?
|
||||
## linked to confusion around max env vars and what they do
|
||||
|
||||
- "This runtime enforces a DAG, so merge-conflict retry loops are multi-run (rerun the same managed session) rather than in-run graph cycles."
|
||||
- what are the implications of this? is this a stop gap until we do stricter control at the ends (task manager + git worktree cleanup)? or do we have a design inconsistency that we need to address?
|
||||
|
||||
For structured-dev-workflow specifically
|
||||
|
||||
- Happy path is depth 4 (product -> task -> dev -> qa -> merge).
|
||||
- QA-fail/rework path reaches depth ~6.
|
||||
- If merge-conflict resolver nodes are used, path can reach ~7.
|
||||
|
||||
# confusion around max env vars and what they do
|
||||
## linked to identify potential design conflict dag vs things that happen not on the dag?
|
||||
|
||||
PROBLEM
|
||||
.env has AGENT_MAX_RECURSIVE_DEPTH=3, but structured-dev-workflow requires depth 4 (product -> task -> dev -> qa -> merge)
|
||||
- does this mean that if qa sends a task back to dev that the depth is 5? or does it literally mean num of agents in a branch of the dag?
|
||||
- if it blocks, problem because it will just fail if we have to pass back
|
||||
- if it doesnt, problem bc this does nothing - we are either allowing them to loop infinitely creating themselves OR we are relying on enforcing that elsewhere and we need to consolidate
|
||||
|
||||
RESPONSE
|
||||
|
||||
• Short answer: your concern is valid, and today the two depth limits are conflated more than they should be.
|
||||
|
||||
1. Does QA -> Dev make depth 5?
|
||||
|
||||
- In this system, depth is not “number of unique agents.” It is path depth from the entry node (edge distance), plus recursive child depth when retries/fanout happen.
|
||||
- Back-edges like qa -> dev to the same upstream node are not allowed because pipeline must be a strict DAG (manifest.ts (/home/zman/projects/ai_ops/src/agents/manifest.ts)).
|
||||
- So “send back” is modeled as downstream rework nodes (qa -> dev-rework -> qa-rework -> merge), which does increase depth.
|
||||
|
||||
2. If it blocks, that’s bad; if not, does the limit do nothing?
|
||||
|
||||
- It does block today, and it also does still serve a purpose.
|
||||
- PipelineExecutor increments node depth by edge traversal (pipeline.ts (/home/zman/projects/ai_ops/src/agents/pipeline.ts:748), pipeline.ts (/home/zman/projects/ai_ops/src/agents/pipeline.ts:815)).
|
||||
- But that same depth is passed into AgentManager.runRecursive (pipeline.ts (/home/zman/projects/ai_ops/src/agents/pipeline.ts:875)), and AgentManager enforces maxRecursiveDepth on that depth (manager.ts (/home/zman/projects/ai_ops/
|
||||
src/agents/manager.ts:343)).
|
||||
- So AGENT_MAX_RECURSIVE_DEPTH is currently acting as a second pipeline-depth cap, not just a recursion cap.
|
||||
|
||||
For structured-dev-workflow specifically
|
||||
|
||||
- Happy path is depth 4 (product -> task -> dev -> qa -> merge).
|
||||
- QA-fail/rework path reaches depth ~6.
|
||||
- If merge-conflict resolver nodes are used, path can reach ~7.
|
||||
- So with .env values like AGENT_MAX_RECURSIVE_DEPTH=3 and AGENT_TOPOLOGY_MAX_DEPTH=4, fallback branches will fail.
|
||||
|
||||
SOLUTION
|
||||
|
||||
2. Structural fix (better):
|
||||
|
||||
- Decouple meanings:
|
||||
- AGENT_TOPOLOGY_MAX_DEPTH should gate DAG traversal depth only.
|
||||
- AGENT_MAX_RECURSIVE_DEPTH should gate recursive fanout/retry depth only.
|
||||
- In practice: stop passing pipeline queue depth into manager recursive depth; start recursive runs at a local depth baseline per node.
|
||||
|
||||
3. Safety/clarity guard:
|
||||
|
||||
- Add a preflight check that computes max possible DAG depth and warns/errors if env depth limits are below it.
|
||||
|
||||
# other scheduled
|
||||
|
||||
- persona definitions
|
||||
@@ -556,3 +672,149 @@ Manifest Builder: A UI to visually build or edit the AgentManifest (Schema "1"),
|
||||
Security Policy Management: An interface mapped to src/security/schemas.ts. This allows admins to define AGENT_SECURITY_ALLOWED_BINARIES, toggle AGENT_SECURITY_VIOLATION_MODE (hard_abort vs validation_fail), and manage MCP tool allowlists/banlists.
|
||||
|
||||
Environment & Resource Limits: Simple forms to configure agent manager limits (AGENT_MAX_CONCURRENT) and port block sizing without manually editing the .env file.
|
||||
|
||||
|
||||
# Architecture Requirements: Session Isolation & Task-Scoped Worktrees
|
||||
|
||||
## Objective
|
||||
|
||||
Disentangle the `ai_ops` control plane from the target project data plane. Replace the implicit `process.cwd()` execution anchor with a formal Session lifecycle and dynamic, task-scoped Git worktrees. This ensures concurrent agents operate in isolated environments and prevents the runtime from mutating its own repository.
|
||||
|
||||
## 1. Domain Definitions
|
||||
|
||||
- **Target Project:** The absolute local path to the repository being operated on (e.g., `/home/user/target_repo`).
|
||||
|
||||
- **Session (The Clean Room):** A persistent orchestration context strictly bound to one Target Project. It maintains a "Base Workspace" (a localized Git checkout/branch) that represents the integrated, approved state of the current work period.
|
||||
|
||||
- **Task Worktree:** An ephemeral Git worktree branched from the Session's Base Workspace. It is scoped strictly to a `taskId`, enabling multi-agent handoffs (e.g., Coder $\rightarrow$ QA) within the same isolated environment before merging back to the Base Workspace.
|
||||
|
||||
|
||||
## 2. Core Data Model Updates
|
||||
|
||||
Introduce explicit types to track project binding and resource ownership.
|
||||
|
||||
- **API Payloads:**
|
||||
|
||||
TypeScript
|
||||
|
||||
```
|
||||
interface CreateSessionRequest {
|
||||
projectPath: string; // Absolute local path to target repo
|
||||
}
|
||||
```
|
||||
|
||||
- **Session State (`AGENT_STATE_ROOT`):**
|
||||
|
||||
TypeScript
|
||||
|
||||
```
|
||||
interface SessionMetadata {
|
||||
sessionId: string;
|
||||
projectPath: string;
|
||||
sessionStatus: 'active' | 'suspended' | 'closed';
|
||||
baseWorkspacePath: string; // e.g., ${AGENT_WORKTREE_ROOT}/${sessionId}/base
|
||||
createdAt: string;
|
||||
updatedAt: string;
|
||||
}
|
||||
```
|
||||
|
||||
- **Project Context (`src/agents/project-context.ts`):**
|
||||
|
||||
Update the `taskQueue` schema to act as the persistent ledger for worktree ownership.
|
||||
|
||||
TypeScript
|
||||
|
||||
```
|
||||
interface TaskRecord {
|
||||
taskId: string;
|
||||
status: 'pending' | 'in_progress' | 'review' | 'merged' | 'failed';
|
||||
worktreePath?: string; // e.g., ${AGENT_WORKTREE_ROOT}/${sessionId}/tasks/${taskId}
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## 3. API & Control Plane (`src/ui/server.ts`)
|
||||
|
||||
Replace implicit session generation with an explicit lifecycle API.
|
||||
|
||||
- `POST /api/sessions`: Accepts `CreateSessionRequest`. Initializes the SessionMetadata and provisions the Base Workspace.
|
||||
|
||||
- `GET /api/sessions`: Returns existing sessions for resuming work across restarts.
|
||||
|
||||
- `POST /api/sessions/:id/run`: Triggers `SchemaDrivenExecutionEngine.runSession(...)`, passing the resolved `SessionMetadata`.
|
||||
|
||||
- `POST /api/sessions/:id/close`: Prunes all task worktrees, optionally merges the Base Workspace back to the original `projectPath`, and marks the session closed.
|
||||
|
||||
|
||||
## 4. Provisioning Layer (`src/agents/provisioning.ts`)
|
||||
|
||||
Remove all fallback logic relying on `process.cwd()`.
|
||||
|
||||
- **Session Initialization:** Clone or create a primary worktree of `projectPath` into `baseWorkspacePath`.
|
||||
|
||||
- **Task Provisioning:** When a task begins execution, check out a new branch from the Base Workspace and provision it at `worktreePath`.
|
||||
|
||||
- **Security & MCP Isolation:** `SecureCommandExecutor` and MCP handler configurations must dynamically anchor their working directories to the specific `worktreePath` injected into the execution context, preventing traversal outside the task scope.
|
||||
|
||||
|
||||
## 5. Orchestration & Routing (`src/agents/pipeline.ts`)
|
||||
|
||||
Implement the hybrid routing model: Domain Events for control flow, Project Context for resource lookup.
|
||||
|
||||
1. **The Signal (Domain Events):** When a Coder agent finishes, it emits a standard domain event (e.g., `task_ready_for_review` with the `taskId`). The pipeline routes this event to trigger the QA agent.
|
||||
|
||||
2. **The Map (Project Context):** Before initializing the QA agent's sandbox, the lifecycle observer/engine reads `project-context.ts` to look up the `worktreePath` associated with that `taskId`.
|
||||
|
||||
3. **The Execution:** The QA agent boots inside the exact same Task Worktree the Coder agent just vacated, preserving all uncommitted files and local state.
|
||||
|
||||
4. **The Merge:** Upon successful QA (e.g., `validation_passed`), the orchestration layer commits the Task Worktree, merges it into the Base Workspace, and deletes the Task Worktree.
|
||||
|
||||
|
||||
# turning merge conflicts into first-class orchestration events instead of hard exceptions.
|
||||
|
||||
1. Add new domain events:
|
||||
|
||||
- merge_conflict_detected
|
||||
- merge_conflict_resolved
|
||||
- merge_conflict_unresolved (after max attempts)
|
||||
- optionally merge_retry_started
|
||||
|
||||
2. Extend task state model with conflict-aware statuses:
|
||||
|
||||
- add conflict (and maybe resolving_conflict)
|
||||
|
||||
3. Change merge code path to return structured outcomes instead of throwing on conflict:
|
||||
|
||||
- success
|
||||
- conflict (with conflictFiles, mergeBase, taskId, worktreePath)
|
||||
- fatal_error
|
||||
- only throw for truly fatal cases (repo corruption, missing worktree, etc.)
|
||||
|
||||
4. On conflict, patch project context + emit event:
|
||||
|
||||
- set task to conflict
|
||||
- store conflict metadata in task.metadata
|
||||
- emit merge_conflict_detected
|
||||
|
||||
5. Route conflict events to dedicated resolver personas in the pipeline:
|
||||
|
||||
- Coder/QA conflict-resolver agent opens same worktreePath
|
||||
- resolves conflict markers, runs checks
|
||||
- emits merge_conflict_resolved
|
||||
|
||||
6. Retry merge after resolution event:
|
||||
|
||||
- integration node attempts merge again
|
||||
- if successful, emit branch_merged, mark merged, prune task worktree
|
||||
- if still conflicting, loop with bounded retries
|
||||
|
||||
7. Add retry guardrails:
|
||||
|
||||
- max conflict-resolution attempts per task
|
||||
- on exhaustion emit merge_conflict_unresolved and stop cleanly (not crash the whole session)
|
||||
|
||||
8. Apply same pattern to session close (base -> project) so close can become:
|
||||
|
||||
- conflict workflow or “closed_with_conflicts” state, rather than a hard failure.
|
||||
|
||||
This keeps the app stable and lets agents handle conflicts as part of normal orchestration.
|
||||
968
package-lock.json
generated
968
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -10,7 +10,8 @@
|
||||
"test": "node --import tsx/esm --test tests/**/*.test.ts",
|
||||
"verify": "npm run check && npm run check:tests && npm run test && npm run build",
|
||||
"dev": "node --import tsx/esm src/index.ts",
|
||||
"ui": "node --import tsx/esm src/ui/server.ts",
|
||||
"ui:build": "npm --prefix ui run build",
|
||||
"ui": "npm run ui:build && node --import tsx/esm src/ui/server.ts",
|
||||
"codex": "node --import tsx/esm src/examples/codex.ts",
|
||||
"claude": "node --import tsx/esm src/examples/claude.ts",
|
||||
"start": "node dist/index.js"
|
||||
@@ -29,11 +30,16 @@
|
||||
"dependencies": {
|
||||
"@anthropic-ai/claude-agent-sdk": "^0.2.50",
|
||||
"@openai/codex-sdk": "^0.104.0",
|
||||
"cors": "^2.8.6",
|
||||
"dotenv": "^17.3.1",
|
||||
"express": "^5.2.1",
|
||||
"lowdb": "^7.0.1",
|
||||
"sh-syntax": "^0.5.8",
|
||||
"zod": "^4.3.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/cors": "^2.8.19",
|
||||
"@types/express": "^5.0.6",
|
||||
"@types/node": "^25.3.0",
|
||||
"tsx": "^4.21.0",
|
||||
"typescript": "^5.9.3"
|
||||
|
||||
@@ -2,9 +2,14 @@ import { randomUUID } from "node:crypto";
|
||||
import type { JsonObject } from "./types.js";
|
||||
|
||||
export type PlanningDomainEventType = "requirements_defined" | "tasks_planned";
|
||||
export type ExecutionDomainEventType = "code_committed" | "task_blocked";
|
||||
export type ExecutionDomainEventType = "code_committed" | "task_blocked" | "task_ready_for_review";
|
||||
export type ValidationDomainEventType = "validation_passed" | "validation_failed";
|
||||
export type IntegrationDomainEventType = "branch_merged";
|
||||
export type IntegrationDomainEventType =
|
||||
| "branch_merged"
|
||||
| "merge_conflict_detected"
|
||||
| "merge_conflict_resolved"
|
||||
| "merge_conflict_unresolved"
|
||||
| "merge_retry_started";
|
||||
|
||||
export type DomainEventType =
|
||||
| PlanningDomainEventType
|
||||
@@ -46,9 +51,14 @@ const DOMAIN_EVENT_TYPES = new Set<DomainEventType>([
|
||||
"tasks_planned",
|
||||
"code_committed",
|
||||
"task_blocked",
|
||||
"task_ready_for_review",
|
||||
"validation_passed",
|
||||
"validation_failed",
|
||||
"branch_merged",
|
||||
"merge_conflict_detected",
|
||||
"merge_conflict_resolved",
|
||||
"merge_conflict_unresolved",
|
||||
"merge_retry_started",
|
||||
]);
|
||||
|
||||
export function isDomainEventType(value: string): value is DomainEventType {
|
||||
|
||||
@@ -50,10 +50,14 @@ function toNodeAttemptSeverity(status: ActorResultStatus): RuntimeEventSeverity
|
||||
}
|
||||
|
||||
function toDomainEventSeverity(type: DomainEventType): RuntimeEventSeverity {
|
||||
if (type === "task_blocked") {
|
||||
if (type === "task_blocked" || type === "merge_conflict_unresolved") {
|
||||
return "critical";
|
||||
}
|
||||
if (type === "validation_failed") {
|
||||
if (
|
||||
type === "validation_failed" ||
|
||||
type === "merge_conflict_detected" ||
|
||||
type === "merge_retry_started"
|
||||
) {
|
||||
return "warning";
|
||||
}
|
||||
return "info";
|
||||
|
||||
@@ -1,398 +1,113 @@
|
||||
import { isRecord } from "./types.js";
|
||||
import { z } from "zod";
|
||||
import { isDomainEventType, type DomainEventType } from "./domain-events.js";
|
||||
import {
|
||||
parseToolClearancePolicy,
|
||||
toolClearancePolicySchema,
|
||||
type ToolClearancePolicy as SecurityToolClearancePolicy,
|
||||
} from "../security/schemas.js";
|
||||
|
||||
export type ToolClearancePolicy = SecurityToolClearancePolicy;
|
||||
|
||||
export type ManifestPersona = {
|
||||
id: string;
|
||||
displayName: string;
|
||||
systemPromptTemplate: string;
|
||||
modelConstraint?: string;
|
||||
toolClearance: ToolClearancePolicy;
|
||||
};
|
||||
export const ManifestPersonaSchema = z.object({
|
||||
id: z.string().trim().min(1, 'Manifest field "id" must be a non-empty string.'),
|
||||
displayName: z.string().trim().min(1, 'Manifest field "displayName" must be a non-empty string.'),
|
||||
systemPromptTemplate: z.string().trim().min(1, 'Manifest field "systemPromptTemplate" must be a non-empty string.'),
|
||||
modelConstraint: z.string().trim().min(1, 'Manifest persona field "modelConstraint" must be a non-empty string when provided.').optional(),
|
||||
toolClearance: toolClearancePolicySchema,
|
||||
});
|
||||
export type ManifestPersona = z.infer<typeof ManifestPersonaSchema>;
|
||||
|
||||
export type RelationshipConstraint = {
|
||||
maxDepth?: number;
|
||||
maxChildren?: number;
|
||||
};
|
||||
export const RelationshipConstraintSchema = z.object({
|
||||
maxDepth: z.number().int().min(1, 'Manifest field "maxDepth" must be an integer >= 1.').optional(),
|
||||
maxChildren: z.number().int().min(1, 'Manifest field "maxChildren" must be an integer >= 1.').optional(),
|
||||
});
|
||||
export type RelationshipConstraint = z.infer<typeof RelationshipConstraintSchema>;
|
||||
|
||||
export type RelationshipEdge = {
|
||||
parentPersonaId: string;
|
||||
childPersonaId: string;
|
||||
constraints?: RelationshipConstraint;
|
||||
};
|
||||
export const RelationshipEdgeSchema = z.object({
|
||||
parentPersonaId: z.string().trim().min(1, 'Manifest field "parentPersonaId" must be a non-empty string.'),
|
||||
childPersonaId: z.string().trim().min(1, 'Manifest field "childPersonaId" must be a non-empty string.'),
|
||||
constraints: RelationshipConstraintSchema.optional(),
|
||||
});
|
||||
export type RelationshipEdge = z.infer<typeof RelationshipEdgeSchema>;
|
||||
|
||||
export type RouteCondition =
|
||||
| {
|
||||
type: "always";
|
||||
}
|
||||
| {
|
||||
type: "state_flag";
|
||||
key: string;
|
||||
equals: boolean;
|
||||
}
|
||||
| {
|
||||
type: "history_has_event";
|
||||
event: string;
|
||||
}
|
||||
| {
|
||||
type: "file_exists";
|
||||
path: string;
|
||||
};
|
||||
export const RouteConditionSchema = z.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal("always") }),
|
||||
z.object({ type: z.literal("state_flag"), key: z.string().trim().min(1), equals: z.boolean() }),
|
||||
z.object({ type: z.literal("history_has_event"), event: z.string().trim().min(1) }),
|
||||
z.object({ type: z.literal("file_exists"), path: z.string().trim().min(1) }),
|
||||
]);
|
||||
export type RouteCondition = z.infer<typeof RouteConditionSchema>;
|
||||
|
||||
export type PipelineConstraint = {
|
||||
maxRetries?: number;
|
||||
};
|
||||
export const PipelineConstraintSchema = z.object({
|
||||
maxRetries: z.number().int().min(0, 'Manifest field "maxRetries" must be an integer >= 0.').optional(),
|
||||
});
|
||||
export type PipelineConstraint = z.infer<typeof PipelineConstraintSchema>;
|
||||
|
||||
export type NodeTopologyKind = "sequential" | "parallel" | "hierarchical" | "retry-unrolled";
|
||||
export const NodeTopologyKindSchema = z.enum(["sequential", "parallel", "hierarchical", "retry-unrolled"]);
|
||||
export type NodeTopologyKind = z.infer<typeof NodeTopologyKindSchema>;
|
||||
|
||||
export type PipelineNodeTopology = {
|
||||
kind: NodeTopologyKind;
|
||||
blockId?: string;
|
||||
};
|
||||
export const PipelineNodeTopologySchema = z.object({
|
||||
kind: NodeTopologyKindSchema,
|
||||
blockId: z.string().trim().min(1, 'Pipeline node topology blockId must be a non-empty string when provided.').optional(),
|
||||
});
|
||||
export type PipelineNodeTopology = z.infer<typeof PipelineNodeTopologySchema>;
|
||||
|
||||
export type PipelineNode = {
|
||||
id: string;
|
||||
actorId: string;
|
||||
personaId: string;
|
||||
constraints?: PipelineConstraint;
|
||||
topology?: PipelineNodeTopology;
|
||||
};
|
||||
export const PipelineNodeSchema = z.object({
|
||||
id: z.string().trim().min(1),
|
||||
actorId: z.string().trim().min(1),
|
||||
personaId: z.string().trim().min(1),
|
||||
constraints: PipelineConstraintSchema.optional(),
|
||||
topology: PipelineNodeTopologySchema.optional(),
|
||||
});
|
||||
export type PipelineNode = z.infer<typeof PipelineNodeSchema>;
|
||||
|
||||
export type PipelineEdge = {
|
||||
from: string;
|
||||
to: string;
|
||||
on?: "success" | "validation_fail" | "failure" | "always";
|
||||
event?: DomainEventType;
|
||||
when?: RouteCondition[];
|
||||
};
|
||||
export const PipelineEdgeSchema = z.object({
|
||||
from: z.string().trim().min(1),
|
||||
to: z.string().trim().min(1),
|
||||
on: z.enum(["success", "validation_fail", "failure", "always"]).optional(),
|
||||
event: z.string().refine((val): val is DomainEventType => isDomainEventType(val), {
|
||||
message: "Pipeline edge field 'event' has unsupported domain event.",
|
||||
}).optional(),
|
||||
when: z.array(RouteConditionSchema).optional(),
|
||||
}).refine((data) => {
|
||||
if (!data.on && !data.event) return false;
|
||||
if (data.on && data.event) return false;
|
||||
return true;
|
||||
}, {
|
||||
message: 'Pipeline edge must provide either an "on" trigger or an "event" trigger, but not both.',
|
||||
});
|
||||
export type PipelineEdge = z.infer<typeof PipelineEdgeSchema>;
|
||||
|
||||
export type PipelineGraph = {
|
||||
entryNodeId: string;
|
||||
nodes: PipelineNode[];
|
||||
edges: PipelineEdge[];
|
||||
};
|
||||
export const PipelineGraphSchema = z.object({
|
||||
entryNodeId: z.string().trim().min(1),
|
||||
nodes: z.array(PipelineNodeSchema).min(1, "Manifest pipeline.nodes must be a non-empty array."),
|
||||
edges: z.array(PipelineEdgeSchema),
|
||||
});
|
||||
export type PipelineGraph = z.infer<typeof PipelineGraphSchema>;
|
||||
|
||||
export type TopologyKind = "hierarchical" | "parallel" | "retry-unrolled" | "sequential";
|
||||
export const TopologyKindSchema = z.enum(["hierarchical", "parallel", "retry-unrolled", "sequential"]);
|
||||
export type TopologyKind = z.infer<typeof TopologyKindSchema>;
|
||||
|
||||
export type TopologyConstraint = {
|
||||
maxDepth: number;
|
||||
maxRetries: number;
|
||||
};
|
||||
export const TopologyConstraintSchema = z.object({
|
||||
maxDepth: z.number().int().min(1).default(4),
|
||||
maxRetries: z.number().int().min(0).default(2),
|
||||
});
|
||||
export type TopologyConstraint = z.infer<typeof TopologyConstraintSchema>;
|
||||
|
||||
export type AgentManifest = {
|
||||
schemaVersion: "1";
|
||||
topologies: TopologyKind[];
|
||||
personas: ManifestPersona[];
|
||||
relationships: RelationshipEdge[];
|
||||
pipeline: PipelineGraph;
|
||||
topologyConstraints: TopologyConstraint;
|
||||
};
|
||||
|
||||
function readString(record: Record<string, unknown>, key: string): string {
|
||||
const value = record[key];
|
||||
if (typeof value !== "string" || value.trim().length === 0) {
|
||||
throw new Error(`Manifest field \"${key}\" must be a non-empty string.`);
|
||||
}
|
||||
return value.trim();
|
||||
}
|
||||
|
||||
function readOptionalInteger(
|
||||
record: Record<string, unknown>,
|
||||
key: string,
|
||||
input: {
|
||||
min: number;
|
||||
},
|
||||
): number | undefined {
|
||||
const value = record[key];
|
||||
if (value === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
if (typeof value !== "number" || !Number.isInteger(value) || value < input.min) {
|
||||
throw new Error(`Manifest field \"${key}\" must be an integer >= ${String(input.min)}.`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
function readStringArray(record: Record<string, unknown>, key: string): string[] {
|
||||
const value = record[key];
|
||||
if (!Array.isArray(value)) {
|
||||
throw new Error(`Manifest field \"${key}\" must be an array.`);
|
||||
}
|
||||
|
||||
const output: string[] = [];
|
||||
for (const item of value) {
|
||||
if (typeof item !== "string" || item.trim().length === 0) {
|
||||
throw new Error(`Manifest field \"${key}\" contains an invalid string.`);
|
||||
}
|
||||
output.push(item.trim());
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
function parseToolClearance(value: unknown): ToolClearancePolicy {
|
||||
try {
|
||||
return parseToolClearancePolicy(value);
|
||||
} catch (error) {
|
||||
const detail = error instanceof Error ? error.message : String(error);
|
||||
throw new Error(`Manifest persona toolClearance is invalid: ${detail}`);
|
||||
}
|
||||
}
|
||||
|
||||
function parsePersona(value: unknown): ManifestPersona {
|
||||
if (!isRecord(value)) {
|
||||
throw new Error("Manifest persona entry must be an object.");
|
||||
}
|
||||
|
||||
const modelConstraintRaw = value.modelConstraint;
|
||||
if (
|
||||
modelConstraintRaw !== undefined &&
|
||||
(typeof modelConstraintRaw !== "string" || modelConstraintRaw.trim().length === 0)
|
||||
) {
|
||||
throw new Error('Manifest persona field "modelConstraint" must be a non-empty string when provided.');
|
||||
}
|
||||
|
||||
return {
|
||||
id: readString(value, "id"),
|
||||
displayName: readString(value, "displayName"),
|
||||
systemPromptTemplate: readString(value, "systemPromptTemplate"),
|
||||
...(typeof modelConstraintRaw === "string"
|
||||
? { modelConstraint: modelConstraintRaw.trim() }
|
||||
: {}),
|
||||
toolClearance: parseToolClearance(value.toolClearance),
|
||||
};
|
||||
}
|
||||
|
||||
function parseRelationship(value: unknown): RelationshipEdge {
|
||||
if (!isRecord(value)) {
|
||||
throw new Error("Manifest relationship entry must be an object.");
|
||||
}
|
||||
|
||||
const constraints = isRecord(value.constraints)
|
||||
? {
|
||||
maxDepth: readOptionalInteger(value.constraints, "maxDepth", { min: 1 }),
|
||||
maxChildren: readOptionalInteger(value.constraints, "maxChildren", { min: 1 }),
|
||||
}
|
||||
: undefined;
|
||||
|
||||
return {
|
||||
parentPersonaId: readString(value, "parentPersonaId"),
|
||||
childPersonaId: readString(value, "childPersonaId"),
|
||||
constraints,
|
||||
};
|
||||
}
|
||||
|
||||
function parseCondition(value: unknown): RouteCondition {
|
||||
if (!isRecord(value)) {
|
||||
throw new Error("Route condition must be an object.");
|
||||
}
|
||||
|
||||
const type = readString(value, "type");
|
||||
if (type === "always") {
|
||||
return { type };
|
||||
}
|
||||
|
||||
if (type === "state_flag") {
|
||||
const key = readString(value, "key");
|
||||
const equals = value.equals;
|
||||
if (typeof equals !== "boolean") {
|
||||
throw new Error('Route condition field "equals" must be a boolean.');
|
||||
}
|
||||
return {
|
||||
type,
|
||||
key,
|
||||
equals,
|
||||
};
|
||||
}
|
||||
|
||||
if (type === "history_has_event") {
|
||||
return {
|
||||
type,
|
||||
event: readString(value, "event"),
|
||||
};
|
||||
}
|
||||
|
||||
if (type === "file_exists") {
|
||||
return {
|
||||
type,
|
||||
path: readString(value, "path"),
|
||||
};
|
||||
}
|
||||
|
||||
throw new Error(`Unsupported route condition type \"${type}\".`);
|
||||
}
|
||||
|
||||
function parsePipelineNode(value: unknown): PipelineNode {
|
||||
if (!isRecord(value)) {
|
||||
throw new Error("Pipeline node must be an object.");
|
||||
}
|
||||
|
||||
const topology = value.topology;
|
||||
let parsedTopology: PipelineNodeTopology | undefined;
|
||||
if (topology !== undefined) {
|
||||
if (!isRecord(topology)) {
|
||||
throw new Error("Pipeline node topology must be an object when provided.");
|
||||
}
|
||||
|
||||
const kind = readString(topology, "kind");
|
||||
if (
|
||||
kind !== "sequential" &&
|
||||
kind !== "parallel" &&
|
||||
kind !== "hierarchical" &&
|
||||
kind !== "retry-unrolled"
|
||||
) {
|
||||
throw new Error(`Pipeline node topology kind "${kind}" is not supported.`);
|
||||
}
|
||||
|
||||
const blockIdRaw = topology.blockId;
|
||||
if (blockIdRaw !== undefined && (typeof blockIdRaw !== "string" || blockIdRaw.trim().length === 0)) {
|
||||
throw new Error("Pipeline node topology blockId must be a non-empty string when provided.");
|
||||
}
|
||||
|
||||
parsedTopology = {
|
||||
kind,
|
||||
...(typeof blockIdRaw === "string" ? { blockId: blockIdRaw.trim() } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
const constraints = isRecord(value.constraints)
|
||||
? {
|
||||
maxRetries: readOptionalInteger(value.constraints, "maxRetries", { min: 0 }),
|
||||
}
|
||||
: undefined;
|
||||
|
||||
return {
|
||||
id: readString(value, "id"),
|
||||
actorId: readString(value, "actorId"),
|
||||
personaId: readString(value, "personaId"),
|
||||
constraints,
|
||||
...(parsedTopology ? { topology: parsedTopology } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
function parsePipelineEdge(value: unknown): PipelineEdge {
|
||||
if (!isRecord(value)) {
|
||||
throw new Error("Pipeline edge must be an object.");
|
||||
}
|
||||
|
||||
const validEvents: NonNullable<PipelineEdge["on"]>[] = [
|
||||
"success",
|
||||
"validation_fail",
|
||||
"failure",
|
||||
"always",
|
||||
];
|
||||
|
||||
const rawOn = value.on;
|
||||
let on: PipelineEdge["on"];
|
||||
if (rawOn !== undefined) {
|
||||
if (typeof rawOn !== "string" || !validEvents.includes(rawOn as NonNullable<PipelineEdge["on"]>)) {
|
||||
throw new Error(`Pipeline edge field "on" has unsupported event "${String(rawOn)}".`);
|
||||
}
|
||||
on = rawOn as NonNullable<PipelineEdge["on"]>;
|
||||
}
|
||||
|
||||
const rawDomainEvent = value.event;
|
||||
let event: DomainEventType | undefined;
|
||||
if (rawDomainEvent !== undefined) {
|
||||
if (typeof rawDomainEvent !== "string" || !isDomainEventType(rawDomainEvent)) {
|
||||
throw new Error(`Pipeline edge field "event" has unsupported domain event "${String(rawDomainEvent)}".`);
|
||||
}
|
||||
event = rawDomainEvent;
|
||||
}
|
||||
|
||||
if (!on && !event) {
|
||||
throw new Error('Pipeline edge must provide either an "on" trigger or an "event" trigger.');
|
||||
}
|
||||
if (on && event) {
|
||||
throw new Error('Pipeline edge cannot define both "on" and "event" triggers simultaneously.');
|
||||
}
|
||||
|
||||
const rawWhen = value.when;
|
||||
const when: RouteCondition[] = [];
|
||||
if (rawWhen !== undefined) {
|
||||
if (!Array.isArray(rawWhen)) {
|
||||
throw new Error('Pipeline edge field "when" must be an array when provided.');
|
||||
}
|
||||
for (const condition of rawWhen) {
|
||||
when.push(parseCondition(condition));
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
from: readString(value, "from"),
|
||||
to: readString(value, "to"),
|
||||
...(on ? { on } : {}),
|
||||
...(event ? { event } : {}),
|
||||
...(when.length > 0 ? { when } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
function parsePipeline(value: unknown): PipelineGraph {
|
||||
if (!isRecord(value)) {
|
||||
throw new Error("Manifest pipeline must be an object.");
|
||||
}
|
||||
|
||||
const nodesValue = value.nodes;
|
||||
if (!Array.isArray(nodesValue) || nodesValue.length === 0) {
|
||||
throw new Error("Manifest pipeline.nodes must be a non-empty array.");
|
||||
}
|
||||
|
||||
const edgesValue = value.edges;
|
||||
if (!Array.isArray(edgesValue)) {
|
||||
throw new Error("Manifest pipeline.edges must be an array.");
|
||||
}
|
||||
|
||||
const nodes = nodesValue.map(parsePipelineNode);
|
||||
const edges = edgesValue.map(parsePipelineEdge);
|
||||
|
||||
return {
|
||||
entryNodeId: readString(value, "entryNodeId"),
|
||||
nodes,
|
||||
edges,
|
||||
};
|
||||
}
|
||||
|
||||
function parseTopologies(value: unknown): TopologyKind[] {
|
||||
if (!Array.isArray(value) || value.length === 0) {
|
||||
throw new Error("Manifest topologies must be a non-empty array.");
|
||||
}
|
||||
|
||||
const valid = new Set<TopologyKind>(["hierarchical", "parallel", "retry-unrolled", "sequential"]);
|
||||
const result: TopologyKind[] = [];
|
||||
|
||||
for (const item of value) {
|
||||
if (typeof item !== "string" || !valid.has(item as TopologyKind)) {
|
||||
throw new Error("Manifest topologies contains an unsupported topology kind.");
|
||||
}
|
||||
result.push(item as TopologyKind);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function parseTopologyConstraints(value: unknown): TopologyConstraint {
|
||||
if (!isRecord(value)) {
|
||||
throw new Error("Manifest topologyConstraints must be an object.");
|
||||
}
|
||||
|
||||
const maxDepth = readOptionalInteger(value, "maxDepth", { min: 1 });
|
||||
const maxRetries = readOptionalInteger(value, "maxRetries", { min: 0 });
|
||||
|
||||
return {
|
||||
maxDepth: maxDepth ?? 4,
|
||||
maxRetries: maxRetries ?? 2,
|
||||
};
|
||||
}
|
||||
export const AgentManifestSchema = z.object({
|
||||
schemaVersion: z.literal("1"),
|
||||
topologies: z.array(TopologyKindSchema).min(1, "Manifest topologies must be a non-empty array."),
|
||||
personas: z.array(ManifestPersonaSchema).min(1, "Manifest personas must be a non-empty array."),
|
||||
relationships: z.array(RelationshipEdgeSchema),
|
||||
pipeline: PipelineGraphSchema,
|
||||
topologyConstraints: TopologyConstraintSchema,
|
||||
});
|
||||
export type AgentManifest = z.infer<typeof AgentManifestSchema>;
|
||||
|
||||
function assertNoDuplicates(items: string[], label: string): void {
|
||||
const seen = new Set<string>();
|
||||
for (const item of items) {
|
||||
if (seen.has(item)) {
|
||||
throw new Error(`${label} contains duplicate id \"${item}\".`);
|
||||
throw new Error(`${label} contains duplicate id "${item}".`);
|
||||
}
|
||||
seen.add(item);
|
||||
}
|
||||
@@ -409,20 +124,20 @@ function assertPipelineDag(pipeline: PipelineGraph): void {
|
||||
}
|
||||
|
||||
if (!nodeIds.has(pipeline.entryNodeId)) {
|
||||
throw new Error(`Pipeline entry node \"${pipeline.entryNodeId}\" is not defined.`);
|
||||
throw new Error(`Pipeline entry node "${pipeline.entryNodeId}" is not defined.`);
|
||||
}
|
||||
|
||||
for (const edge of pipeline.edges) {
|
||||
if (!nodeIds.has(edge.from)) {
|
||||
throw new Error(`Pipeline edge references unknown from node \"${edge.from}\".`);
|
||||
throw new Error(`Pipeline edge references unknown from node "${edge.from}".`);
|
||||
}
|
||||
if (!nodeIds.has(edge.to)) {
|
||||
throw new Error(`Pipeline edge references unknown to node \"${edge.to}\".`);
|
||||
throw new Error(`Pipeline edge references unknown to node "${edge.to}".`);
|
||||
}
|
||||
|
||||
const neighbors = adjacency.get(edge.from);
|
||||
if (!neighbors) {
|
||||
throw new Error(`Internal DAG error for node \"${edge.from}\".`);
|
||||
throw new Error(`Internal DAG error for node "${edge.from}".`);
|
||||
}
|
||||
neighbors.push(edge.to);
|
||||
const currentInDegree = indegree.get(edge.to);
|
||||
@@ -503,34 +218,16 @@ function assertRelationshipDag(relationships: RelationshipEdge[]): void {
|
||||
}
|
||||
|
||||
export function parseAgentManifest(input: unknown): AgentManifest {
|
||||
if (!isRecord(input)) {
|
||||
throw new Error("AgentManifest must be an object.");
|
||||
let manifest: AgentManifest;
|
||||
try {
|
||||
manifest = AgentManifestSchema.parse(input);
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
throw new Error("Manifest invalid: " + error.issues.map((e: any) => e.message).join(", "));
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
const schemaVersion = readString(input, "schemaVersion");
|
||||
if (schemaVersion !== "1") {
|
||||
throw new Error(`Unsupported AgentManifest schemaVersion \"${schemaVersion}\".`);
|
||||
}
|
||||
|
||||
const personasValue = input.personas;
|
||||
if (!Array.isArray(personasValue) || personasValue.length === 0) {
|
||||
throw new Error("Manifest personas must be a non-empty array.");
|
||||
}
|
||||
|
||||
const relationshipsValue = input.relationships;
|
||||
if (!Array.isArray(relationshipsValue)) {
|
||||
throw new Error("Manifest relationships must be an array.");
|
||||
}
|
||||
|
||||
const manifest: AgentManifest = {
|
||||
schemaVersion: "1",
|
||||
topologies: parseTopologies(input.topologies),
|
||||
personas: personasValue.map(parsePersona),
|
||||
relationships: relationshipsValue.map(parseRelationship),
|
||||
pipeline: parsePipeline(input.pipeline),
|
||||
topologyConstraints: parseTopologyConstraints(input.topologyConstraints),
|
||||
};
|
||||
|
||||
assertNoDuplicates(
|
||||
manifest.personas.map((persona) => persona.id),
|
||||
"Manifest personas",
|
||||
@@ -545,12 +242,12 @@ export function parseAgentManifest(input: unknown): AgentManifest {
|
||||
for (const relation of manifest.relationships) {
|
||||
if (!personaIds.has(relation.parentPersonaId)) {
|
||||
throw new Error(
|
||||
`Relationship references unknown parent persona \"${relation.parentPersonaId}\".`,
|
||||
`Relationship references unknown parent persona "${relation.parentPersonaId}".`,
|
||||
);
|
||||
}
|
||||
if (!personaIds.has(relation.childPersonaId)) {
|
||||
throw new Error(
|
||||
`Relationship references unknown child persona \"${relation.childPersonaId}\".`,
|
||||
`Relationship references unknown child persona "${relation.childPersonaId}".`,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -559,7 +256,7 @@ export function parseAgentManifest(input: unknown): AgentManifest {
|
||||
|
||||
for (const node of manifest.pipeline.nodes) {
|
||||
if (!personaIds.has(node.personaId)) {
|
||||
throw new Error(`Pipeline node \"${node.id}\" references unknown persona \"${node.personaId}\".`);
|
||||
throw new Error(`Pipeline node "${node.id}" references unknown persona "${node.personaId}".`);
|
||||
}
|
||||
|
||||
if (node.topology && !manifest.topologies.includes(node.topology.kind as TopologyKind)) {
|
||||
|
||||
@@ -2,6 +2,7 @@ import { resolve } from "node:path";
|
||||
import { getConfig, loadConfig, type AppConfig } from "../config.js";
|
||||
import { createDefaultMcpRegistry, loadMcpConfigFromEnv, McpRegistry } from "../mcp.js";
|
||||
import { parseAgentManifest, type AgentManifest } from "./manifest.js";
|
||||
import type { DomainEventEmission } from "./domain-events.js";
|
||||
import { AgentManager } from "./manager.js";
|
||||
import {
|
||||
PersonaRegistry,
|
||||
@@ -13,12 +14,19 @@ import {
|
||||
type ActorExecutionSecurityContext,
|
||||
type ActorExecutor,
|
||||
type PipelineRunSummary,
|
||||
type TaskExecutionLifecycle,
|
||||
} from "./pipeline.js";
|
||||
import { FileSystemProjectContextStore } from "./project-context.js";
|
||||
import {
|
||||
FileSystemProjectContextStore,
|
||||
type ProjectTask,
|
||||
type ProjectTaskStatus,
|
||||
} from "./project-context.js";
|
||||
import { FileSystemStateContextManager, type StoredSessionState } from "./state-context.js";
|
||||
import type { JsonObject } from "./types.js";
|
||||
import { SessionWorktreeManager, type SessionMetadata } from "./session-lifecycle.js";
|
||||
import {
|
||||
SecureCommandExecutor,
|
||||
type SecurityViolationHandling,
|
||||
type SecurityAuditEvent,
|
||||
type SecurityAuditSink,
|
||||
SecurityRulesEngine,
|
||||
@@ -38,7 +46,8 @@ export type OrchestrationSettings = {
|
||||
maxDepth: number;
|
||||
maxRetries: number;
|
||||
maxChildren: number;
|
||||
securityViolationHandling: "hard_abort" | "validation_fail";
|
||||
mergeConflictMaxAttempts: number;
|
||||
securityViolationHandling: SecurityViolationHandling;
|
||||
runtimeContext: Record<string, string | number | boolean>;
|
||||
};
|
||||
|
||||
@@ -56,6 +65,7 @@ export function loadOrchestrationSettingsFromEnv(
|
||||
maxDepth: config.orchestration.maxDepth,
|
||||
maxRetries: config.orchestration.maxRetries,
|
||||
maxChildren: config.orchestration.maxChildren,
|
||||
mergeConflictMaxAttempts: config.orchestration.mergeConflictMaxAttempts,
|
||||
securityViolationHandling: config.security.violationHandling,
|
||||
};
|
||||
}
|
||||
@@ -181,6 +191,9 @@ function createActorSecurityContext(input: {
|
||||
type: `security.${event.type}`,
|
||||
severity: mapSecurityAuditSeverity(event),
|
||||
message: toSecurityAuditMessage(event),
|
||||
...(event.sessionId ? { sessionId: event.sessionId } : {}),
|
||||
...(event.nodeId ? { nodeId: event.nodeId } : {}),
|
||||
...(typeof event.attempt === "number" ? { attempt: event.attempt } : {}),
|
||||
metadata: toSecurityAuditMetadata(event),
|
||||
});
|
||||
};
|
||||
@@ -199,6 +212,9 @@ function createActorSecurityContext(input: {
|
||||
blockedEnvAssignments: ["AGENT_STATE_ROOT", "AGENT_PROJECT_CONTEXT_PATH"],
|
||||
},
|
||||
auditSink,
|
||||
{
|
||||
violationHandling: input.settings.securityViolationHandling,
|
||||
},
|
||||
);
|
||||
|
||||
return {
|
||||
@@ -221,6 +237,57 @@ function createActorSecurityContext(input: {
|
||||
};
|
||||
}
|
||||
|
||||
function resolveSessionProjectContextPath(stateRoot: string, sessionId: string): string {
|
||||
return resolve(stateRoot, sessionId, "project-context.json");
|
||||
}
|
||||
|
||||
function readTaskIdFromPayload(payload: JsonObject, fallback: string): string {
|
||||
const candidates = [payload.taskId, payload.task_id, payload.task];
|
||||
for (const candidate of candidates) {
|
||||
if (typeof candidate === "string" && candidate.trim().length > 0) {
|
||||
return candidate.trim();
|
||||
}
|
||||
}
|
||||
return fallback;
|
||||
}
|
||||
|
||||
function toTaskStatusForFailure(
|
||||
resultStatus: "validation_fail" | "failure",
|
||||
statusAtStart: string,
|
||||
): ProjectTaskStatus {
|
||||
if (resultStatus === "failure") {
|
||||
return "failed";
|
||||
}
|
||||
if (statusAtStart === "conflict" || statusAtStart === "resolving_conflict") {
|
||||
return "conflict";
|
||||
}
|
||||
return "in_progress";
|
||||
}
|
||||
|
||||
function shouldMergeFromStatus(statusAtStart: string): boolean {
|
||||
return statusAtStart === "review" || statusAtStart === "resolving_conflict";
|
||||
}
|
||||
|
||||
function toTaskIdLabel(task: ProjectTask): string {
|
||||
return task.taskId || task.id || "task";
|
||||
}
|
||||
|
||||
function toJsonObject(value: unknown): JsonObject | undefined {
|
||||
if (!value || typeof value !== "object" || Array.isArray(value)) {
|
||||
return undefined;
|
||||
}
|
||||
return value as JsonObject;
|
||||
}
|
||||
|
||||
function readMergeConflictAttempts(metadata: JsonObject | undefined): number {
|
||||
const record = toJsonObject(metadata?.mergeConflict);
|
||||
const attempts = record?.attempts;
|
||||
if (typeof attempts === "number" && Number.isInteger(attempts) && attempts >= 0) {
|
||||
return attempts;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
export class SchemaDrivenExecutionEngine {
|
||||
private readonly manifest: AgentManifest;
|
||||
private readonly personaRegistry = new PersonaRegistry();
|
||||
@@ -234,6 +301,7 @@ export class SchemaDrivenExecutionEngine {
|
||||
private readonly mcpRegistry: McpRegistry;
|
||||
private readonly runtimeEventPublisher: RuntimeEventPublisher;
|
||||
private readonly securityContext: ActorExecutionSecurityContext;
|
||||
private readonly sessionWorktreeManager: SessionWorktreeManager;
|
||||
|
||||
constructor(input: {
|
||||
manifest: AgentManifest | unknown;
|
||||
@@ -260,6 +328,8 @@ export class SchemaDrivenExecutionEngine {
|
||||
maxDepth: input.settings?.maxDepth ?? config.orchestration.maxDepth,
|
||||
maxRetries: input.settings?.maxRetries ?? config.orchestration.maxRetries,
|
||||
maxChildren: input.settings?.maxChildren ?? config.orchestration.maxChildren,
|
||||
mergeConflictMaxAttempts:
|
||||
input.settings?.mergeConflictMaxAttempts ?? config.orchestration.mergeConflictMaxAttempts,
|
||||
securityViolationHandling:
|
||||
input.settings?.securityViolationHandling ?? config.security.violationHandling,
|
||||
runtimeContext: {
|
||||
@@ -273,6 +343,11 @@ export class SchemaDrivenExecutionEngine {
|
||||
this.projectContextStore = new FileSystemProjectContextStore({
|
||||
filePath: this.settings.projectContextPath,
|
||||
});
|
||||
this.sessionWorktreeManager = new SessionWorktreeManager({
|
||||
worktreeRoot: resolve(this.settings.workspaceRoot, this.config.provisioning.gitWorktree.rootDirectory),
|
||||
baseRef: this.config.provisioning.gitWorktree.baseRef,
|
||||
targetPath: this.config.provisioning.gitWorktree.targetPath,
|
||||
});
|
||||
|
||||
this.actorExecutors = toExecutorMap(input.actorExecutors);
|
||||
this.manager =
|
||||
@@ -352,9 +427,26 @@ export class SchemaDrivenExecutionEngine {
|
||||
initialPayload: JsonObject;
|
||||
initialState?: Partial<StoredSessionState>;
|
||||
signal?: AbortSignal;
|
||||
sessionMetadata?: SessionMetadata;
|
||||
}): Promise<PipelineRunSummary> {
|
||||
const managerSessionId = `${input.sessionId}__pipeline`;
|
||||
const managerSession = this.manager.createSession(managerSessionId);
|
||||
const workspaceRoot = input.sessionMetadata
|
||||
? this.sessionWorktreeManager.resolveWorkingDirectoryForWorktree(
|
||||
input.sessionMetadata.baseWorkspacePath,
|
||||
)
|
||||
: this.settings.workspaceRoot;
|
||||
const projectContextStore = input.sessionMetadata
|
||||
? new FileSystemProjectContextStore({
|
||||
filePath: resolveSessionProjectContextPath(this.settings.stateRoot, input.sessionId),
|
||||
})
|
||||
: this.projectContextStore;
|
||||
const taskLifecycle = input.sessionMetadata
|
||||
? this.createTaskExecutionLifecycle({
|
||||
session: input.sessionMetadata,
|
||||
projectContextStore,
|
||||
})
|
||||
: undefined;
|
||||
|
||||
const executor = new PipelineExecutor(
|
||||
this.manifest,
|
||||
@@ -362,25 +454,26 @@ export class SchemaDrivenExecutionEngine {
|
||||
this.stateManager,
|
||||
this.actorExecutors,
|
||||
{
|
||||
workspaceRoot: this.settings.workspaceRoot,
|
||||
workspaceRoot,
|
||||
runtimeContext: this.settings.runtimeContext,
|
||||
defaultModelConstraint: this.config.provider.claudeModel,
|
||||
resolvedExecutionSecurityConstraints: {
|
||||
dropUid: this.config.security.dropUid !== undefined,
|
||||
dropGid: this.config.security.dropGid !== undefined,
|
||||
worktreePath: this.settings.workspaceRoot,
|
||||
worktreePath: workspaceRoot,
|
||||
violationMode: this.settings.securityViolationHandling,
|
||||
},
|
||||
maxDepth: Math.min(this.settings.maxDepth, this.manifest.topologyConstraints.maxDepth),
|
||||
maxRetries: Math.min(this.settings.maxRetries, this.manifest.topologyConstraints.maxRetries),
|
||||
manager: this.manager,
|
||||
managerSessionId,
|
||||
projectContextStore: this.projectContextStore,
|
||||
resolveMcpConfig: ({ providerHint, prompt, toolClearance }) =>
|
||||
projectContextStore,
|
||||
resolveMcpConfig: ({ providerHint, prompt, toolClearance, workingDirectory }) =>
|
||||
loadMcpConfigFromEnv(
|
||||
{
|
||||
providerHint,
|
||||
prompt,
|
||||
...(workingDirectory ? { workingDirectory } : {}),
|
||||
},
|
||||
{
|
||||
config: this.config,
|
||||
@@ -391,6 +484,7 @@ export class SchemaDrivenExecutionEngine {
|
||||
securityViolationHandling: this.settings.securityViolationHandling,
|
||||
securityContext: this.securityContext,
|
||||
runtimeEventPublisher: this.runtimeEventPublisher,
|
||||
...(taskLifecycle ? { taskLifecycle } : {}),
|
||||
},
|
||||
);
|
||||
try {
|
||||
@@ -405,6 +499,335 @@ export class SchemaDrivenExecutionEngine {
|
||||
}
|
||||
}
|
||||
|
||||
private createTaskExecutionLifecycle(input: {
|
||||
session: SessionMetadata;
|
||||
projectContextStore: FileSystemProjectContextStore;
|
||||
}): TaskExecutionLifecycle {
|
||||
return {
|
||||
prepareTaskExecution: async ({ node, context }) => {
|
||||
const taskId = readTaskIdFromPayload(context.handoff.payload, node.id);
|
||||
const projectContext = await input.projectContextStore.readState();
|
||||
const existing = projectContext.taskQueue.find(
|
||||
(task) => toTaskIdLabel(task) === taskId,
|
||||
);
|
||||
|
||||
const ensured = await this.sessionWorktreeManager.ensureTaskWorktree({
|
||||
sessionId: input.session.sessionId,
|
||||
taskId,
|
||||
baseWorkspacePath: input.session.baseWorkspacePath,
|
||||
...(existing?.worktreePath ? { existingWorktreePath: existing.worktreePath } : {}),
|
||||
});
|
||||
|
||||
const statusAtStart: ProjectTaskStatus =
|
||||
existing?.status === "review" ||
|
||||
existing?.status === "conflict" ||
|
||||
existing?.status === "resolving_conflict"
|
||||
? existing.status
|
||||
: "in_progress";
|
||||
|
||||
await input.projectContextStore.patchState({
|
||||
upsertTasks: [
|
||||
{
|
||||
taskId,
|
||||
id: taskId,
|
||||
status: statusAtStart,
|
||||
worktreePath: ensured.taskWorktreePath,
|
||||
...(existing?.title ? { title: existing.title } : { title: taskId }),
|
||||
...(existing?.metadata ? { metadata: existing.metadata } : {}),
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
return {
|
||||
taskId,
|
||||
workingDirectory: ensured.taskWorkingDirectory,
|
||||
worktreePath: ensured.taskWorktreePath,
|
||||
statusAtStart,
|
||||
...(existing?.metadata ? { metadata: existing.metadata } : {}),
|
||||
};
|
||||
},
|
||||
finalizeTaskExecution: async ({ task, result, domainEvents }) => {
|
||||
const emittedTypes = new Set(domainEvents.map((event) => event.type));
|
||||
const additionalEvents: DomainEventEmission[] = [];
|
||||
const emitEvent = (
|
||||
type: DomainEventEmission["type"],
|
||||
payload?: DomainEventEmission["payload"],
|
||||
): void => {
|
||||
if (emittedTypes.has(type)) {
|
||||
return;
|
||||
}
|
||||
emittedTypes.add(type);
|
||||
additionalEvents.push(payload ? { type, payload } : { type });
|
||||
};
|
||||
|
||||
if (result.status === "failure" || result.status === "validation_fail") {
|
||||
await input.projectContextStore.patchState({
|
||||
upsertTasks: [
|
||||
{
|
||||
taskId: task.taskId,
|
||||
id: task.taskId,
|
||||
status: toTaskStatusForFailure(result.status, task.statusAtStart),
|
||||
worktreePath: task.worktreePath,
|
||||
title: task.taskId,
|
||||
...(task.metadata ? { metadata: task.metadata } : {}),
|
||||
},
|
||||
],
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (task.statusAtStart === "conflict") {
|
||||
const attempts = readMergeConflictAttempts(task.metadata);
|
||||
const metadata: JsonObject = {
|
||||
...(task.metadata ?? {}),
|
||||
mergeConflict: {
|
||||
attempts,
|
||||
maxAttempts: this.settings.mergeConflictMaxAttempts,
|
||||
status: "resolved",
|
||||
resolvedAt: new Date().toISOString(),
|
||||
},
|
||||
};
|
||||
await input.projectContextStore.patchState({
|
||||
upsertTasks: [
|
||||
{
|
||||
taskId: task.taskId,
|
||||
id: task.taskId,
|
||||
status: "resolving_conflict",
|
||||
worktreePath: task.worktreePath,
|
||||
title: task.taskId,
|
||||
metadata,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
emitEvent("merge_conflict_resolved", {
|
||||
summary: `Merge conflicts resolved for task "${task.taskId}".`,
|
||||
details: {
|
||||
taskId: task.taskId,
|
||||
worktreePath: task.worktreePath,
|
||||
attempts,
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
additionalEvents,
|
||||
handoffPayloadPatch: {
|
||||
taskId: task.taskId,
|
||||
worktreePath: task.worktreePath,
|
||||
mergeConflictStatus: "resolved",
|
||||
mergeConflictAttempts: attempts,
|
||||
} as JsonObject,
|
||||
};
|
||||
}
|
||||
|
||||
if (shouldMergeFromStatus(task.statusAtStart)) {
|
||||
const attemptsBeforeMerge = readMergeConflictAttempts(task.metadata);
|
||||
if (task.statusAtStart === "resolving_conflict") {
|
||||
emitEvent("merge_retry_started", {
|
||||
summary: `Retrying merge for task "${task.taskId}".`,
|
||||
details: {
|
||||
taskId: task.taskId,
|
||||
worktreePath: task.worktreePath,
|
||||
nextAttempt: attemptsBeforeMerge + 1,
|
||||
maxAttempts: this.settings.mergeConflictMaxAttempts,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
const mergeOutcome = await this.sessionWorktreeManager.mergeTaskIntoBase({
|
||||
taskId: task.taskId,
|
||||
baseWorkspacePath: input.session.baseWorkspacePath,
|
||||
taskWorktreePath: task.worktreePath,
|
||||
});
|
||||
|
||||
if (mergeOutcome.kind === "success") {
|
||||
await input.projectContextStore.patchState({
|
||||
upsertTasks: [
|
||||
{
|
||||
taskId: task.taskId,
|
||||
id: task.taskId,
|
||||
status: "merged",
|
||||
title: task.taskId,
|
||||
metadata: {
|
||||
...(task.metadata ?? {}),
|
||||
mergeConflict: {
|
||||
attempts: attemptsBeforeMerge,
|
||||
maxAttempts: this.settings.mergeConflictMaxAttempts,
|
||||
status: "merged",
|
||||
mergedAt: new Date().toISOString(),
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
emitEvent("branch_merged", {
|
||||
summary: `Task "${task.taskId}" merged into session base branch.`,
|
||||
details: {
|
||||
taskId: task.taskId,
|
||||
worktreePath: task.worktreePath,
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
additionalEvents,
|
||||
handoffPayloadPatch: {
|
||||
taskId: task.taskId,
|
||||
mergeStatus: "merged",
|
||||
} as JsonObject,
|
||||
};
|
||||
}
|
||||
|
||||
if (mergeOutcome.kind === "conflict") {
|
||||
const attempts = attemptsBeforeMerge + 1;
|
||||
const exhausted = attempts >= this.settings.mergeConflictMaxAttempts;
|
||||
const metadata: JsonObject = {
|
||||
...(task.metadata ?? {}),
|
||||
mergeConflict: {
|
||||
attempts,
|
||||
maxAttempts: this.settings.mergeConflictMaxAttempts,
|
||||
status: exhausted ? "unresolved" : "conflict",
|
||||
conflictFiles: mergeOutcome.conflictFiles,
|
||||
worktreePath: mergeOutcome.worktreePath,
|
||||
detectedAt: new Date().toISOString(),
|
||||
...(mergeOutcome.mergeBase ? { mergeBase: mergeOutcome.mergeBase } : {}),
|
||||
},
|
||||
};
|
||||
|
||||
await input.projectContextStore.patchState({
|
||||
upsertTasks: [
|
||||
{
|
||||
taskId: task.taskId,
|
||||
id: task.taskId,
|
||||
status: "conflict",
|
||||
worktreePath: task.worktreePath,
|
||||
title: task.taskId,
|
||||
metadata,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
emitEvent("merge_conflict_detected", {
|
||||
summary: `Merge conflict detected for task "${task.taskId}".`,
|
||||
details: {
|
||||
taskId: task.taskId,
|
||||
worktreePath: mergeOutcome.worktreePath,
|
||||
conflictFiles: mergeOutcome.conflictFiles,
|
||||
attempts,
|
||||
maxAttempts: this.settings.mergeConflictMaxAttempts,
|
||||
...(mergeOutcome.mergeBase ? { mergeBase: mergeOutcome.mergeBase } : {}),
|
||||
},
|
||||
});
|
||||
|
||||
if (exhausted) {
|
||||
emitEvent("merge_conflict_unresolved", {
|
||||
summary:
|
||||
`Merge conflict attempts exhausted for task "${task.taskId}" ` +
|
||||
`(${String(attempts)}/${String(this.settings.mergeConflictMaxAttempts)}).`,
|
||||
details: {
|
||||
taskId: task.taskId,
|
||||
worktreePath: mergeOutcome.worktreePath,
|
||||
conflictFiles: mergeOutcome.conflictFiles,
|
||||
attempts,
|
||||
maxAttempts: this.settings.mergeConflictMaxAttempts,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
additionalEvents,
|
||||
handoffPayloadPatch: {
|
||||
taskId: task.taskId,
|
||||
worktreePath: task.worktreePath,
|
||||
mergeConflictStatus: exhausted ? "unresolved" : "conflict",
|
||||
mergeConflictAttempts: attempts,
|
||||
mergeConflictMaxAttempts: this.settings.mergeConflictMaxAttempts,
|
||||
mergeConflictFiles: mergeOutcome.conflictFiles,
|
||||
...(mergeOutcome.mergeBase ? { mergeBase: mergeOutcome.mergeBase } : {}),
|
||||
} as JsonObject,
|
||||
};
|
||||
}
|
||||
|
||||
await input.projectContextStore.patchState({
|
||||
upsertTasks: [
|
||||
{
|
||||
taskId: task.taskId,
|
||||
id: task.taskId,
|
||||
status: "failed",
|
||||
worktreePath: task.worktreePath,
|
||||
title: task.taskId,
|
||||
metadata: {
|
||||
...(task.metadata ?? {}),
|
||||
mergeConflict: {
|
||||
attempts: attemptsBeforeMerge,
|
||||
maxAttempts: this.settings.mergeConflictMaxAttempts,
|
||||
status: "fatal_error",
|
||||
error: mergeOutcome.error,
|
||||
...(mergeOutcome.mergeBase ? { mergeBase: mergeOutcome.mergeBase } : {}),
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
emitEvent("merge_conflict_unresolved", {
|
||||
summary: `Fatal merge error for task "${task.taskId}".`,
|
||||
details: {
|
||||
taskId: task.taskId,
|
||||
worktreePath: mergeOutcome.worktreePath,
|
||||
error: mergeOutcome.error,
|
||||
...(mergeOutcome.mergeBase ? { mergeBase: mergeOutcome.mergeBase } : {}),
|
||||
},
|
||||
});
|
||||
emitEvent("task_blocked", {
|
||||
summary: `Task "${task.taskId}" blocked due to fatal merge error.`,
|
||||
details: {
|
||||
taskId: task.taskId,
|
||||
error: mergeOutcome.error,
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
additionalEvents,
|
||||
handoffPayloadPatch: {
|
||||
taskId: task.taskId,
|
||||
worktreePath: task.worktreePath,
|
||||
mergeStatus: "fatal_error",
|
||||
mergeError: mergeOutcome.error,
|
||||
} as JsonObject,
|
||||
};
|
||||
}
|
||||
|
||||
const nextMetadata = task.metadata
|
||||
? {
|
||||
...task.metadata,
|
||||
}
|
||||
: undefined;
|
||||
|
||||
await input.projectContextStore.patchState({
|
||||
upsertTasks: [
|
||||
{
|
||||
taskId: task.taskId,
|
||||
id: task.taskId,
|
||||
status: "review",
|
||||
worktreePath: task.worktreePath,
|
||||
title: task.taskId,
|
||||
...(nextMetadata ? { metadata: nextMetadata } : {}),
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
if (additionalEvents.length > 0) {
|
||||
return {
|
||||
additionalEvents,
|
||||
};
|
||||
}
|
||||
|
||||
return;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
private assertRelationshipConstraints(): void {
|
||||
for (const [parent, edges] of this.childrenByParent.entries()) {
|
||||
if (edges.length > this.settings.maxChildren) {
|
||||
|
||||
@@ -63,6 +63,7 @@ export type ActorExecutionResult = {
|
||||
export type ActorToolPermissionResult =
|
||||
| {
|
||||
behavior: "allow";
|
||||
updatedInput?: Record<string, unknown>;
|
||||
toolUseID?: string;
|
||||
}
|
||||
| {
|
||||
@@ -107,6 +108,8 @@ export type ResolvedExecutionContext = {
|
||||
|
||||
export type ActorExecutionInput = {
|
||||
sessionId: string;
|
||||
attempt: number;
|
||||
depth: number;
|
||||
node: PipelineNode;
|
||||
prompt: string;
|
||||
context: NodeExecutionContext;
|
||||
@@ -153,6 +156,7 @@ export type PipelineExecutorOptions = {
|
||||
securityViolationHandling?: SecurityViolationHandling;
|
||||
securityContext?: ActorExecutionSecurityContext;
|
||||
runtimeEventPublisher?: RuntimeEventPublisher;
|
||||
taskLifecycle?: TaskExecutionLifecycle;
|
||||
};
|
||||
|
||||
export type ActorExecutionSecurityContext = {
|
||||
@@ -166,6 +170,35 @@ export type ActorExecutionSecurityContext = {
|
||||
}) => SecureCommandExecutor;
|
||||
};
|
||||
|
||||
export type TaskExecutionResolution = {
|
||||
taskId: string;
|
||||
workingDirectory: string;
|
||||
worktreePath: string;
|
||||
statusAtStart: string;
|
||||
metadata?: JsonObject;
|
||||
};
|
||||
|
||||
export type TaskExecutionLifecycle = {
|
||||
prepareTaskExecution: (input: {
|
||||
sessionId: string;
|
||||
node: PipelineNode;
|
||||
context: NodeExecutionContext;
|
||||
}) => Promise<TaskExecutionResolution>;
|
||||
finalizeTaskExecution: (input: {
|
||||
sessionId: string;
|
||||
node: PipelineNode;
|
||||
task: TaskExecutionResolution;
|
||||
result: ActorExecutionResult;
|
||||
domainEvents: DomainEvent[];
|
||||
}) => Promise<
|
||||
| void
|
||||
| {
|
||||
additionalEvents?: DomainEventEmission[];
|
||||
handoffPayloadPatch?: JsonObject;
|
||||
}
|
||||
>;
|
||||
};
|
||||
|
||||
type QueueItem = {
|
||||
nodeId: string;
|
||||
depth: number;
|
||||
@@ -458,6 +491,38 @@ function toToolNameCandidates(toolName: string): string[] {
|
||||
return dedupeStrings(candidates);
|
||||
}
|
||||
|
||||
function buildCaseInsensitiveToolLookup(tools: readonly string[]): Map<string, string> {
|
||||
const lookup = new Map<string, string>();
|
||||
for (const tool of tools) {
|
||||
const normalized = tool.trim().toLowerCase();
|
||||
if (!normalized || lookup.has(normalized)) {
|
||||
continue;
|
||||
}
|
||||
lookup.set(normalized, tool);
|
||||
}
|
||||
return lookup;
|
||||
}
|
||||
|
||||
function resolveAllowedToolMatch(input: {
|
||||
candidates: readonly string[];
|
||||
allowset: ReadonlySet<string>;
|
||||
caseInsensitiveLookup: ReadonlyMap<string, string>;
|
||||
}): string | undefined {
|
||||
const direct = input.candidates.find((candidate) => input.allowset.has(candidate));
|
||||
if (direct) {
|
||||
return direct;
|
||||
}
|
||||
|
||||
for (const candidate of input.candidates) {
|
||||
const match = input.caseInsensitiveLookup.get(candidate.toLowerCase());
|
||||
if (match) {
|
||||
return match;
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function defaultEventPayloadForStatus(status: ActorResultStatus): DomainEventPayload {
|
||||
if (status === "success") {
|
||||
return {
|
||||
@@ -580,9 +645,11 @@ export class PipelineExecutor {
|
||||
globalFlags: { ...projectContext.globalFlags },
|
||||
artifactPointers: { ...projectContext.artifactPointers },
|
||||
taskQueue: projectContext.taskQueue.map((task) => ({
|
||||
id: task.id,
|
||||
title: task.title,
|
||||
taskId: task.taskId,
|
||||
id: task.id ?? task.taskId,
|
||||
...(task.title ? { title: task.title } : {}),
|
||||
status: task.status,
|
||||
...(task.worktreePath ? { worktreePath: task.worktreePath } : {}),
|
||||
...(task.assignee ? { assignee: task.assignee } : {}),
|
||||
...(task.metadata ? { metadata: task.metadata } : {}),
|
||||
})),
|
||||
@@ -854,6 +921,13 @@ export class PipelineExecutor {
|
||||
})();
|
||||
|
||||
const context = await this.stateManager.buildFreshNodeContext(sessionId, node.id);
|
||||
const taskResolution = this.options.taskLifecycle
|
||||
? await this.options.taskLifecycle.prepareTaskExecution({
|
||||
sessionId,
|
||||
node,
|
||||
context,
|
||||
})
|
||||
: undefined;
|
||||
const prompt = this.personaRegistry.renderSystemPrompt({
|
||||
personaId: node.personaId,
|
||||
runtimeContext: {
|
||||
@@ -869,10 +943,13 @@ export class PipelineExecutor {
|
||||
node,
|
||||
toolClearance,
|
||||
prompt,
|
||||
worktreePathOverride: taskResolution?.workingDirectory,
|
||||
});
|
||||
|
||||
const result = await this.invokeActorExecutor({
|
||||
sessionId,
|
||||
attempt,
|
||||
depth: recursiveDepth,
|
||||
node,
|
||||
prompt,
|
||||
context,
|
||||
@@ -889,12 +966,50 @@ export class PipelineExecutor {
|
||||
customEvents: result.events,
|
||||
});
|
||||
const topologyKind: NodeTopologyKind = node.topology?.kind ?? "sequential";
|
||||
const payloadForNext = result.payload ?? context.handoff.payload;
|
||||
let payloadForNext: JsonObject = {
|
||||
...context.handoff.payload,
|
||||
...(result.payload ?? {}),
|
||||
...(taskResolution
|
||||
? {
|
||||
taskId: taskResolution.taskId,
|
||||
workingDirectory: taskResolution.workingDirectory,
|
||||
worktreePath: taskResolution.worktreePath,
|
||||
}
|
||||
: {}),
|
||||
};
|
||||
const shouldRetry =
|
||||
result.status === "validation_fail" &&
|
||||
this.shouldRetryValidation(node) &&
|
||||
attempt <= maxRetriesForNode;
|
||||
|
||||
if (taskResolution && this.options.taskLifecycle) {
|
||||
const finalization = await this.options.taskLifecycle.finalizeTaskExecution({
|
||||
sessionId,
|
||||
node,
|
||||
task: taskResolution,
|
||||
result,
|
||||
domainEvents,
|
||||
});
|
||||
for (const eventEmission of finalization?.additionalEvents ?? []) {
|
||||
domainEvents.push(
|
||||
createDomainEvent({
|
||||
type: eventEmission.type,
|
||||
source: "pipeline",
|
||||
sessionId,
|
||||
nodeId: node.id,
|
||||
attempt,
|
||||
payload: eventEmission.payload,
|
||||
}),
|
||||
);
|
||||
}
|
||||
if (finalization?.handoffPayloadPatch) {
|
||||
payloadForNext = {
|
||||
...payloadForNext,
|
||||
...finalization.handoffPayloadPatch,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
await this.lifecycleObserver.onNodeAttempt({
|
||||
sessionId,
|
||||
node,
|
||||
@@ -989,6 +1104,8 @@ export class PipelineExecutor {
|
||||
|
||||
private async invokeActorExecutor(input: {
|
||||
sessionId: string;
|
||||
attempt: number;
|
||||
depth: number;
|
||||
node: PipelineNode;
|
||||
prompt: string;
|
||||
context: NodeExecutionContext;
|
||||
@@ -1001,12 +1118,20 @@ export class PipelineExecutor {
|
||||
|
||||
return await input.executor({
|
||||
sessionId: input.sessionId,
|
||||
attempt: input.attempt,
|
||||
depth: input.depth,
|
||||
node: input.node,
|
||||
prompt: input.prompt,
|
||||
context: input.context,
|
||||
signal: input.signal,
|
||||
executionContext: input.executionContext,
|
||||
mcp: this.buildActorMcpContext(input.executionContext, input.prompt),
|
||||
mcp: this.buildActorMcpContext({
|
||||
sessionId: input.sessionId,
|
||||
nodeId: input.node.id,
|
||||
attempt: input.attempt,
|
||||
executionContext: input.executionContext,
|
||||
prompt: input.prompt,
|
||||
}),
|
||||
security: this.securityContext,
|
||||
});
|
||||
} catch (error) {
|
||||
@@ -1047,9 +1172,15 @@ export class PipelineExecutor {
|
||||
node: PipelineNode;
|
||||
toolClearance: ToolClearancePolicy;
|
||||
prompt: string;
|
||||
worktreePathOverride?: string;
|
||||
}): ResolvedExecutionContext {
|
||||
const normalizedToolClearance = parseToolClearancePolicy(input.toolClearance);
|
||||
const toolUniverse = this.resolveAvailableToolsForAttempt(normalizedToolClearance, input.prompt);
|
||||
const worktreePath = input.worktreePathOverride ?? this.options.resolvedExecutionSecurityConstraints.worktreePath;
|
||||
const toolUniverse = this.resolveAvailableToolsForAttempt({
|
||||
toolClearance: normalizedToolClearance,
|
||||
prompt: input.prompt,
|
||||
worktreePath,
|
||||
});
|
||||
const allowedTools = this.resolveAllowedToolsForAttempt({
|
||||
toolClearance: normalizedToolClearance,
|
||||
toolUniverse,
|
||||
@@ -1065,6 +1196,7 @@ export class PipelineExecutor {
|
||||
allowedTools,
|
||||
security: {
|
||||
...this.options.resolvedExecutionSecurityConstraints,
|
||||
worktreePath,
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -1087,15 +1219,20 @@ export class PipelineExecutor {
|
||||
return [];
|
||||
}
|
||||
|
||||
private resolveAvailableToolsForAttempt(toolClearance: ToolClearancePolicy, prompt: string): string[] {
|
||||
private resolveAvailableToolsForAttempt(input: {
|
||||
toolClearance: ToolClearancePolicy;
|
||||
prompt: string;
|
||||
worktreePath: string;
|
||||
}): string[] {
|
||||
if (!this.options.resolveMcpConfig) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const resolved = this.options.resolveMcpConfig({
|
||||
providerHint: "codex",
|
||||
prompt,
|
||||
toolClearance,
|
||||
prompt: input.prompt,
|
||||
workingDirectory: input.worktreePath,
|
||||
toolClearance: input.toolClearance,
|
||||
});
|
||||
|
||||
const rawServers = resolved.codexConfig?.mcp_servers;
|
||||
@@ -1115,10 +1252,14 @@ export class PipelineExecutor {
|
||||
return dedupeStrings(tools);
|
||||
}
|
||||
|
||||
private buildActorMcpContext(
|
||||
executionContext: ResolvedExecutionContext,
|
||||
prompt: string,
|
||||
): ActorExecutionMcpContext {
|
||||
private buildActorMcpContext(input: {
|
||||
sessionId: string;
|
||||
nodeId: string;
|
||||
attempt: number;
|
||||
executionContext: ResolvedExecutionContext;
|
||||
prompt: string;
|
||||
}): ActorExecutionMcpContext {
|
||||
const { executionContext, prompt } = input;
|
||||
const toolPolicy = toAllowedToolPolicy(executionContext.allowedTools);
|
||||
const filterToolsForProvider = (tools: string[]): string[] => {
|
||||
const deduped = dedupeStrings(tools);
|
||||
@@ -1129,6 +1270,7 @@ export class PipelineExecutor {
|
||||
? this.options.resolveMcpConfig({
|
||||
providerHint: "both",
|
||||
prompt,
|
||||
workingDirectory: executionContext.security.worktreePath,
|
||||
toolClearance: toolPolicy,
|
||||
})
|
||||
: {};
|
||||
@@ -1137,7 +1279,12 @@ export class PipelineExecutor {
|
||||
executionContext.allowedTools,
|
||||
);
|
||||
const resolveConfig = (context: McpLoadContext = {}): LoadedMcpConfig => {
|
||||
if (context.providerHint === "codex") {
|
||||
const withWorkingDirectory: McpLoadContext = {
|
||||
...context,
|
||||
...(context.workingDirectory ? {} : { workingDirectory: executionContext.security.worktreePath }),
|
||||
};
|
||||
|
||||
if (withWorkingDirectory.providerHint === "codex") {
|
||||
return {
|
||||
...(resolvedConfig.codexConfig ? { codexConfig: cloneMcpConfig(resolvedConfig).codexConfig } : {}),
|
||||
...(resolvedConfig.sourcePath ? { sourcePath: resolvedConfig.sourcePath } : {}),
|
||||
@@ -1147,7 +1294,7 @@ export class PipelineExecutor {
|
||||
};
|
||||
}
|
||||
|
||||
if (context.providerHint === "claude") {
|
||||
if (withWorkingDirectory.providerHint === "claude") {
|
||||
return {
|
||||
...(resolvedConfig.claudeMcpServers
|
||||
? { claudeMcpServers: cloneMcpConfig(resolvedConfig).claudeMcpServers }
|
||||
@@ -1163,7 +1310,13 @@ export class PipelineExecutor {
|
||||
};
|
||||
|
||||
const createToolPermissionHandler = (): ActorToolPermissionHandler =>
|
||||
this.createToolPermissionHandler(executionContext.allowedTools);
|
||||
this.createToolPermissionHandler({
|
||||
allowedTools: executionContext.allowedTools,
|
||||
violationMode: executionContext.security.violationMode,
|
||||
sessionId: input.sessionId,
|
||||
nodeId: input.nodeId,
|
||||
attempt: input.attempt,
|
||||
});
|
||||
|
||||
return {
|
||||
allowedTools: [...executionContext.allowedTools],
|
||||
@@ -1175,12 +1328,24 @@ export class PipelineExecutor {
|
||||
};
|
||||
}
|
||||
|
||||
private createToolPermissionHandler(allowedTools: readonly string[]): ActorToolPermissionHandler {
|
||||
const allowset = new Set(allowedTools);
|
||||
private createToolPermissionHandler(input: {
|
||||
allowedTools: readonly string[];
|
||||
violationMode: SecurityViolationHandling;
|
||||
sessionId: string;
|
||||
nodeId: string;
|
||||
attempt: number;
|
||||
}): ActorToolPermissionHandler {
|
||||
const allowset = new Set(input.allowedTools);
|
||||
const caseInsensitiveAllowLookup = buildCaseInsensitiveToolLookup(input.allowedTools);
|
||||
const rulesEngine = this.securityContext?.rulesEngine;
|
||||
const toolPolicy = toAllowedToolPolicy(allowedTools);
|
||||
const toolPolicy = toAllowedToolPolicy(input.allowedTools);
|
||||
const toolAuditContext = {
|
||||
sessionId: input.sessionId,
|
||||
nodeId: input.nodeId,
|
||||
attempt: input.attempt,
|
||||
};
|
||||
|
||||
return async (toolName, _input, options) => {
|
||||
return async (toolName, toolInput, options) => {
|
||||
const toolUseID = options.toolUseID;
|
||||
if (options.signal.aborted) {
|
||||
return {
|
||||
@@ -1192,12 +1357,34 @@ export class PipelineExecutor {
|
||||
}
|
||||
|
||||
const candidates = toToolNameCandidates(toolName);
|
||||
const allowMatch = candidates.find((candidate) => allowset.has(candidate));
|
||||
const allowMatch = resolveAllowedToolMatch({
|
||||
candidates,
|
||||
allowset,
|
||||
caseInsensitiveLookup: caseInsensitiveAllowLookup,
|
||||
});
|
||||
if (!allowMatch) {
|
||||
rulesEngine?.assertToolInvocationAllowed({
|
||||
tool: candidates[0] ?? toolName,
|
||||
toolClearance: toolPolicy,
|
||||
});
|
||||
if (rulesEngine) {
|
||||
try {
|
||||
rulesEngine.assertToolInvocationAllowed({
|
||||
tool: candidates[0] ?? toolName,
|
||||
toolClearance: toolPolicy,
|
||||
context: toolAuditContext,
|
||||
});
|
||||
} catch (error) {
|
||||
if (
|
||||
!(input.violationMode === "dangerous_warn_only" && error instanceof SecurityViolationError)
|
||||
) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (input.violationMode === "dangerous_warn_only") {
|
||||
return {
|
||||
behavior: "allow",
|
||||
updatedInput: toolInput,
|
||||
...(toolUseID ? { toolUseID } : {}),
|
||||
};
|
||||
}
|
||||
return {
|
||||
behavior: "deny",
|
||||
message: `Tool "${toolName}" is not in the resolved execution allowlist.`,
|
||||
@@ -1209,10 +1396,12 @@ export class PipelineExecutor {
|
||||
rulesEngine?.assertToolInvocationAllowed({
|
||||
tool: allowMatch,
|
||||
toolClearance: toolPolicy,
|
||||
context: toolAuditContext,
|
||||
});
|
||||
|
||||
return {
|
||||
behavior: "allow",
|
||||
updatedInput: toolInput,
|
||||
...(toolUseID ? { toolUseID } : {}),
|
||||
};
|
||||
};
|
||||
|
||||
@@ -5,12 +5,23 @@ import { deepCloneJson, isRecord, type JsonObject, type JsonValue } from "./type
|
||||
|
||||
export const PROJECT_CONTEXT_SCHEMA_VERSION = 1;
|
||||
|
||||
export type ProjectTaskStatus = "pending" | "in_progress" | "blocked" | "done";
|
||||
export type ProjectTaskStatus =
|
||||
| "pending"
|
||||
| "in_progress"
|
||||
| "review"
|
||||
| "conflict"
|
||||
| "resolving_conflict"
|
||||
| "merged"
|
||||
| "failed"
|
||||
| "blocked"
|
||||
| "done";
|
||||
|
||||
export type ProjectTask = {
|
||||
id: string;
|
||||
title: string;
|
||||
taskId: string;
|
||||
id?: string;
|
||||
title?: string;
|
||||
status: ProjectTaskStatus;
|
||||
worktreePath?: string;
|
||||
assignee?: string;
|
||||
metadata?: JsonObject;
|
||||
};
|
||||
@@ -52,7 +63,17 @@ function toJsonObject(value: unknown, label: string): JsonObject {
|
||||
}
|
||||
|
||||
function toTaskStatus(value: unknown, label: string): ProjectTaskStatus {
|
||||
if (value === "pending" || value === "in_progress" || value === "blocked" || value === "done") {
|
||||
if (
|
||||
value === "pending" ||
|
||||
value === "in_progress" ||
|
||||
value === "review" ||
|
||||
value === "conflict" ||
|
||||
value === "resolving_conflict" ||
|
||||
value === "merged" ||
|
||||
value === "failed" ||
|
||||
value === "blocked" ||
|
||||
value === "done"
|
||||
) {
|
||||
return value;
|
||||
}
|
||||
throw new Error(`${label} has unsupported status "${String(value)}".`);
|
||||
@@ -68,10 +89,28 @@ function toProjectTask(value: unknown, label: string): ProjectTask {
|
||||
throw new Error(`${label}.assignee must be a non-empty string when provided.`);
|
||||
}
|
||||
|
||||
const taskIdCandidate = value.taskId ?? value.id;
|
||||
const taskId = assertNonEmptyString(taskIdCandidate, `${label}.taskId`);
|
||||
|
||||
const titleRaw = value.title;
|
||||
if (titleRaw !== undefined && (typeof titleRaw !== "string" || titleRaw.trim().length === 0)) {
|
||||
throw new Error(`${label}.title must be a non-empty string when provided.`);
|
||||
}
|
||||
|
||||
const worktreePathRaw = value.worktreePath;
|
||||
if (
|
||||
worktreePathRaw !== undefined &&
|
||||
(typeof worktreePathRaw !== "string" || worktreePathRaw.trim().length === 0)
|
||||
) {
|
||||
throw new Error(`${label}.worktreePath must be a non-empty string when provided.`);
|
||||
}
|
||||
|
||||
return {
|
||||
id: assertNonEmptyString(value.id, `${label}.id`),
|
||||
title: assertNonEmptyString(value.title, `${label}.title`),
|
||||
taskId,
|
||||
id: taskId,
|
||||
...(typeof titleRaw === "string" ? { title: titleRaw.trim() } : {}),
|
||||
status: toTaskStatus(value.status, `${label}.status`),
|
||||
...(typeof worktreePathRaw === "string" ? { worktreePath: worktreePathRaw.trim() } : {}),
|
||||
...(typeof assignee === "string" ? { assignee: assignee.trim() } : {}),
|
||||
...(value.metadata !== undefined
|
||||
? { metadata: toJsonObject(value.metadata, `${label}.metadata`) }
|
||||
@@ -157,10 +196,10 @@ function mergeUpsertTasks(current: ProjectTask[], upserts: ProjectTask[]): Proje
|
||||
|
||||
const byId = new Map<string, ProjectTask>();
|
||||
for (const task of current) {
|
||||
byId.set(task.id, task);
|
||||
byId.set(task.taskId, task);
|
||||
}
|
||||
for (const task of upserts) {
|
||||
byId.set(task.id, task);
|
||||
byId.set(task.taskId, task);
|
||||
}
|
||||
|
||||
return [...byId.values()];
|
||||
|
||||
@@ -9,14 +9,16 @@ import {
|
||||
import { isDomainEventType, type DomainEventEmission } from "../agents/domain-events.js";
|
||||
import type { ActorExecutionInput, ActorExecutionResult, ActorExecutor } from "../agents/pipeline.js";
|
||||
import { isRecord, type JsonObject, type JsonValue } from "../agents/types.js";
|
||||
import { createSessionContext, type SessionContext } from "../examples/session-context.js";
|
||||
import { ClaudeObservabilityLogger } from "../ui/claude-observability.js";
|
||||
import { z } from "zod";
|
||||
|
||||
export type RunProvider = "codex" | "claude";
|
||||
|
||||
export type ProviderRunRuntime = {
|
||||
provider: RunProvider;
|
||||
config: Readonly<AppConfig>;
|
||||
sessionContext: SessionContext;
|
||||
sharedEnv: Record<string, string>;
|
||||
claudeObservability: ClaudeObservabilityLogger;
|
||||
close: () => Promise<void>;
|
||||
};
|
||||
|
||||
@@ -28,6 +30,16 @@ type ProviderUsage = {
|
||||
costUsd?: number;
|
||||
};
|
||||
|
||||
function sanitizeEnv(input: Record<string, string | undefined>): Record<string, string> {
|
||||
const output: Record<string, string> = {};
|
||||
for (const [key, value] of Object.entries(input)) {
|
||||
if (typeof value === "string") {
|
||||
output[key] = value;
|
||||
}
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
const ACTOR_RESPONSE_SCHEMA = {
|
||||
type: "object",
|
||||
additionalProperties: true,
|
||||
@@ -72,6 +84,15 @@ const CLAUDE_OUTPUT_FORMAT = {
|
||||
schema: ACTOR_RESPONSE_SCHEMA,
|
||||
} as const;
|
||||
|
||||
const ActorResponseSchema = z.object({
|
||||
status: z.enum(["success", "validation_fail", "failure"]),
|
||||
payload: z.unknown().optional(),
|
||||
stateFlags: z.unknown().optional(),
|
||||
stateMetadata: z.unknown().optional(),
|
||||
events: z.unknown().optional(),
|
||||
failureKind: z.unknown().optional(),
|
||||
failureCode: z.unknown().optional(),
|
||||
});
|
||||
function toErrorMessage(error: unknown): string {
|
||||
if (error instanceof Error) {
|
||||
return error.message;
|
||||
@@ -79,6 +100,23 @@ function toErrorMessage(error: unknown): string {
|
||||
return String(error);
|
||||
}
|
||||
|
||||
export function resolveProviderWorkingDirectory(actorInput: ActorExecutionInput): string {
|
||||
return actorInput.executionContext.security.worktreePath;
|
||||
}
|
||||
|
||||
export function buildProviderRuntimeEnv(input: {
|
||||
runtime: ProviderRunRuntime;
|
||||
actorInput: ActorExecutionInput;
|
||||
includeClaudeAuth?: boolean;
|
||||
}): Record<string, string> {
|
||||
const workingDirectory = resolveProviderWorkingDirectory(input.actorInput);
|
||||
return sanitizeEnv({
|
||||
...input.runtime.sharedEnv,
|
||||
...(input.includeClaudeAuth ? buildClaudeAuthEnv(input.runtime.config.provider) : {}),
|
||||
AGENT_WORKTREE_PATH: workingDirectory,
|
||||
});
|
||||
}
|
||||
|
||||
function toJsonValue(value: unknown): JsonValue {
|
||||
return JSON.parse(JSON.stringify(value)) as JsonValue;
|
||||
}
|
||||
@@ -234,8 +272,8 @@ function ensureUsageMetadata(input: {
|
||||
result: ActorExecutionResult;
|
||||
providerUsage: ProviderUsage;
|
||||
}): ActorExecutionResult {
|
||||
const stateMetadata = toJsonObject(input.result.stateMetadata) ?? {};
|
||||
const existingUsage = toJsonObject(stateMetadata.usage) ?? {};
|
||||
const stateMetadata = (input.result.stateMetadata as JsonObject | undefined) ?? {};
|
||||
const existingUsage = (stateMetadata.usage as JsonObject | undefined) ?? {};
|
||||
|
||||
const usage: JsonObject = {
|
||||
...existingUsage,
|
||||
@@ -270,7 +308,9 @@ export function parseActorExecutionResultFromModelOutput(input: {
|
||||
structuredOutput?: unknown;
|
||||
}): ActorExecutionResult {
|
||||
const parsed = tryParseResponseObject(input.rawText, input.structuredOutput);
|
||||
if (!isRecord(parsed)) {
|
||||
|
||||
const result = ActorResponseSchema.safeParse(parsed);
|
||||
if (!result.success) {
|
||||
return {
|
||||
status: "success",
|
||||
payload: {
|
||||
@@ -279,31 +319,22 @@ export function parseActorExecutionResultFromModelOutput(input: {
|
||||
};
|
||||
}
|
||||
|
||||
const status = parsed.status;
|
||||
if (status !== "success" && status !== "validation_fail" && status !== "failure") {
|
||||
return {
|
||||
status: "success",
|
||||
payload: {
|
||||
assistantResponse: input.rawText.trim(),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const payload = toJsonObject(parsed.payload) ?? {
|
||||
const { data } = result;
|
||||
const payload = toJsonObject(data.payload) ?? {
|
||||
assistantResponse: input.rawText.trim(),
|
||||
};
|
||||
const stateMetadata = toJsonObject(parsed.stateMetadata);
|
||||
const stateFlags = toBooleanRecord(parsed.stateFlags);
|
||||
const events = toEventEmissions(parsed.events);
|
||||
const failureKind = parsed.failureKind === "soft" || parsed.failureKind === "hard"
|
||||
? parsed.failureKind
|
||||
const stateMetadata = toJsonObject(data.stateMetadata);
|
||||
const stateFlags = toBooleanRecord(data.stateFlags);
|
||||
const events = toEventEmissions(data.events);
|
||||
const failureKind = data.failureKind === "soft" || data.failureKind === "hard"
|
||||
? data.failureKind
|
||||
: undefined;
|
||||
const failureCode = typeof parsed.failureCode === "string"
|
||||
? parsed.failureCode
|
||||
const failureCode = typeof data.failureCode === "string"
|
||||
? data.failureCode
|
||||
: undefined;
|
||||
|
||||
return {
|
||||
status,
|
||||
status: data.status,
|
||||
payload,
|
||||
...(stateFlags ? { stateFlags } : {}),
|
||||
...(stateMetadata ? { stateMetadata } : {}),
|
||||
@@ -331,7 +362,7 @@ function buildActorPrompt(input: ActorExecutionInput): string {
|
||||
},
|
||||
events: [
|
||||
{
|
||||
type: "requirements_defined | tasks_planned | code_committed | task_blocked | validation_passed | validation_failed | branch_merged",
|
||||
type: "requirements_defined | tasks_planned | code_committed | task_ready_for_review | task_blocked | validation_passed | validation_failed | branch_merged | merge_conflict_detected | merge_conflict_resolved | merge_conflict_unresolved | merge_retry_started",
|
||||
payload: {
|
||||
summary: "optional",
|
||||
details: {},
|
||||
@@ -363,6 +394,7 @@ async function runCodexActor(input: {
|
||||
const prompt = buildActorPrompt(actorInput);
|
||||
const startedAt = Date.now();
|
||||
const apiKey = resolveOpenAiApiKey(runtime.config.provider);
|
||||
const workingDirectory = resolveProviderWorkingDirectory(actorInput);
|
||||
|
||||
const codex = new Codex({
|
||||
...(apiKey ? { apiKey } : {}),
|
||||
@@ -372,28 +404,29 @@ async function runCodexActor(input: {
|
||||
...(actorInput.mcp.resolvedConfig.codexConfig
|
||||
? { config: actorInput.mcp.resolvedConfig.codexConfig }
|
||||
: {}),
|
||||
env: runtime.sessionContext.runtimeInjection.env,
|
||||
env: buildProviderRuntimeEnv({
|
||||
runtime,
|
||||
actorInput,
|
||||
}),
|
||||
});
|
||||
|
||||
const thread = codex.startThread({
|
||||
workingDirectory: runtime.sessionContext.runtimeInjection.workingDirectory,
|
||||
workingDirectory,
|
||||
skipGitRepoCheck: runtime.config.provider.codexSkipGitCheck,
|
||||
});
|
||||
|
||||
const turn = await runtime.sessionContext.runInSession(() =>
|
||||
thread.run(prompt, {
|
||||
signal: actorInput.signal,
|
||||
outputSchema: ACTOR_RESPONSE_SCHEMA,
|
||||
}),
|
||||
);
|
||||
const turn = await thread.run(prompt, {
|
||||
signal: actorInput.signal,
|
||||
outputSchema: ACTOR_RESPONSE_SCHEMA,
|
||||
});
|
||||
|
||||
const usage: ProviderUsage = {
|
||||
...(turn.usage
|
||||
? {
|
||||
tokenInput: turn.usage.input_tokens + turn.usage.cached_input_tokens,
|
||||
tokenOutput: turn.usage.output_tokens,
|
||||
tokenTotal: turn.usage.input_tokens + turn.usage.cached_input_tokens + turn.usage.output_tokens,
|
||||
}
|
||||
tokenInput: turn.usage.input_tokens + turn.usage.cached_input_tokens,
|
||||
tokenOutput: turn.usage.output_tokens,
|
||||
tokenTotal: turn.usage.input_tokens + turn.usage.cached_input_tokens + turn.usage.output_tokens,
|
||||
}
|
||||
: {}),
|
||||
durationMs: Date.now() - startedAt,
|
||||
};
|
||||
@@ -414,26 +447,63 @@ type ClaudeTurnResult = {
|
||||
usage: ProviderUsage;
|
||||
};
|
||||
|
||||
function toClaudeTraceContext(actorInput: ActorExecutionInput): {
|
||||
sessionId: string;
|
||||
nodeId: string;
|
||||
attempt: number;
|
||||
depth: number;
|
||||
} {
|
||||
return {
|
||||
sessionId: actorInput.sessionId,
|
||||
nodeId: actorInput.node.id,
|
||||
attempt: actorInput.attempt,
|
||||
depth: actorInput.depth,
|
||||
};
|
||||
}
|
||||
|
||||
function toProviderUsageJson(usage: ProviderUsage): JsonObject {
|
||||
const output: JsonObject = {};
|
||||
if (typeof usage.tokenInput === "number") {
|
||||
output.tokenInput = usage.tokenInput;
|
||||
}
|
||||
if (typeof usage.tokenOutput === "number") {
|
||||
output.tokenOutput = usage.tokenOutput;
|
||||
}
|
||||
if (typeof usage.tokenTotal === "number") {
|
||||
output.tokenTotal = usage.tokenTotal;
|
||||
}
|
||||
if (typeof usage.durationMs === "number") {
|
||||
output.durationMs = usage.durationMs;
|
||||
}
|
||||
if (typeof usage.costUsd === "number") {
|
||||
output.costUsd = usage.costUsd;
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
function buildClaudeOptions(input: {
|
||||
runtime: ProviderRunRuntime;
|
||||
actorInput: ActorExecutionInput;
|
||||
}): Options {
|
||||
const { runtime, actorInput } = input;
|
||||
const workingDirectory = resolveProviderWorkingDirectory(actorInput);
|
||||
|
||||
const authOptionOverrides = runtime.config.provider.anthropicOauthToken
|
||||
? { authToken: runtime.config.provider.anthropicOauthToken }
|
||||
: (() => {
|
||||
const token = resolveAnthropicToken(runtime.config.provider);
|
||||
return token ? { apiKey: token } : {};
|
||||
})();
|
||||
const token = resolveAnthropicToken(runtime.config.provider);
|
||||
return token ? { apiKey: token } : {};
|
||||
})();
|
||||
|
||||
const runtimeEnv = {
|
||||
...runtime.sessionContext.runtimeInjection.env,
|
||||
...buildClaudeAuthEnv(runtime.config.provider),
|
||||
};
|
||||
const runtimeEnv = buildProviderRuntimeEnv({
|
||||
runtime,
|
||||
actorInput,
|
||||
includeClaudeAuth: true,
|
||||
});
|
||||
const traceContext = toClaudeTraceContext(actorInput);
|
||||
|
||||
return {
|
||||
maxTurns: 1,
|
||||
maxTurns: runtime.config.provider.claudeMaxTurns,
|
||||
...(runtime.config.provider.claudeModel
|
||||
? { model: runtime.config.provider.claudeModel }
|
||||
: {}),
|
||||
@@ -445,8 +515,11 @@ function buildClaudeOptions(input: {
|
||||
? { mcpServers: actorInput.mcp.resolvedConfig.claudeMcpServers as Options["mcpServers"] }
|
||||
: {}),
|
||||
canUseTool: actorInput.mcp.createClaudeCanUseTool(),
|
||||
cwd: runtime.sessionContext.runtimeInjection.workingDirectory,
|
||||
cwd: workingDirectory,
|
||||
env: runtimeEnv,
|
||||
...runtime.claudeObservability.toOptionOverrides({
|
||||
context: traceContext,
|
||||
}),
|
||||
outputFormat: CLAUDE_OUTPUT_FORMAT,
|
||||
};
|
||||
}
|
||||
@@ -456,10 +529,19 @@ async function runClaudeTurn(input: {
|
||||
actorInput: ActorExecutionInput;
|
||||
prompt: string;
|
||||
}): Promise<ClaudeTurnResult> {
|
||||
const traceContext = toClaudeTraceContext(input.actorInput);
|
||||
const options = buildClaudeOptions({
|
||||
runtime: input.runtime,
|
||||
actorInput: input.actorInput,
|
||||
});
|
||||
input.runtime.claudeObservability.recordQueryStarted({
|
||||
context: traceContext,
|
||||
data: {
|
||||
...(options.model ? { model: options.model } : {}),
|
||||
maxTurns: options.maxTurns ?? input.runtime.config.provider.claudeMaxTurns,
|
||||
...(typeof options.cwd === "string" ? { cwd: options.cwd } : {}),
|
||||
},
|
||||
});
|
||||
|
||||
const startedAt = Date.now();
|
||||
const stream = query({
|
||||
@@ -470,6 +552,7 @@ async function runClaudeTurn(input: {
|
||||
let resultText = "";
|
||||
let structuredOutput: unknown;
|
||||
let usage: ProviderUsage = {};
|
||||
let messageCount = 0;
|
||||
|
||||
const onAbort = (): void => {
|
||||
stream.close();
|
||||
@@ -479,6 +562,12 @@ async function runClaudeTurn(input: {
|
||||
|
||||
try {
|
||||
for await (const message of stream as AsyncIterable<SDKMessage>) {
|
||||
messageCount += 1;
|
||||
input.runtime.claudeObservability.recordMessage({
|
||||
context: traceContext,
|
||||
message,
|
||||
});
|
||||
|
||||
if (message.type !== "result") {
|
||||
continue;
|
||||
}
|
||||
@@ -500,6 +589,12 @@ async function runClaudeTurn(input: {
|
||||
costUsd: message.total_cost_usd,
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
input.runtime.claudeObservability.recordQueryError({
|
||||
context: traceContext,
|
||||
error,
|
||||
});
|
||||
throw error;
|
||||
} finally {
|
||||
input.actorInput.signal.removeEventListener("abort", onAbort);
|
||||
stream.close();
|
||||
@@ -510,9 +605,22 @@ async function runClaudeTurn(input: {
|
||||
}
|
||||
|
||||
if (!resultText) {
|
||||
throw new Error("Claude run completed without a final result.");
|
||||
const error = new Error("Claude run completed without a final result.");
|
||||
input.runtime.claudeObservability.recordQueryError({
|
||||
context: traceContext,
|
||||
error,
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
|
||||
input.runtime.claudeObservability.recordQueryCompleted({
|
||||
context: traceContext,
|
||||
data: {
|
||||
messageCount,
|
||||
usage: toProviderUsageJson(usage),
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
text: resultText,
|
||||
structuredOutput,
|
||||
@@ -528,13 +636,11 @@ async function runClaudeActor(input: {
|
||||
actorInput: ActorExecutionInput;
|
||||
}): Promise<ActorExecutionResult> {
|
||||
const prompt = buildActorPrompt(input.actorInput);
|
||||
const turn = await input.runtime.sessionContext.runInSession(() =>
|
||||
runClaudeTurn({
|
||||
runtime: input.runtime,
|
||||
actorInput: input.actorInput,
|
||||
prompt,
|
||||
}),
|
||||
);
|
||||
const turn = await runClaudeTurn({
|
||||
runtime: input.runtime,
|
||||
actorInput: input.actorInput,
|
||||
prompt,
|
||||
});
|
||||
|
||||
const parsed = parseActorExecutionResultFromModelOutput({
|
||||
rawText: turn.text,
|
||||
@@ -549,21 +655,21 @@ async function runClaudeActor(input: {
|
||||
|
||||
export async function createProviderRunRuntime(input: {
|
||||
provider: RunProvider;
|
||||
initialPrompt: string;
|
||||
config: Readonly<AppConfig>;
|
||||
observabilityRootPath?: string;
|
||||
baseEnv?: Record<string, string | undefined>;
|
||||
}): Promise<ProviderRunRuntime> {
|
||||
const sessionContext = await createSessionContext(input.provider, {
|
||||
prompt: input.initialPrompt,
|
||||
config: input.config,
|
||||
const claudeObservability = new ClaudeObservabilityLogger({
|
||||
workspaceRoot: input.observabilityRootPath ?? process.cwd(),
|
||||
config: input.config.provider.claudeObservability,
|
||||
});
|
||||
|
||||
return {
|
||||
provider: input.provider,
|
||||
config: input.config,
|
||||
sessionContext,
|
||||
close: async () => {
|
||||
await sessionContext.close();
|
||||
},
|
||||
sharedEnv: sanitizeEnv(input.baseEnv ?? process.env),
|
||||
claudeObservability,
|
||||
close: async () => claudeObservability.close(),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { execFile } from "node:child_process";
|
||||
import { createHash } from "node:crypto";
|
||||
import { mkdir, open, unlink, writeFile } from "node:fs/promises";
|
||||
import { mkdir, open, stat, unlink, writeFile } from "node:fs/promises";
|
||||
import { dirname, isAbsolute, resolve } from "node:path";
|
||||
import { promisify } from "node:util";
|
||||
|
||||
@@ -197,9 +197,9 @@ export class ResourceProvisioningOrchestrator {
|
||||
async provisionSession(input: {
|
||||
sessionId: string;
|
||||
resources: ResourceRequest[];
|
||||
workspaceRoot?: string;
|
||||
workspaceRoot: string;
|
||||
}): Promise<ProvisionedResources> {
|
||||
const workspaceRoot = resolve(input.workspaceRoot ?? process.cwd());
|
||||
const workspaceRoot = resolve(input.workspaceRoot);
|
||||
const hardConstraints: ProvisionedResourcesState["hardConstraints"] = [];
|
||||
const releases: ProvisionedResourcesState["releases"] = [];
|
||||
const env: Record<string, string> = {};
|
||||
@@ -272,6 +272,7 @@ export class ResourceProvisioningOrchestrator {
|
||||
export type GitWorktreeProviderConfig = {
|
||||
rootDirectory: string;
|
||||
baseRef: string;
|
||||
targetPath?: string;
|
||||
};
|
||||
|
||||
export type PortRangeProviderConfig = {
|
||||
@@ -313,6 +314,10 @@ export function createGitWorktreeProvider(
|
||||
provision: async ({ sessionId, workspaceRoot, options }) => {
|
||||
const rootDirectory = readOptionalString(options, "rootDirectory", config.rootDirectory);
|
||||
const baseRef = readOptionalString(options, "baseRef", config.baseRef);
|
||||
const targetPath = normalizeWorktreeTargetPath(
|
||||
readOptionalStringOrUndefined(options, "targetPath") ?? config.targetPath,
|
||||
"targetPath",
|
||||
);
|
||||
|
||||
const repoRoot = await runGit(["-C", workspaceRoot, "rev-parse", "--show-toplevel"]);
|
||||
const worktreeRoot = resolvePath(repoRoot, rootDirectory);
|
||||
@@ -321,6 +326,18 @@ export function createGitWorktreeProvider(
|
||||
const worktreeName = buildScopedName(sessionId);
|
||||
const worktreePath = resolve(worktreeRoot, worktreeName);
|
||||
await runGit(["-C", repoRoot, "worktree", "add", "--detach", worktreePath, baseRef]);
|
||||
if (targetPath) {
|
||||
await runGit(["-C", worktreePath, "sparse-checkout", "init", "--cone"]);
|
||||
await runGit(["-C", worktreePath, "sparse-checkout", "set", targetPath]);
|
||||
}
|
||||
|
||||
const preferredWorkingDirectory = targetPath ? resolve(worktreePath, targetPath) : worktreePath;
|
||||
await assertDirectoryExists(
|
||||
preferredWorkingDirectory,
|
||||
targetPath
|
||||
? `Configured worktree target path "${targetPath}" is not a directory in ref "${baseRef}".`
|
||||
: `Provisioned worktree path "${preferredWorkingDirectory}" does not exist.`,
|
||||
);
|
||||
|
||||
return {
|
||||
kind: "git-worktree",
|
||||
@@ -329,6 +346,7 @@ export function createGitWorktreeProvider(
|
||||
worktreeRoot,
|
||||
worktreePath,
|
||||
baseRef,
|
||||
...(targetPath ? { targetPath } : {}),
|
||||
},
|
||||
soft: {
|
||||
env: {
|
||||
@@ -339,12 +357,14 @@ export function createGitWorktreeProvider(
|
||||
promptSections: [
|
||||
`Git worktree: ${worktreePath}`,
|
||||
`Worktree base ref: ${baseRef}`,
|
||||
...(targetPath ? [`Worktree target path: ${targetPath} (sparse-checkout enabled)`] : []),
|
||||
],
|
||||
metadata: {
|
||||
git_worktree_path: worktreePath,
|
||||
git_worktree_base_ref: baseRef,
|
||||
...(targetPath ? { git_worktree_target_path: targetPath } : {}),
|
||||
},
|
||||
preferredWorkingDirectory: worktreePath,
|
||||
preferredWorkingDirectory,
|
||||
},
|
||||
release: async () => {
|
||||
await runGit(["-C", repoRoot, "worktree", "remove", "--force", worktreePath]);
|
||||
@@ -576,6 +596,21 @@ function readOptionalString(
|
||||
return value.trim();
|
||||
}
|
||||
|
||||
function readOptionalStringOrUndefined(
|
||||
options: Record<string, unknown>,
|
||||
key: string,
|
||||
): string | undefined {
|
||||
const value = options[key];
|
||||
if (value === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
if (typeof value !== "string") {
|
||||
throw new Error(`Option "${key}" must be a string when provided.`);
|
||||
}
|
||||
const trimmed = value.trim();
|
||||
return trimmed.length > 0 ? trimmed : undefined;
|
||||
}
|
||||
|
||||
function readOptionalInteger(
|
||||
options: Record<string, unknown>,
|
||||
key: string,
|
||||
@@ -595,6 +630,46 @@ function readOptionalInteger(
|
||||
return value;
|
||||
}
|
||||
|
||||
function normalizeWorktreeTargetPath(value: string | undefined, key: string): string | undefined {
|
||||
if (!value) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const slashNormalized = value.replace(/\\/g, "/");
|
||||
if (isAbsolute(slashNormalized) || /^[a-zA-Z]:\//.test(slashNormalized)) {
|
||||
throw new Error(`Option "${key}" must be a relative path within the repository worktree.`);
|
||||
}
|
||||
|
||||
const normalizedSegments = slashNormalized
|
||||
.split("/")
|
||||
.map((segment) => segment.trim())
|
||||
.filter((segment) => segment.length > 0 && segment !== ".");
|
||||
|
||||
if (normalizedSegments.some((segment) => segment === "..")) {
|
||||
throw new Error(`Option "${key}" must not contain ".." path segments.`);
|
||||
}
|
||||
|
||||
if (normalizedSegments.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return normalizedSegments.join("/");
|
||||
}
|
||||
|
||||
async function assertDirectoryExists(path: string, errorMessage: string): Promise<void> {
|
||||
try {
|
||||
const stats = await stat(path);
|
||||
if (!stats.isDirectory()) {
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
function readNumberFromAllocation(allocation: Record<string, JsonValue>, key: string): number {
|
||||
const value = allocation[key];
|
||||
if (typeof value !== "number" || !Number.isInteger(value)) {
|
||||
@@ -642,6 +717,8 @@ export function buildChildResourceRequests(input: ChildResourceSuballocationInpu
|
||||
const parentWorktreePath = readStringFromAllocation(parentGit, "worktreePath");
|
||||
const baseRefRaw = parentGit.baseRef;
|
||||
const baseRef = typeof baseRefRaw === "string" && baseRefRaw.trim().length > 0 ? baseRefRaw : "HEAD";
|
||||
const targetPathRaw = parentGit.targetPath;
|
||||
const targetPath = typeof targetPathRaw === "string" ? targetPathRaw.trim() : "";
|
||||
|
||||
requests.push({
|
||||
kind: "git-worktree",
|
||||
@@ -652,6 +729,7 @@ export function buildChildResourceRequests(input: ChildResourceSuballocationInpu
|
||||
buildScopedName(input.parentSnapshot.sessionId),
|
||||
),
|
||||
baseRef,
|
||||
...(targetPath ? { targetPath } : {}),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ function toProvisioningConfig(input: Readonly<AppConfig>): BuiltInProvisioningCo
|
||||
gitWorktree: {
|
||||
rootDirectory: input.provisioning.gitWorktree.rootDirectory,
|
||||
baseRef: input.provisioning.gitWorktree.baseRef,
|
||||
targetPath: input.provisioning.gitWorktree.targetPath,
|
||||
},
|
||||
portRange: {
|
||||
basePort: input.provisioning.portRange.basePort,
|
||||
|
||||
872
src/agents/session-lifecycle.ts
Normal file
872
src/agents/session-lifecycle.ts
Normal file
@@ -0,0 +1,872 @@
|
||||
import { execFile } from "node:child_process";
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { mkdir, readFile, readdir, stat } from "node:fs/promises";
|
||||
import { dirname, isAbsolute, resolve } from "node:path";
|
||||
import { promisify } from "node:util";
|
||||
import { withFileLock, writeUtf8FileAtomic } from "./file-persistence.js";
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
const SESSION_METADATA_FILE_NAME = "session-metadata.json";
|
||||
|
||||
export type SessionStatus = "active" | "suspended" | "closed" | "closed_with_conflicts";
|
||||
|
||||
export type SessionMetadata = {
|
||||
sessionId: string;
|
||||
projectPath: string;
|
||||
sessionStatus: SessionStatus;
|
||||
baseWorkspacePath: string;
|
||||
createdAt: string;
|
||||
updatedAt: string;
|
||||
};
|
||||
|
||||
export type CreateSessionRequest = {
|
||||
projectPath: string;
|
||||
};
|
||||
|
||||
export type MergeTaskIntoBaseOutcome =
|
||||
| {
|
||||
kind: "success";
|
||||
taskId: string;
|
||||
worktreePath: string;
|
||||
baseWorkspacePath: string;
|
||||
}
|
||||
| {
|
||||
kind: "conflict";
|
||||
taskId: string;
|
||||
worktreePath: string;
|
||||
baseWorkspacePath: string;
|
||||
conflictFiles: string[];
|
||||
mergeBase?: string;
|
||||
}
|
||||
| {
|
||||
kind: "fatal_error";
|
||||
taskId: string;
|
||||
worktreePath: string;
|
||||
baseWorkspacePath: string;
|
||||
error: string;
|
||||
mergeBase?: string;
|
||||
};
|
||||
|
||||
export type CloseSessionOutcome =
|
||||
| {
|
||||
kind: "success";
|
||||
sessionId: string;
|
||||
mergedToProject: boolean;
|
||||
}
|
||||
| {
|
||||
kind: "conflict";
|
||||
sessionId: string;
|
||||
worktreePath: string;
|
||||
conflictFiles: string[];
|
||||
mergeBase?: string;
|
||||
baseBranch?: string;
|
||||
}
|
||||
| {
|
||||
kind: "fatal_error";
|
||||
sessionId: string;
|
||||
error: string;
|
||||
baseBranch?: string;
|
||||
mergeBase?: string;
|
||||
};
|
||||
|
||||
type GitExecutionResult = {
|
||||
exitCode: number;
|
||||
stdout: string;
|
||||
stderr: string;
|
||||
};
|
||||
|
||||
type GitWorktreeRecord = {
|
||||
path: string;
|
||||
branchRef?: string;
|
||||
};
|
||||
|
||||
function toErrorMessage(error: unknown): string {
|
||||
if (error instanceof Error) {
|
||||
return error.message;
|
||||
}
|
||||
return String(error);
|
||||
}
|
||||
|
||||
function assertAbsolutePath(path: string, label: string): string {
|
||||
if (!isAbsolute(path)) {
|
||||
throw new Error(`${label} must be an absolute path.`);
|
||||
}
|
||||
return resolve(path);
|
||||
}
|
||||
|
||||
function normalizeWorktreePath(path: string): string {
|
||||
const normalized = resolve(path);
|
||||
return normalized.startsWith("/private/var/") ? normalized.slice("/private".length) : normalized;
|
||||
}
|
||||
|
||||
function assertNonEmptyString(value: unknown, label: string): string {
|
||||
if (typeof value !== "string" || value.trim().length === 0) {
|
||||
throw new Error(`${label} must be a non-empty string.`);
|
||||
}
|
||||
return value.trim();
|
||||
}
|
||||
|
||||
function toSessionStatus(value: unknown): SessionStatus {
|
||||
if (
|
||||
value === "active" ||
|
||||
value === "suspended" ||
|
||||
value === "closed" ||
|
||||
value === "closed_with_conflicts"
|
||||
) {
|
||||
return value;
|
||||
}
|
||||
throw new Error(`Session status "${String(value)}" is not supported.`);
|
||||
}
|
||||
|
||||
function toSessionMetadata(value: unknown): SessionMetadata {
|
||||
if (!value || typeof value !== "object" || Array.isArray(value)) {
|
||||
throw new Error("Session metadata file is malformed.");
|
||||
}
|
||||
|
||||
const raw = value as Record<string, unknown>;
|
||||
|
||||
return {
|
||||
sessionId: assertNonEmptyString(raw.sessionId, "sessionId"),
|
||||
projectPath: assertAbsolutePath(assertNonEmptyString(raw.projectPath, "projectPath"), "projectPath"),
|
||||
baseWorkspacePath: assertAbsolutePath(
|
||||
assertNonEmptyString(raw.baseWorkspacePath, "baseWorkspacePath"),
|
||||
"baseWorkspacePath",
|
||||
),
|
||||
sessionStatus: toSessionStatus(raw.sessionStatus),
|
||||
createdAt: assertNonEmptyString(raw.createdAt, "createdAt"),
|
||||
updatedAt: assertNonEmptyString(raw.updatedAt, "updatedAt"),
|
||||
};
|
||||
}
|
||||
|
||||
async function runGit(args: string[]): Promise<string> {
|
||||
const result = await runGitWithResult(args);
|
||||
if (result.exitCode !== 0) {
|
||||
throw new Error(`git ${args.join(" ")} failed: ${result.stderr || result.stdout || "unknown git error"}`);
|
||||
}
|
||||
return result.stdout.trim();
|
||||
}
|
||||
|
||||
async function runGitWithResult(args: string[]): Promise<GitExecutionResult> {
|
||||
try {
|
||||
const { stdout, stderr } = await execFileAsync("git", args, {
|
||||
encoding: "utf8",
|
||||
});
|
||||
return {
|
||||
exitCode: 0,
|
||||
stdout: stdout.trim(),
|
||||
stderr: stderr.trim(),
|
||||
};
|
||||
} catch (error) {
|
||||
const failure = error as {
|
||||
code?: number | string;
|
||||
stdout?: string;
|
||||
stderr?: string;
|
||||
};
|
||||
if (typeof failure.code === "number") {
|
||||
return {
|
||||
exitCode: failure.code,
|
||||
stdout: String(failure.stdout ?? "").trim(),
|
||||
stderr: String(failure.stderr ?? "").trim(),
|
||||
};
|
||||
}
|
||||
throw new Error(`git ${args.join(" ")} failed: ${toErrorMessage(error)}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function pathExists(path: string): Promise<boolean> {
|
||||
try {
|
||||
await stat(path);
|
||||
return true;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||
return false;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
function sanitizeSegment(value: string, fallback: string): string {
|
||||
const normalized = value
|
||||
.trim()
|
||||
.replace(/[^a-zA-Z0-9_-]/g, "-")
|
||||
.replace(/-+/g, "-")
|
||||
.replace(/^-+/, "")
|
||||
.replace(/-+$/, "");
|
||||
return normalized || fallback;
|
||||
}
|
||||
|
||||
function toGitFailureMessage(result: GitExecutionResult): string {
|
||||
const details = result.stderr || result.stdout || "unknown git error";
|
||||
return `git command failed with exit code ${String(result.exitCode)}: ${details}`;
|
||||
}
|
||||
|
||||
function toStringLines(value: string): string[] {
|
||||
return value
|
||||
.split("\n")
|
||||
.map((line) => line.trim())
|
||||
.filter((line) => line.length > 0);
|
||||
}
|
||||
|
||||
function parseGitWorktreeRecords(value: string): GitWorktreeRecord[] {
|
||||
const lines = value.split("\n");
|
||||
const records: GitWorktreeRecord[] = [];
|
||||
let current: GitWorktreeRecord | undefined;
|
||||
|
||||
for (const line of lines) {
|
||||
if (!line.trim()) {
|
||||
if (current) {
|
||||
records.push(current);
|
||||
current = undefined;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (line.startsWith("worktree ")) {
|
||||
if (current) {
|
||||
records.push(current);
|
||||
}
|
||||
current = {
|
||||
path: line.slice("worktree ".length).trim(),
|
||||
};
|
||||
continue;
|
||||
}
|
||||
if (line.startsWith("branch ") && current) {
|
||||
current.branchRef = line.slice("branch ".length).trim();
|
||||
}
|
||||
}
|
||||
|
||||
if (current) {
|
||||
records.push(current);
|
||||
}
|
||||
|
||||
return records;
|
||||
}
|
||||
|
||||
/**
 * Persists session metadata as JSON files under a state root directory,
 * one subdirectory per session. Writes are serialized with a file lock and
 * performed atomically.
 */
export class FileSystemSessionMetadataStore {
  // Absolute root under which each session gets its own directory.
  private readonly stateRoot: string;

  constructor(input: { stateRoot: string }) {
    this.stateRoot = resolve(input.stateRoot);
  }

  getStateRoot(): string {
    return this.stateRoot;
  }

  /** Directory holding all files for one session. */
  getSessionDirectory(sessionId: string): string {
    return resolve(this.stateRoot, sessionId);
  }

  /** Path of the session's metadata JSON file. */
  getSessionMetadataPath(sessionId: string): string {
    return resolve(this.getSessionDirectory(sessionId), SESSION_METADATA_FILE_NAME);
  }

  /** Path of the session's project-context JSON file. */
  getSessionProjectContextPath(sessionId: string): string {
    return resolve(this.getSessionDirectory(sessionId), "project-context.json");
  }

  /**
   * Create and persist metadata for a new session.
   * A blank/missing sessionId gets a random UUID; paths must be absolute.
   */
  async createSession(input: {
    projectPath: string;
    baseWorkspacePath: string;
    sessionId?: string;
  }): Promise<SessionMetadata> {
    const sessionId = input.sessionId?.trim() || randomUUID();
    const now = new Date().toISOString();
    const metadata: SessionMetadata = {
      sessionId,
      projectPath: assertAbsolutePath(input.projectPath, "projectPath"),
      baseWorkspacePath: assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath"),
      sessionStatus: "active",
      createdAt: now,
      updatedAt: now,
    };

    const sessionDirectory = this.getSessionDirectory(sessionId);
    await mkdir(sessionDirectory, { recursive: true });
    await this.writeSessionMetadata(metadata);

    return metadata;
  }

  /**
   * Read and validate one session's metadata.
   * @returns undefined when the metadata file does not exist.
   */
  async readSession(sessionId: string): Promise<SessionMetadata | undefined> {
    const metadataPath = this.getSessionMetadataPath(sessionId);

    try {
      const content = await readFile(metadataPath, "utf8");
      return toSessionMetadata(JSON.parse(content) as unknown);
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code === "ENOENT") {
        return undefined;
      }
      throw error;
    }
  }

  /**
   * List all sessions under the state root, newest first (by createdAt).
   * Directories without valid metadata are skipped; a missing state root
   * yields an empty list.
   */
  async listSessions(): Promise<SessionMetadata[]> {
    try {
      const entries = await readdir(this.stateRoot, { withFileTypes: true });
      const sessions: SessionMetadata[] = [];

      for (const entry of entries) {
        if (!entry.isDirectory()) {
          continue;
        }

        const metadata = await this.readSession(entry.name);
        if (metadata) {
          sessions.push(metadata);
        }
      }

      sessions.sort((left, right) => right.createdAt.localeCompare(left.createdAt));
      return sessions;
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code === "ENOENT") {
        return [];
      }
      throw error;
    }
  }

  /**
   * Apply a partial update to an existing session and persist it.
   * Paths in the patch are re-validated; updatedAt is always refreshed.
   * @throws Error when the session does not exist.
   */
  async updateSession(
    sessionId: string,
    patch: Partial<Pick<SessionMetadata, "projectPath" | "baseWorkspacePath" | "sessionStatus">>,
  ): Promise<SessionMetadata> {
    const current = await this.readSession(sessionId);
    if (!current) {
      throw new Error(`Session "${sessionId}" does not exist.`);
    }

    const next: SessionMetadata = {
      ...current,
      ...(patch.projectPath ? { projectPath: assertAbsolutePath(patch.projectPath, "projectPath") } : {}),
      ...(patch.baseWorkspacePath
        ? { baseWorkspacePath: assertAbsolutePath(patch.baseWorkspacePath, "baseWorkspacePath") }
        : {}),
      ...(patch.sessionStatus ? { sessionStatus: patch.sessionStatus } : {}),
      updatedAt: new Date().toISOString(),
    };

    await this.writeSessionMetadata(next);
    return next;
  }

  // Serialize writers via a lock file, then write the JSON atomically so
  // concurrent readers never observe a partial file.
  private async writeSessionMetadata(metadata: SessionMetadata): Promise<void> {
    const metadataPath = this.getSessionMetadataPath(metadata.sessionId);
    await mkdir(dirname(metadataPath), { recursive: true });
    await withFileLock(`${metadataPath}.lock`, async () => {
      await writeUtf8FileAtomic(metadataPath, `${JSON.stringify(metadata, null, 2)}\n`);
    });
  }
}
|
||||
|
||||
/**
 * Manages git worktrees for orchestration sessions: one shared "base"
 * workspace per session plus one worktree per task, with merge flows
 * task branch -> session base branch -> project checkout.
 */
export class SessionWorktreeManager {
  // Absolute directory under which all session worktrees live.
  private readonly worktreeRoot: string;
  // Ref the session base workspace is created from (e.g. "HEAD").
  private readonly baseRef: string;
  // Optional normalized repo-relative subdirectory; when set, worktrees are
  // sparse-checked-out to it and the working directory points inside it.
  private readonly targetPath?: string;

  constructor(input: {
    worktreeRoot: string;
    baseRef: string;
    targetPath?: string;
  }) {
    this.worktreeRoot = assertAbsolutePath(input.worktreeRoot, "worktreeRoot");
    this.baseRef = assertNonEmptyString(input.baseRef, "baseRef");
    this.targetPath = normalizeWorktreeTargetPath(input.targetPath, "targetPath");
  }

  /** Absolute path of the session's shared base workspace. */
  resolveBaseWorkspacePath(sessionId: string): string {
    const scoped = sanitizeSegment(sessionId, "session");
    return resolve(this.worktreeRoot, scoped, "base");
  }

  /** Absolute path of the worktree dedicated to a single task. */
  resolveTaskWorktreePath(sessionId: string, taskId: string): string {
    const scopedSession = sanitizeSegment(sessionId, "session");
    const scopedTask = sanitizeSegment(taskId, "task");
    return resolve(this.worktreeRoot, scopedSession, "tasks", scopedTask);
  }

  /** Working directory inside a worktree: targetPath subdir when configured. */
  resolveWorkingDirectoryForWorktree(worktreePath: string): string {
    const normalizedWorktreePath = assertAbsolutePath(worktreePath, "worktreePath");
    return this.targetPath ? resolve(normalizedWorktreePath, this.targetPath) : normalizedWorktreePath;
  }

  // Branch name backing the session's base workspace.
  private resolveBaseBranchName(sessionId: string): string {
    const scoped = sanitizeSegment(sessionId, "session");
    return `ai-ops/${scoped}/base`;
  }

  // Branch name backing one task's worktree.
  private resolveTaskBranchName(sessionId: string, taskId: string): string {
    const scopedSession = sanitizeSegment(sessionId, "session");
    const scopedTask = sanitizeSegment(taskId, "task");
    return `ai-ops/${scopedSession}/task/${scopedTask}`;
  }

  /**
   * Create the session base workspace (a worktree of the project repository
   * on the session base branch at baseRef) if it does not already exist, and
   * apply the sparse-checkout target path.
   */
  async initializeSessionBaseWorkspace(input: {
    sessionId: string;
    projectPath: string;
    baseWorkspacePath: string;
  }): Promise<void> {
    const projectPath = assertAbsolutePath(input.projectPath, "projectPath");
    const baseWorkspacePath = assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath");

    await mkdir(dirname(baseWorkspacePath), { recursive: true });

    if (!(await pathExists(baseWorkspacePath))) {
      // Resolve the true repo root in case projectPath is a subdirectory.
      const repoRoot = await runGit(["-C", projectPath, "rev-parse", "--show-toplevel"]);
      const branchName = this.resolveBaseBranchName(input.sessionId);
      await runGit(["-C", repoRoot, "worktree", "add", "-B", branchName, baseWorkspacePath, this.baseRef]);
    }

    await this.ensureWorktreeTargetPath(baseWorkspacePath);
  }

  /**
   * Ensure a task worktree exists on its task branch (created from the base
   * workspace HEAD), pruning stale worktree registrations first, and
   * tolerating a concurrent creation race.
   * @throws Error when the task branch is attached to a different worktree.
   */
  async ensureTaskWorktree(input: {
    sessionId: string;
    taskId: string;
    baseWorkspacePath: string;
    existingWorktreePath?: string;
  }): Promise<{
    taskWorktreePath: string;
    taskWorkingDirectory: string;
  }> {
    const baseWorkspacePath = assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath");
    const maybeExisting = input.existingWorktreePath?.trim();
    const worktreePath = maybeExisting
      ? assertAbsolutePath(maybeExisting, "existingWorktreePath")
      : this.resolveTaskWorktreePath(input.sessionId, input.taskId);
    const branchName = this.resolveTaskBranchName(input.sessionId, input.taskId);
    const attachedWorktree = await this.findWorktreePathForBranch(baseWorkspacePath, branchName);

    const normalizedWorktreePath = normalizeWorktreePath(worktreePath);
    const normalizedAttachedWorktree = attachedWorktree ? normalizeWorktreePath(attachedWorktree) : undefined;

    if (normalizedAttachedWorktree && normalizedAttachedWorktree !== normalizedWorktreePath) {
      throw new Error(
        `Task branch "${branchName}" is already attached to worktree "${attachedWorktree}", ` +
          `expected "${worktreePath}".`,
      );
    }

    // Directory missing but possibly still registered with git: prune stale
    // worktree records before re-adding.
    if (!(await pathExists(worktreePath))) {
      await runGit(["-C", baseWorkspacePath, "worktree", "prune", "--expire", "now"]);
    }

    if (!(await pathExists(worktreePath))) {
      await mkdir(dirname(worktreePath), { recursive: true });
      const addResult = await runGitWithResult([
        "-C",
        baseWorkspacePath,
        "worktree",
        "add",
        "-B",
        branchName,
        worktreePath,
        "HEAD",
      ]);
      if (addResult.exitCode !== 0) {
        // A concurrent creator may have won the race; accept the worktree if
        // it now exists and is attached to the expected branch.
        const attachedAfterFailure = await this.findWorktreePathForBranch(baseWorkspacePath, branchName);
        if (
          attachedAfterFailure &&
          normalizeWorktreePath(attachedAfterFailure) === normalizedWorktreePath &&
          (await pathExists(worktreePath))
        ) {
          const taskWorkingDirectory = await this.ensureWorktreeTargetPath(worktreePath);
          return {
            taskWorktreePath: worktreePath,
            taskWorkingDirectory,
          };
        }
        throw new Error(
          `git -C ${baseWorkspacePath} worktree add -B ${branchName} ${worktreePath} HEAD failed: ` +
            `${toGitFailureMessage(addResult)}`,
        );
      }
    }

    const taskWorkingDirectory = await this.ensureWorktreeTargetPath(worktreePath);
    return {
      taskWorktreePath: worktreePath,
      taskWorkingDirectory,
    };
  }

  /**
   * Merge a task's work back into the session base workspace.
   * Sequence: commit pending task changes; sync the task branch with the
   * base branch (surfacing conflicts in the task worktree); merge the task
   * branch into the base (surfacing conflicts in the base workspace); on
   * success remove the task worktree. Merge exit code 1 is treated as a
   * conflict; any other non-zero code as fatal. Never throws — every
   * failure path is folded into the returned outcome.
   */
  async mergeTaskIntoBase(input: {
    taskId: string;
    baseWorkspacePath: string;
    taskWorktreePath: string;
  }): Promise<MergeTaskIntoBaseOutcome> {
    const baseWorkspacePath = assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath");
    const taskWorktreePath = assertAbsolutePath(input.taskWorktreePath, "taskWorktreePath");
    const taskId = input.taskId;

    if (!(await pathExists(baseWorkspacePath))) {
      throw new Error(`Base workspace "${baseWorkspacePath}" does not exist.`);
    }
    if (!(await pathExists(taskWorktreePath))) {
      throw new Error(`Task worktree "${taskWorktreePath}" does not exist.`);
    }

    let mergeBase: string | undefined;
    try {
      // Stage and commit anything the task left uncommitted.
      await runGit(["-C", taskWorktreePath, "add", "-A"]);
      const hasPending = await this.hasStagedChanges(taskWorktreePath);
      if (hasPending) {
        await runGit(["-C", taskWorktreePath, "commit", "-m", `ai_ops: finalize task ${taskId}`]);
      }

      const branchName = await runGit(["-C", taskWorktreePath, "rev-parse", "--abbrev-ref", "HEAD"]);
      const baseBranch = await runGit(["-C", baseWorkspacePath, "rev-parse", "--abbrev-ref", "HEAD"]);
      mergeBase = await this.tryReadMergeBase(baseWorkspacePath, baseBranch, branchName);

      // A merge already in progress (e.g. from a previous attempt) is a
      // conflict the caller must resolve first.
      if (await this.hasOngoingMerge(taskWorktreePath)) {
        return {
          kind: "conflict",
          taskId,
          worktreePath: taskWorktreePath,
          baseWorkspacePath,
          conflictFiles: await this.readConflictFiles(taskWorktreePath),
          ...(mergeBase ? { mergeBase } : {}),
        };
      }

      // First bring the task branch up to date with the base branch so any
      // conflicts are resolved inside the task worktree.
      const syncTaskBranch = await runGitWithResult([
        "-C",
        taskWorktreePath,
        "merge",
        "--no-ff",
        "--no-edit",
        baseBranch,
      ]);

      if (syncTaskBranch.exitCode === 1) {
        return {
          kind: "conflict",
          taskId,
          worktreePath: taskWorktreePath,
          baseWorkspacePath,
          conflictFiles: await this.readConflictFiles(taskWorktreePath),
          ...(mergeBase ? { mergeBase } : {}),
        };
      }
      if (syncTaskBranch.exitCode !== 0) {
        return {
          kind: "fatal_error",
          taskId,
          worktreePath: taskWorktreePath,
          baseWorkspacePath,
          error: toGitFailureMessage(syncTaskBranch),
          ...(mergeBase ? { mergeBase } : {}),
        };
      }

      if (await this.hasOngoingMerge(baseWorkspacePath)) {
        return {
          kind: "conflict",
          taskId,
          worktreePath: baseWorkspacePath,
          baseWorkspacePath,
          conflictFiles: await this.readConflictFiles(baseWorkspacePath),
          ...(mergeBase ? { mergeBase } : {}),
        };
      }

      // Then merge the (now synced) task branch into the base workspace.
      const mergeIntoBase = await runGitWithResult([
        "-C",
        baseWorkspacePath,
        "merge",
        "--no-ff",
        "--no-edit",
        branchName,
      ]);

      if (mergeIntoBase.exitCode === 1) {
        return {
          kind: "conflict",
          taskId,
          worktreePath: baseWorkspacePath,
          baseWorkspacePath,
          conflictFiles: await this.readConflictFiles(baseWorkspacePath),
          ...(mergeBase ? { mergeBase } : {}),
        };
      }
      if (mergeIntoBase.exitCode !== 0) {
        return {
          kind: "fatal_error",
          taskId,
          worktreePath: taskWorktreePath,
          baseWorkspacePath,
          error: toGitFailureMessage(mergeIntoBase),
          ...(mergeBase ? { mergeBase } : {}),
        };
      }

      await this.removeWorktree({
        repoPath: baseWorkspacePath,
        worktreePath: taskWorktreePath,
      });

      return {
        kind: "success",
        taskId,
        worktreePath: taskWorktreePath,
        baseWorkspacePath,
      };
    } catch (error) {
      return {
        kind: "fatal_error",
        taskId,
        worktreePath: taskWorktreePath,
        baseWorkspacePath,
        error: toErrorMessage(error),
        ...(mergeBase ? { mergeBase } : {}),
      };
    }
  }

  /**
   * Close a session: remove all task worktrees, optionally merge the session
   * base branch into the project checkout, then remove the base workspace.
   * Merge exit code 1 => conflict outcome; other non-zero => fatal. Errors
   * thrown during cleanup are folded into a fatal outcome.
   */
  async closeSession(input: {
    session: SessionMetadata;
    taskWorktreePaths: string[];
    mergeBaseIntoProject?: boolean;
  }): Promise<CloseSessionOutcome> {
    const projectPath = assertAbsolutePath(input.session.projectPath, "projectPath");
    const baseWorkspacePath = assertAbsolutePath(input.session.baseWorkspacePath, "baseWorkspacePath");
    if (!(await pathExists(projectPath))) {
      throw new Error(`Project path "${projectPath}" does not exist.`);
    }
    if (!(await pathExists(baseWorkspacePath))) {
      throw new Error(`Base workspace "${baseWorkspacePath}" does not exist.`);
    }

    let baseBranch: string | undefined;
    let mergeBase: string | undefined;

    try {
      for (const taskWorktreePath of input.taskWorktreePaths) {
        if (!taskWorktreePath.trim()) {
          continue;
        }

        await this.removeWorktree({
          repoPath: baseWorkspacePath,
          worktreePath: taskWorktreePath,
        });
      }

      if (input.mergeBaseIntoProject) {
        baseBranch = await runGit(["-C", baseWorkspacePath, "rev-parse", "--abbrev-ref", "HEAD"]);
        mergeBase = await this.tryReadMergeBase(projectPath, "HEAD", baseBranch);

        // An in-progress merge in the project checkout must be resolved
        // before we can merge the session base into it.
        if (await this.hasOngoingMerge(projectPath)) {
          return {
            kind: "conflict",
            sessionId: input.session.sessionId,
            worktreePath: projectPath,
            conflictFiles: await this.readConflictFiles(projectPath),
            ...(baseBranch ? { baseBranch } : {}),
            ...(mergeBase ? { mergeBase } : {}),
          };
        }

        const mergeResult = await runGitWithResult([
          "-C",
          projectPath,
          "merge",
          "--no-ff",
          "--no-edit",
          baseBranch,
        ]);
        if (mergeResult.exitCode === 1) {
          return {
            kind: "conflict",
            sessionId: input.session.sessionId,
            worktreePath: projectPath,
            conflictFiles: await this.readConflictFiles(projectPath),
            ...(baseBranch ? { baseBranch } : {}),
            ...(mergeBase ? { mergeBase } : {}),
          };
        }
        if (mergeResult.exitCode !== 0) {
          return {
            kind: "fatal_error",
            sessionId: input.session.sessionId,
            error: toGitFailureMessage(mergeResult),
            ...(baseBranch ? { baseBranch } : {}),
            ...(mergeBase ? { mergeBase } : {}),
          };
        }
      }

      await this.removeWorktree({
        repoPath: projectPath,
        worktreePath: baseWorkspacePath,
      });

      return {
        kind: "success",
        sessionId: input.session.sessionId,
        mergedToProject: input.mergeBaseIntoProject === true,
      };
    } catch (error) {
      return {
        kind: "fatal_error",
        sessionId: input.session.sessionId,
        error: toErrorMessage(error),
        ...(baseBranch ? { baseBranch } : {}),
        ...(mergeBase ? { mergeBase } : {}),
      };
    }
  }

  // Remove a registered worktree (forcing past local modifications) and
  // prune stale records; a no-op if the directory is already gone.
  private async removeWorktree(input: {
    repoPath: string;
    worktreePath: string;
  }): Promise<void> {
    if (!(await pathExists(input.worktreePath))) {
      return;
    }

    await runGit(["-C", input.repoPath, "worktree", "remove", "--force", input.worktreePath]);
    await runGit(["-C", input.repoPath, "worktree", "prune"]);
  }

  // `git diff --cached --quiet` exits 0 when the index is clean and 1 when
  // there are staged changes; any other failure is surfaced as an error.
  private async hasStagedChanges(worktreePath: string): Promise<boolean> {
    try {
      await execFileAsync("git", ["-C", worktreePath, "diff", "--cached", "--quiet"], {
        encoding: "utf8",
      });
      return false;
    } catch (error) {
      const exitCode = (error as { code?: number }).code;
      if (exitCode === 1) {
        return true;
      }
      throw new Error(`Unable to inspect staged changes: ${toErrorMessage(error)}`);
    }
  }

  // MERGE_HEAD resolves only while a merge is in progress.
  private async hasOngoingMerge(worktreePath: string): Promise<boolean> {
    const result = await runGitWithResult([
      "-C",
      worktreePath,
      "rev-parse",
      "-q",
      "--verify",
      "MERGE_HEAD",
    ]);
    return result.exitCode === 0;
  }

  // Paths with unresolved merge conflicts (unmerged index entries); best
  // effort — git failures yield an empty list.
  private async readConflictFiles(worktreePath: string): Promise<string[]> {
    const result = await runGitWithResult([
      "-C",
      worktreePath,
      "diff",
      "--name-only",
      "--diff-filter=U",
    ]);
    if (result.exitCode !== 0) {
      return [];
    }
    return toStringLines(result.stdout);
  }

  // Best-effort `git merge-base`; undefined when the refs share no ancestor
  // or the command fails.
  private async tryReadMergeBase(
    repoPath: string,
    leftRef: string,
    rightRef: string,
  ): Promise<string | undefined> {
    const result = await runGitWithResult(["-C", repoPath, "merge-base", leftRef, rightRef]);
    if (result.exitCode !== 0) {
      return undefined;
    }
    const mergeBase = result.stdout.trim();
    return mergeBase || undefined;
  }

  // Resolve which worktree (if any) currently has `branchName` checked out.
  private async findWorktreePathForBranch(
    repoPath: string,
    branchName: string,
  ): Promise<string | undefined> {
    const branchRef = `refs/heads/${branchName}`;
    const records = await this.listWorktreeRecords(repoPath);
    const matched = records.find((record) => record.branchRef === branchRef);
    if (!matched) {
      return undefined;
    }
    return resolve(matched.path);
  }

  // Best-effort porcelain worktree listing; git failures yield an empty list.
  private async listWorktreeRecords(repoPath: string): Promise<GitWorktreeRecord[]> {
    const result = await runGitWithResult(["-C", repoPath, "worktree", "list", "--porcelain"]);
    if (result.exitCode !== 0) {
      return [];
    }
    return parseGitWorktreeRecords(result.stdout);
  }

  /**
   * Apply sparse-checkout for the configured target path (when set) and
   * return the working directory for the worktree, verifying it exists and
   * is a directory.
   * @throws Error when the target path is absent from the checked-out ref.
   */
  private async ensureWorktreeTargetPath(worktreePath: string): Promise<string> {
    if (this.targetPath) {
      await runGit(["-C", worktreePath, "sparse-checkout", "init", "--cone"]);
      await runGit(["-C", worktreePath, "sparse-checkout", "set", this.targetPath]);
    }

    const workingDirectory = this.resolveWorkingDirectoryForWorktree(worktreePath);
    let workingDirectoryStats;
    try {
      workingDirectoryStats = await stat(workingDirectory);
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code === "ENOENT") {
        if (this.targetPath) {
          throw new Error(
            `Configured worktree target path "${this.targetPath}" is not a directory in ref "${this.baseRef}".`,
          );
        }
        throw new Error(`Worktree path "${workingDirectory}" does not exist.`);
      }
      throw error;
    }

    if (!workingDirectoryStats.isDirectory()) {
      if (this.targetPath) {
        throw new Error(
          `Configured worktree target path "${this.targetPath}" is not a directory in ref "${this.baseRef}".`,
        );
      }
      throw new Error(`Worktree path "${workingDirectory}" is not a directory.`);
    }

    return workingDirectory;
  }
}
|
||||
|
||||
function normalizeWorktreeTargetPath(value: string | undefined, key: string): string | undefined {
|
||||
if (value === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const trimmed = value.trim();
|
||||
if (trimmed.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const slashNormalized = trimmed.replace(/\\/g, "/");
|
||||
if (isAbsolute(slashNormalized) || /^[a-zA-Z]:\//.test(slashNormalized)) {
|
||||
throw new Error(`${key} must be a relative path within the repository worktree.`);
|
||||
}
|
||||
|
||||
const normalizedSegments = slashNormalized
|
||||
.split("/")
|
||||
.map((segment) => segment.trim())
|
||||
.filter((segment) => segment.length > 0 && segment !== ".");
|
||||
|
||||
if (normalizedSegments.some((segment) => segment === "..")) {
|
||||
throw new Error(`${key} must not contain ".." path segments.`);
|
||||
}
|
||||
|
||||
if (normalizedSegments.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return normalizedSegments.join("/");
|
||||
}
|
||||
137
src/config.ts
137
src/config.ts
@@ -16,9 +16,22 @@ export type ProviderRuntimeConfig = {
|
||||
anthropicApiKey?: string;
|
||||
claudeModel?: string;
|
||||
claudeCodePath?: string;
|
||||
claudeMaxTurns: number;
|
||||
claudeObservability: ClaudeObservabilityRuntimeConfig;
|
||||
};
|
||||
|
||||
export type OpenAiAuthMode = "auto" | "chatgpt" | "api_key";
// Destination for Claude binary observability events.
export type ClaudeObservabilityMode = "off" | "stdout" | "file" | "both";
// Level of detail carried by observability events.
export type ClaudeObservabilityVerbosity = "summary" | "full";

// Runtime configuration for Claude binary observability
// (see CLAUDE_OBSERVABILITY_* in .env.example).
export type ClaudeObservabilityRuntimeConfig = {
  mode: ClaudeObservabilityMode;
  verbosity: ClaudeObservabilityVerbosity;
  // Relative NDJSON trace path; per .env.example it is resolved against the
  // repository workspace root in UI/provider runs.
  logPath: string;
  includePartialMessages: boolean;
  debug: boolean;
  debugLogPath?: string;
};
|
||||
|
||||
export type McpRuntimeConfig = {
|
||||
configPath: string;
|
||||
@@ -30,6 +43,7 @@ export type OrchestrationRuntimeConfig = {
|
||||
maxDepth: number;
|
||||
maxRetries: number;
|
||||
maxChildren: number;
|
||||
mergeConflictMaxAttempts: number;
|
||||
};
|
||||
|
||||
export type DiscoveryRuntimeConfig = {
|
||||
@@ -77,6 +91,7 @@ const DEFAULT_ORCHESTRATION: OrchestrationRuntimeConfig = {
|
||||
maxDepth: 4,
|
||||
maxRetries: 2,
|
||||
maxChildren: 4,
|
||||
mergeConflictMaxAttempts: 2,
|
||||
};
|
||||
|
||||
const DEFAULT_PROVISIONING: BuiltInProvisioningConfig = {
|
||||
@@ -113,6 +128,17 @@ const DEFAULT_RUNTIME_EVENTS: RuntimeEventRuntimeConfig = {
|
||||
discordAlwaysNotifyTypes: ["session.started", "session.completed", "session.failed"],
|
||||
};
|
||||
|
||||
// Defaults mirror .env.example: observability is disabled out of the box.
const DEFAULT_CLAUDE_OBSERVABILITY: ClaudeObservabilityRuntimeConfig = {
  mode: "off",
  verbosity: "summary",
  logPath: ".ai_ops/events/claude-trace.ndjson",
  includePartialMessages: false,
  debug: false,
  debugLogPath: undefined,
};

// Fallback for CLAUDE_MAX_TURNS when the env var is unset.
const DEFAULT_CLAUDE_MAX_TURNS = 2;
|
||||
|
||||
function readOptionalString(
|
||||
env: NodeJS.ProcessEnv,
|
||||
key: string,
|
||||
@@ -124,6 +150,50 @@ function readOptionalString(
|
||||
return value;
|
||||
}
|
||||
|
||||
function readOptionalRelativePath(
|
||||
env: NodeJS.ProcessEnv,
|
||||
key: string,
|
||||
): string | undefined {
|
||||
const value = readOptionalString(env, key);
|
||||
if (!value) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const slashNormalized = value.replace(/\\/g, "/");
|
||||
if (slashNormalized.startsWith("/") || /^[a-zA-Z]:\//.test(slashNormalized)) {
|
||||
throw new Error(`Environment variable ${key} must be a relative path.`);
|
||||
}
|
||||
|
||||
const normalizedSegments = slashNormalized
|
||||
.split("/")
|
||||
.map((segment) => segment.trim())
|
||||
.filter((segment) => segment.length > 0 && segment !== ".");
|
||||
|
||||
if (normalizedSegments.some((segment) => segment === "..")) {
|
||||
throw new Error(`Environment variable ${key} must not contain ".." path segments.`);
|
||||
}
|
||||
|
||||
if (normalizedSegments.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return normalizedSegments.join("/");
|
||||
}
|
||||
|
||||
function normalizeClaudeModel(value: string | undefined): string | undefined {
|
||||
if (!value) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const anthropicPrefix = "anthropic/";
|
||||
if (!value.startsWith(anthropicPrefix)) {
|
||||
return value;
|
||||
}
|
||||
|
||||
const normalized = value.slice(anthropicPrefix.length).trim();
|
||||
return normalized || undefined;
|
||||
}
|
||||
|
||||
function readStringWithFallback(
|
||||
env: NodeJS.ProcessEnv,
|
||||
key: string,
|
||||
@@ -228,6 +298,26 @@ function parseOpenAiAuthMode(raw: string): OpenAiAuthMode {
|
||||
);
|
||||
}
|
||||
|
||||
function parseClaudeObservabilityMode(raw: string): ClaudeObservabilityMode {
|
||||
if (raw === "off" || raw === "stdout" || raw === "file" || raw === "both") {
|
||||
return raw;
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
'Environment variable CLAUDE_OBSERVABILITY_MODE must be one of: "off", "stdout", "file", "both".',
|
||||
);
|
||||
}
|
||||
|
||||
function parseClaudeObservabilityVerbosity(raw: string): ClaudeObservabilityVerbosity {
|
||||
if (raw === "summary" || raw === "full") {
|
||||
return raw;
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
'Environment variable CLAUDE_OBSERVABILITY_VERBOSITY must be one of: "summary", "full".',
|
||||
);
|
||||
}
|
||||
|
||||
function deepFreeze<T>(value: T): Readonly<T> {
|
||||
if (value === null || typeof value !== "object") {
|
||||
return value;
|
||||
@@ -312,8 +402,46 @@ export function loadConfig(env: NodeJS.ProcessEnv = process.env): Readonly<AppCo
|
||||
codexSkipGitCheck: readBooleanWithFallback(env, "CODEX_SKIP_GIT_CHECK", true),
|
||||
anthropicOauthToken,
|
||||
anthropicApiKey,
|
||||
claudeModel: readOptionalString(env, "CLAUDE_MODEL"),
|
||||
claudeModel: normalizeClaudeModel(readOptionalString(env, "CLAUDE_MODEL")),
|
||||
claudeCodePath: readOptionalString(env, "CLAUDE_CODE_PATH"),
|
||||
claudeMaxTurns: readIntegerWithBounds(
|
||||
env,
|
||||
"CLAUDE_MAX_TURNS",
|
||||
DEFAULT_CLAUDE_MAX_TURNS,
|
||||
{ min: 1 },
|
||||
),
|
||||
claudeObservability: {
|
||||
mode: parseClaudeObservabilityMode(
|
||||
readStringWithFallback(
|
||||
env,
|
||||
"CLAUDE_OBSERVABILITY_MODE",
|
||||
DEFAULT_CLAUDE_OBSERVABILITY.mode,
|
||||
),
|
||||
),
|
||||
verbosity: parseClaudeObservabilityVerbosity(
|
||||
readStringWithFallback(
|
||||
env,
|
||||
"CLAUDE_OBSERVABILITY_VERBOSITY",
|
||||
DEFAULT_CLAUDE_OBSERVABILITY.verbosity,
|
||||
),
|
||||
),
|
||||
logPath: readStringWithFallback(
|
||||
env,
|
||||
"CLAUDE_OBSERVABILITY_LOG_PATH",
|
||||
DEFAULT_CLAUDE_OBSERVABILITY.logPath,
|
||||
),
|
||||
includePartialMessages: readBooleanWithFallback(
|
||||
env,
|
||||
"CLAUDE_OBSERVABILITY_INCLUDE_PARTIAL",
|
||||
DEFAULT_CLAUDE_OBSERVABILITY.includePartialMessages,
|
||||
),
|
||||
debug: readBooleanWithFallback(
|
||||
env,
|
||||
"CLAUDE_OBSERVABILITY_DEBUG",
|
||||
DEFAULT_CLAUDE_OBSERVABILITY.debug,
|
||||
),
|
||||
debugLogPath: readOptionalString(env, "CLAUDE_OBSERVABILITY_DEBUG_LOG_PATH"),
|
||||
},
|
||||
},
|
||||
mcp: {
|
||||
configPath: readStringWithFallback(env, "MCP_CONFIG_PATH", "./mcp.config.json"),
|
||||
@@ -367,6 +495,12 @@ export function loadConfig(env: NodeJS.ProcessEnv = process.env): Readonly<AppCo
|
||||
DEFAULT_ORCHESTRATION.maxChildren,
|
||||
{ min: 1 },
|
||||
),
|
||||
mergeConflictMaxAttempts: readIntegerWithBounds(
|
||||
env,
|
||||
"AGENT_MERGE_CONFLICT_MAX_ATTEMPTS",
|
||||
DEFAULT_ORCHESTRATION.mergeConflictMaxAttempts,
|
||||
{ min: 1 },
|
||||
),
|
||||
},
|
||||
provisioning: {
|
||||
gitWorktree: {
|
||||
@@ -380,6 +514,7 @@ export function loadConfig(env: NodeJS.ProcessEnv = process.env): Readonly<AppCo
|
||||
"AGENT_WORKTREE_BASE_REF",
|
||||
DEFAULT_PROVISIONING.gitWorktree.baseRef,
|
||||
),
|
||||
targetPath: readOptionalRelativePath(env, "AGENT_WORKTREE_TARGET_PATH"),
|
||||
},
|
||||
portRange: {
|
||||
basePort: readIntegerWithBounds(
|
||||
|
||||
@@ -19,7 +19,7 @@ function requiredPrompt(argv: string[]): string {
|
||||
|
||||
function buildOptions(config = getConfig()): Options {
|
||||
return {
|
||||
maxTurns: 1,
|
||||
maxTurns: config.provider.claudeMaxTurns,
|
||||
...(config.provider.claudeModel ? { model: config.provider.claudeModel } : {}),
|
||||
...(config.provider.claudeCodePath
|
||||
? { pathToClaudeCodeExecutable: config.provider.claudeCodePath }
|
||||
@@ -85,6 +85,7 @@ export async function runClaudePrompt(
|
||||
const writeOutput = dependencies.writeOutput ?? ((output: string) => console.log(output));
|
||||
const sessionContext = await createSessionContextFn("claude", {
|
||||
prompt,
|
||||
workspaceRoot: process.cwd(),
|
||||
config,
|
||||
});
|
||||
|
||||
|
||||
@@ -48,6 +48,7 @@ export async function runCodexPrompt(
|
||||
const writeOutput = dependencies.writeOutput ?? ((output: string) => console.log(output));
|
||||
const sessionContext = await createSessionContextFn("codex", {
|
||||
prompt,
|
||||
workspaceRoot: process.cwd(),
|
||||
config,
|
||||
});
|
||||
|
||||
|
||||
@@ -28,6 +28,7 @@ export async function createSessionContext(
|
||||
provider: SessionProvider,
|
||||
input: {
|
||||
prompt: string;
|
||||
workspaceRoot: string;
|
||||
config?: Readonly<AppConfig>;
|
||||
mcpRegistry?: McpRegistry;
|
||||
},
|
||||
@@ -58,6 +59,7 @@ export async function createSessionContext(
|
||||
provisionedResources = await resourceProvisioning.provisionSession({
|
||||
sessionId: agentSession.id,
|
||||
resources: [{ kind: "git-worktree" }, { kind: "port-range" }],
|
||||
workspaceRoot: input.workspaceRoot,
|
||||
});
|
||||
|
||||
const providerAuthEnv =
|
||||
@@ -82,6 +84,7 @@ export async function createSessionContext(
|
||||
{
|
||||
providerHint: provider,
|
||||
prompt: input.prompt,
|
||||
workingDirectory: runtimeInjection.workingDirectory,
|
||||
},
|
||||
{
|
||||
config,
|
||||
|
||||
18
src/mcp.ts
18
src/mcp.ts
@@ -1,5 +1,5 @@
|
||||
import { existsSync, readFileSync } from "node:fs";
|
||||
import { resolve } from "node:path";
|
||||
import { isAbsolute, resolve } from "node:path";
|
||||
import type { CodexOptions } from "@openai/codex-sdk";
|
||||
import { getConfig, type AppConfig } from "./config.js";
|
||||
import { normalizeSharedMcpConfigFile } from "./mcp/converters.js";
|
||||
@@ -23,12 +23,17 @@ import type {
|
||||
import { parseMcpConfig } from "./mcp/types.js";
|
||||
import type { ToolClearancePolicy } from "./security/schemas.js";
|
||||
|
||||
function readConfigFile(configPath: string): {
|
||||
function readConfigFile(input: {
|
||||
configPath: string;
|
||||
workingDirectory?: string;
|
||||
}): {
|
||||
config?: SharedMcpConfigFile;
|
||||
sourcePath?: string;
|
||||
} {
|
||||
const candidatePath = configPath.trim() || "./mcp.config.json";
|
||||
const resolvedPath = resolve(process.cwd(), candidatePath);
|
||||
const candidatePath = input.configPath.trim() || "./mcp.config.json";
|
||||
const resolvedPath = isAbsolute(candidatePath)
|
||||
? candidatePath
|
||||
: resolve(input.workingDirectory ?? process.cwd(), candidatePath);
|
||||
|
||||
if (!existsSync(resolvedPath)) {
|
||||
if (candidatePath !== "./mcp.config.json") {
|
||||
@@ -83,7 +88,10 @@ export function loadMcpConfigFromEnv(
|
||||
const registry = options?.registry ?? defaultMcpRegistry;
|
||||
const warn = options?.warn ?? ((message: string) => console.warn(message));
|
||||
|
||||
const { config, sourcePath } = readConfigFile(runtimeConfig.mcp.configPath);
|
||||
const { config, sourcePath } = readConfigFile({
|
||||
configPath: runtimeConfig.mcp.configPath,
|
||||
workingDirectory: context.workingDirectory,
|
||||
});
|
||||
if (!config) {
|
||||
return {};
|
||||
}
|
||||
|
||||
@@ -50,6 +50,7 @@ export type SharedMcpConfigFile = {
|
||||
export type McpLoadContext = {
|
||||
providerHint?: "codex" | "claude" | "both";
|
||||
prompt?: string;
|
||||
workingDirectory?: string;
|
||||
};
|
||||
|
||||
export type LoadedMcpConfig = {
|
||||
|
||||
@@ -1,16 +1,27 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { mkdir, readFile, writeFile } from "node:fs/promises";
|
||||
import { resolve } from "node:path";
|
||||
import { JSONFilePreset } from "lowdb/node";
|
||||
import { SchemaDrivenExecutionEngine } from "../agents/orchestration.js";
|
||||
import { parseAgentManifest, type AgentManifest } from "../agents/manifest.js";
|
||||
import type { ActorExecutionResult, ActorExecutor } from "../agents/pipeline.js";
|
||||
import { FileSystemProjectContextStore } from "../agents/project-context.js";
|
||||
import type {
|
||||
ActorExecutionResult,
|
||||
ActorExecutor,
|
||||
PipelineAggregateStatus,
|
||||
} from "../agents/pipeline.js";
|
||||
import {
|
||||
FileSystemSessionMetadataStore,
|
||||
SessionWorktreeManager,
|
||||
type SessionMetadata,
|
||||
} from "../agents/session-lifecycle.js";
|
||||
import { loadConfig, type AppConfig } from "../config.js";
|
||||
import { parseEnvFile } from "./env-store.js";
|
||||
import { parseEnvFile } from "../store/env-store.js";
|
||||
import {
|
||||
createProviderActorExecutor,
|
||||
createProviderRunRuntime,
|
||||
type RunProvider,
|
||||
} from "./provider-executor.js";
|
||||
} from "../agents/provider-executor.js";
|
||||
|
||||
const RUN_META_FILE_NAME = "ui-run-meta.json";
|
||||
|
||||
@@ -43,6 +54,10 @@ export type RunRecord = {
|
||||
error?: string;
|
||||
};
|
||||
|
||||
function toRunStatus(status: PipelineAggregateStatus): Extract<RunStatus, "success" | "failure"> {
|
||||
return status === "success" ? "success" : "failure";
|
||||
}
|
||||
|
||||
type ActiveRun = {
|
||||
controller: AbortController;
|
||||
record: RunRecord;
|
||||
@@ -232,6 +247,19 @@ async function loadRuntimeConfig(envPath: string): Promise<Readonly<AppConfig>>
|
||||
});
|
||||
}
|
||||
|
||||
function resolveRuntimePaths(input: {
|
||||
workspaceRoot: string;
|
||||
config: Readonly<AppConfig>;
|
||||
}): {
|
||||
stateRoot: string;
|
||||
worktreeRoot: string;
|
||||
} {
|
||||
return {
|
||||
stateRoot: resolve(input.workspaceRoot, input.config.orchestration.stateRoot),
|
||||
worktreeRoot: resolve(input.workspaceRoot, input.config.provisioning.gitWorktree.rootDirectory),
|
||||
};
|
||||
}
|
||||
|
||||
async function writeRunMeta(input: {
|
||||
stateRoot: string;
|
||||
sessionId: string;
|
||||
@@ -240,7 +268,9 @@ async function writeRunMeta(input: {
|
||||
const sessionDirectory = resolve(input.stateRoot, input.sessionId);
|
||||
await mkdir(sessionDirectory, { recursive: true });
|
||||
const path = resolve(sessionDirectory, RUN_META_FILE_NAME);
|
||||
await writeFile(path, `${JSON.stringify(input.run, null, 2)}\n`, "utf8");
|
||||
const db = await JSONFilePreset<RunRecord>(path, input.run);
|
||||
db.data = input.run;
|
||||
await db.write();
|
||||
}
|
||||
|
||||
export async function readRunMetaBySession(input: {
|
||||
@@ -271,9 +301,9 @@ export async function readRunMetaBySession(input: {
|
||||
sessionId: record.sessionId,
|
||||
status:
|
||||
record.status === "running" ||
|
||||
record.status === "success" ||
|
||||
record.status === "failure" ||
|
||||
record.status === "cancelled"
|
||||
record.status === "success" ||
|
||||
record.status === "failure" ||
|
||||
record.status === "cancelled"
|
||||
? record.status
|
||||
: "failure",
|
||||
startedAt: record.startedAt,
|
||||
@@ -311,6 +341,103 @@ export class UiRunService {
|
||||
this.envFilePath = resolve(this.workspaceRoot, input.envFilePath ?? ".env");
|
||||
}
|
||||
|
||||
private async loadRuntime(): Promise<{
|
||||
config: Readonly<AppConfig>;
|
||||
stateRoot: string;
|
||||
sessionStore: FileSystemSessionMetadataStore;
|
||||
worktreeManager: SessionWorktreeManager;
|
||||
}> {
|
||||
const config = await loadRuntimeConfig(this.envFilePath);
|
||||
const paths = resolveRuntimePaths({
|
||||
workspaceRoot: this.workspaceRoot,
|
||||
config,
|
||||
});
|
||||
|
||||
return {
|
||||
config,
|
||||
stateRoot: paths.stateRoot,
|
||||
sessionStore: new FileSystemSessionMetadataStore({
|
||||
stateRoot: paths.stateRoot,
|
||||
}),
|
||||
worktreeManager: new SessionWorktreeManager({
|
||||
worktreeRoot: paths.worktreeRoot,
|
||||
baseRef: config.provisioning.gitWorktree.baseRef,
|
||||
targetPath: config.provisioning.gitWorktree.targetPath,
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
async createSession(input: {
|
||||
projectPath: string;
|
||||
sessionId?: string;
|
||||
}): Promise<SessionMetadata> {
|
||||
const runtime = await this.loadRuntime();
|
||||
const sessionId = input.sessionId?.trim() || toSessionId();
|
||||
const baseWorkspacePath = runtime.worktreeManager.resolveBaseWorkspacePath(sessionId);
|
||||
const session = await runtime.sessionStore.createSession({
|
||||
sessionId,
|
||||
projectPath: resolve(input.projectPath),
|
||||
baseWorkspacePath,
|
||||
});
|
||||
|
||||
await runtime.worktreeManager.initializeSessionBaseWorkspace({
|
||||
sessionId: session.sessionId,
|
||||
projectPath: session.projectPath,
|
||||
baseWorkspacePath: session.baseWorkspacePath,
|
||||
});
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
async listSessions(): Promise<SessionMetadata[]> {
|
||||
const runtime = await this.loadRuntime();
|
||||
return runtime.sessionStore.listSessions();
|
||||
}
|
||||
|
||||
async readSession(sessionId: string): Promise<SessionMetadata | undefined> {
|
||||
const runtime = await this.loadRuntime();
|
||||
return runtime.sessionStore.readSession(sessionId);
|
||||
}
|
||||
|
||||
async closeSession(input: {
|
||||
sessionId: string;
|
||||
mergeToProject?: boolean;
|
||||
}): Promise<SessionMetadata> {
|
||||
const runtime = await this.loadRuntime();
|
||||
const session = await runtime.sessionStore.readSession(input.sessionId);
|
||||
if (!session) {
|
||||
throw new Error(`Session \"${input.sessionId}\" does not exist.`);
|
||||
}
|
||||
|
||||
const sessionProjectContextStore = new FileSystemProjectContextStore({
|
||||
filePath: runtime.sessionStore.getSessionProjectContextPath(session.sessionId),
|
||||
});
|
||||
const projectContext = await sessionProjectContextStore.readState();
|
||||
const taskWorktreePaths = projectContext.taskQueue
|
||||
.map((task) => task.worktreePath)
|
||||
.filter((path): path is string => typeof path === "string" && path.trim().length > 0);
|
||||
|
||||
const outcome = await runtime.worktreeManager.closeSession({
|
||||
session,
|
||||
taskWorktreePaths,
|
||||
mergeBaseIntoProject: input.mergeToProject === true,
|
||||
});
|
||||
|
||||
if (outcome.kind === "fatal_error") {
|
||||
throw new Error(`Session close failed: ${outcome.error}`);
|
||||
}
|
||||
|
||||
if (outcome.kind === "conflict") {
|
||||
return runtime.sessionStore.updateSession(session.sessionId, {
|
||||
sessionStatus: "closed_with_conflicts",
|
||||
});
|
||||
}
|
||||
|
||||
return runtime.sessionStore.updateSession(session.sessionId, {
|
||||
sessionStatus: "closed",
|
||||
});
|
||||
}
|
||||
|
||||
listRuns(): RunRecord[] {
|
||||
const output = [...this.runHistory.values()].sort((left, right) => {
|
||||
return right.startedAt.localeCompare(left.startedAt);
|
||||
@@ -323,11 +450,24 @@ export class UiRunService {
|
||||
}
|
||||
|
||||
async startRun(input: StartRunInput): Promise<RunRecord> {
|
||||
const config = await loadRuntimeConfig(this.envFilePath);
|
||||
const runtime = await this.loadRuntime();
|
||||
const config = runtime.config;
|
||||
const manifest = parseAgentManifest(input.manifest);
|
||||
const executionMode = input.executionMode ?? "mock";
|
||||
const provider = input.provider ?? "codex";
|
||||
const sessionId = input.sessionId?.trim() || toSessionId();
|
||||
const session = input.sessionId?.trim()
|
||||
? await runtime.sessionStore.readSession(sessionId)
|
||||
: undefined;
|
||||
if (input.sessionId?.trim() && !session) {
|
||||
throw new Error(`Session \"${sessionId}\" does not exist.`);
|
||||
}
|
||||
if (
|
||||
session &&
|
||||
(session.sessionStatus === "closed" || session.sessionStatus === "closed_with_conflicts")
|
||||
) {
|
||||
throw new Error(`Session \"${sessionId}\" is closed and cannot run new tasks.`);
|
||||
}
|
||||
const runId = randomUUID();
|
||||
const controller = new AbortController();
|
||||
|
||||
@@ -349,8 +489,9 @@ export class UiRunService {
|
||||
if (executionMode === "provider") {
|
||||
providerRuntime = await createProviderRunRuntime({
|
||||
provider,
|
||||
initialPrompt: input.prompt,
|
||||
config,
|
||||
observabilityRootPath: this.workspaceRoot,
|
||||
baseEnv: process.env,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -358,21 +499,30 @@ export class UiRunService {
|
||||
executionMode === "provider" && providerRuntime
|
||||
? createSingleExecutorMap(manifest, createProviderActorExecutor(providerRuntime))
|
||||
: createMockActorExecutors(manifest, {
|
||||
prompt: input.prompt,
|
||||
topologyHint: input.topologyHint,
|
||||
simulateValidationNodeIds: new Set(input.simulateValidationNodeIds ?? []),
|
||||
});
|
||||
prompt: input.prompt,
|
||||
topologyHint: input.topologyHint,
|
||||
simulateValidationNodeIds: new Set(input.simulateValidationNodeIds ?? []),
|
||||
});
|
||||
|
||||
const engine = new SchemaDrivenExecutionEngine({
|
||||
manifest,
|
||||
actorExecutors,
|
||||
settings: {
|
||||
workspaceRoot: this.workspaceRoot,
|
||||
stateRoot: config.orchestration.stateRoot,
|
||||
projectContextPath: config.orchestration.projectContextPath,
|
||||
stateRoot: runtime.stateRoot,
|
||||
projectContextPath: session
|
||||
? runtime.sessionStore.getSessionProjectContextPath(sessionId)
|
||||
: resolve(this.workspaceRoot, config.orchestration.projectContextPath),
|
||||
runtimeContext: {
|
||||
ui_mode: executionMode,
|
||||
run_provider: provider,
|
||||
...(session
|
||||
? {
|
||||
session_id: sessionId,
|
||||
project_path: session.projectPath,
|
||||
base_workspace_path: session.baseWorkspacePath,
|
||||
}
|
||||
: {}),
|
||||
...(input.runtimeContextOverrides ?? {}),
|
||||
},
|
||||
},
|
||||
@@ -380,12 +530,12 @@ export class UiRunService {
|
||||
});
|
||||
|
||||
await writeRunMeta({
|
||||
stateRoot: config.orchestration.stateRoot,
|
||||
stateRoot: runtime.stateRoot,
|
||||
sessionId,
|
||||
run: record,
|
||||
});
|
||||
|
||||
await engine.runSession({
|
||||
const summary = await engine.runSession({
|
||||
sessionId,
|
||||
initialPayload: {
|
||||
prompt: input.prompt,
|
||||
@@ -396,6 +546,7 @@ export class UiRunService {
|
||||
},
|
||||
},
|
||||
signal: controller.signal,
|
||||
...(session ? { sessionMetadata: session } : {}),
|
||||
});
|
||||
|
||||
const completedRecord = this.runHistory.get(runId);
|
||||
@@ -405,13 +556,13 @@ export class UiRunService {
|
||||
|
||||
const next: RunRecord = {
|
||||
...completedRecord,
|
||||
status: "success",
|
||||
status: toRunStatus(summary.status),
|
||||
endedAt: new Date().toISOString(),
|
||||
};
|
||||
this.runHistory.set(runId, next);
|
||||
|
||||
await writeRunMeta({
|
||||
stateRoot: config.orchestration.stateRoot,
|
||||
stateRoot: runtime.stateRoot,
|
||||
sessionId,
|
||||
run: next,
|
||||
});
|
||||
@@ -431,7 +582,7 @@ export class UiRunService {
|
||||
this.runHistory.set(runId, next);
|
||||
|
||||
await writeRunMeta({
|
||||
stateRoot: config.orchestration.stateRoot,
|
||||
stateRoot: runtime.stateRoot,
|
||||
sessionId,
|
||||
run: next,
|
||||
});
|
||||
@@ -8,46 +8,49 @@ import {
|
||||
import {
|
||||
parseShellValidationPolicy,
|
||||
parseToolClearancePolicy,
|
||||
type SecurityViolationHandling,
|
||||
type ShellValidationPolicy,
|
||||
type ToolClearancePolicy,
|
||||
} from "./schemas.js";
|
||||
|
||||
export type SecurityAuditEvent =
|
||||
| {
|
||||
| ({
|
||||
type: "shell.command_profiled";
|
||||
timestamp: string;
|
||||
command: string;
|
||||
cwd: string;
|
||||
parsed: ParsedShellScript;
|
||||
}
|
||||
| {
|
||||
} & SecurityAuditContext)
|
||||
| ({
|
||||
type: "shell.command_allowed";
|
||||
timestamp: string;
|
||||
command: string;
|
||||
cwd: string;
|
||||
commandCount: number;
|
||||
}
|
||||
| {
|
||||
} & SecurityAuditContext)
|
||||
| ({
|
||||
type: "shell.command_blocked";
|
||||
timestamp: string;
|
||||
command: string;
|
||||
cwd: string;
|
||||
reason: string;
|
||||
code: string;
|
||||
details?: Record<string, unknown>;
|
||||
}
|
||||
| {
|
||||
} & SecurityAuditContext)
|
||||
| ({
|
||||
type: "tool.invocation_allowed";
|
||||
timestamp: string;
|
||||
tool: string;
|
||||
}
|
||||
| {
|
||||
} & SecurityAuditContext)
|
||||
| ({
|
||||
type: "tool.invocation_blocked";
|
||||
timestamp: string;
|
||||
tool: string;
|
||||
reason: string;
|
||||
code: string;
|
||||
};
|
||||
} & SecurityAuditContext);
|
||||
|
||||
export type SecurityAuditContext = {
|
||||
timestamp: string;
|
||||
sessionId?: string;
|
||||
nodeId?: string;
|
||||
attempt?: number;
|
||||
};
|
||||
|
||||
export type SecurityAuditSink = (event: SecurityAuditEvent) => void;
|
||||
|
||||
@@ -60,6 +63,10 @@ function normalizeToken(value: string): string {
|
||||
return value.trim();
|
||||
}
|
||||
|
||||
function normalizeLookupToken(value: string): string {
|
||||
return normalizeToken(value).toLowerCase();
|
||||
}
|
||||
|
||||
function hasPathTraversalSegment(token: string): boolean {
|
||||
const normalized = token.replaceAll("\\", "/");
|
||||
if (normalized === ".." || normalized.startsWith("../") || normalized.endsWith("/..")) {
|
||||
@@ -98,10 +105,44 @@ function toToolSet(values: readonly string[]): Set<string> {
|
||||
return out;
|
||||
}
|
||||
|
||||
function toCaseInsensitiveLookup(values: readonly string[]): Map<string, string> {
|
||||
const out = new Map<string, string>();
|
||||
for (const value of values) {
|
||||
const normalized = normalizeLookupToken(value);
|
||||
if (!normalized || out.has(normalized)) {
|
||||
continue;
|
||||
}
|
||||
out.set(normalized, value);
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
function toNow(): string {
|
||||
return new Date().toISOString();
|
||||
}
|
||||
|
||||
function toAuditContext(input?: {
|
||||
sessionId?: string;
|
||||
nodeId?: string;
|
||||
attempt?: number;
|
||||
}): SecurityAuditContext {
|
||||
const output: SecurityAuditContext = {
|
||||
timestamp: toNow(),
|
||||
};
|
||||
|
||||
if (input?.sessionId) {
|
||||
output.sessionId = input.sessionId;
|
||||
}
|
||||
if (input?.nodeId) {
|
||||
output.nodeId = input.nodeId;
|
||||
}
|
||||
if (typeof input?.attempt === "number" && Number.isInteger(input.attempt) && input.attempt >= 1) {
|
||||
output.attempt = input.attempt;
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
export class SecurityRulesEngine {
|
||||
private readonly policy: ShellValidationPolicy;
|
||||
private readonly allowedBinaries: Set<string>;
|
||||
@@ -109,10 +150,14 @@ export class SecurityRulesEngine {
|
||||
private readonly blockedEnvAssignments: Set<string>;
|
||||
private readonly worktreeRoot: string;
|
||||
private readonly protectedPaths: string[];
|
||||
private readonly violationHandling: SecurityViolationHandling;
|
||||
|
||||
constructor(
|
||||
policy: ShellValidationPolicy,
|
||||
private readonly auditSink?: SecurityAuditSink,
|
||||
options?: {
|
||||
violationHandling?: SecurityViolationHandling;
|
||||
},
|
||||
) {
|
||||
this.policy = parseShellValidationPolicy(policy);
|
||||
this.allowedBinaries = toToolSet(this.policy.allowedBinaries);
|
||||
@@ -120,6 +165,7 @@ export class SecurityRulesEngine {
|
||||
this.blockedEnvAssignments = toToolSet(this.policy.blockedEnvAssignments);
|
||||
this.worktreeRoot = resolve(this.policy.worktreeRoot);
|
||||
this.protectedPaths = this.policy.protectedPaths.map((path) => resolve(path));
|
||||
this.violationHandling = options?.violationHandling ?? "hard_abort";
|
||||
}
|
||||
|
||||
getPolicy(): ShellValidationPolicy {
|
||||
@@ -136,6 +182,11 @@ export class SecurityRulesEngine {
|
||||
command: string;
|
||||
cwd: string;
|
||||
toolClearance?: ToolClearancePolicy;
|
||||
context?: {
|
||||
sessionId?: string;
|
||||
nodeId?: string;
|
||||
attempt?: number;
|
||||
};
|
||||
}): Promise<ValidatedShellCommand> {
|
||||
const resolvedCwd = resolve(input.cwd);
|
||||
|
||||
@@ -147,22 +198,22 @@ export class SecurityRulesEngine {
|
||||
: undefined;
|
||||
|
||||
this.emit({
|
||||
...toAuditContext(input.context),
|
||||
type: "shell.command_profiled",
|
||||
timestamp: toNow(),
|
||||
command: input.command,
|
||||
cwd: resolvedCwd,
|
||||
parsed,
|
||||
});
|
||||
|
||||
for (const command of parsed.commands) {
|
||||
this.assertBinaryAllowed(command, toolClearance);
|
||||
this.assertBinaryAllowed(command, toolClearance, input.context);
|
||||
this.assertAssignmentsAllowed(command);
|
||||
this.assertArgumentPaths(command, resolvedCwd);
|
||||
}
|
||||
|
||||
this.emit({
|
||||
...toAuditContext(input.context),
|
||||
type: "shell.command_allowed",
|
||||
timestamp: toNow(),
|
||||
command: input.command,
|
||||
cwd: resolvedCwd,
|
||||
commandCount: parsed.commandCount,
|
||||
@@ -175,14 +226,23 @@ export class SecurityRulesEngine {
|
||||
} catch (error) {
|
||||
if (error instanceof SecurityViolationError) {
|
||||
this.emit({
|
||||
...toAuditContext(input.context),
|
||||
type: "shell.command_blocked",
|
||||
timestamp: toNow(),
|
||||
command: input.command,
|
||||
cwd: resolvedCwd,
|
||||
reason: error.message,
|
||||
code: error.code,
|
||||
details: error.details,
|
||||
});
|
||||
if (this.violationHandling === "dangerous_warn_only") {
|
||||
return {
|
||||
cwd: resolvedCwd,
|
||||
parsed: {
|
||||
commandCount: 0,
|
||||
commands: [],
|
||||
},
|
||||
};
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
@@ -196,13 +256,21 @@ export class SecurityRulesEngine {
|
||||
assertToolInvocationAllowed(input: {
|
||||
tool: string;
|
||||
toolClearance: ToolClearancePolicy;
|
||||
context?: {
|
||||
sessionId?: string;
|
||||
nodeId?: string;
|
||||
attempt?: number;
|
||||
};
|
||||
}): void {
|
||||
const policy = parseToolClearancePolicy(input.toolClearance);
|
||||
const normalizedTool = normalizeLookupToken(input.tool);
|
||||
const banlistLookup = toCaseInsensitiveLookup(policy.banlist);
|
||||
const allowlistLookup = toCaseInsensitiveLookup(policy.allowlist);
|
||||
|
||||
if (policy.banlist.includes(input.tool)) {
|
||||
if (banlistLookup.has(normalizedTool)) {
|
||||
this.emit({
|
||||
...toAuditContext(input.context),
|
||||
type: "tool.invocation_blocked",
|
||||
timestamp: toNow(),
|
||||
tool: input.tool,
|
||||
reason: `Tool "${input.tool}" is explicitly banned by policy.`,
|
||||
code: "TOOL_BANNED",
|
||||
@@ -218,10 +286,10 @@ export class SecurityRulesEngine {
|
||||
);
|
||||
}
|
||||
|
||||
if (policy.allowlist.length > 0 && !policy.allowlist.includes(input.tool)) {
|
||||
if (policy.allowlist.length > 0 && !allowlistLookup.has(normalizedTool)) {
|
||||
this.emit({
|
||||
...toAuditContext(input.context),
|
||||
type: "tool.invocation_blocked",
|
||||
timestamp: toNow(),
|
||||
tool: input.tool,
|
||||
reason: `Tool "${input.tool}" is not present in allowlist.`,
|
||||
code: "TOOL_NOT_ALLOWED",
|
||||
@@ -238,21 +306,23 @@ export class SecurityRulesEngine {
|
||||
}
|
||||
|
||||
this.emit({
|
||||
...toAuditContext(input.context),
|
||||
type: "tool.invocation_allowed",
|
||||
timestamp: toNow(),
|
||||
tool: input.tool,
|
||||
});
|
||||
}
|
||||
|
||||
filterAllowedTools(tools: string[], toolClearance: ToolClearancePolicy): string[] {
|
||||
const policy = parseToolClearancePolicy(toolClearance);
|
||||
const allowlistLookup = toCaseInsensitiveLookup(policy.allowlist);
|
||||
const banlistLookup = toCaseInsensitiveLookup(policy.banlist);
|
||||
|
||||
const allowedByAllowlist =
|
||||
policy.allowlist.length === 0
|
||||
? tools
|
||||
: tools.filter((tool) => policy.allowlist.includes(tool));
|
||||
: tools.filter((tool) => allowlistLookup.has(normalizeLookupToken(tool)));
|
||||
|
||||
return allowedByAllowlist.filter((tool) => !policy.banlist.includes(tool));
|
||||
return allowedByAllowlist.filter((tool) => !banlistLookup.has(normalizeLookupToken(tool)));
|
||||
}
|
||||
|
||||
private assertCwdBoundary(cwd: string): void {
|
||||
@@ -290,6 +360,11 @@ export class SecurityRulesEngine {
|
||||
private assertBinaryAllowed(
|
||||
command: ParsedShellCommand,
|
||||
toolClearance?: ToolClearancePolicy,
|
||||
context?: {
|
||||
sessionId?: string;
|
||||
nodeId?: string;
|
||||
attempt?: number;
|
||||
},
|
||||
): void {
|
||||
const binaryToken = normalizeToken(command.binary);
|
||||
const binaryName = basename(binaryToken);
|
||||
@@ -313,6 +388,7 @@ export class SecurityRulesEngine {
|
||||
this.assertToolInvocationAllowed({
|
||||
tool: binaryName,
|
||||
toolClearance,
|
||||
context,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -157,11 +157,15 @@ export function parseParsedShellScript(input: unknown): ParsedShellScript {
|
||||
};
|
||||
}
|
||||
|
||||
export type SecurityViolationHandling = "hard_abort" | "validation_fail";
|
||||
export type SecurityViolationHandling =
|
||||
| "hard_abort"
|
||||
| "validation_fail"
|
||||
| "dangerous_warn_only";
|
||||
|
||||
export const securityViolationHandlingSchema = z.union([
|
||||
z.literal("hard_abort"),
|
||||
z.literal("validation_fail"),
|
||||
z.literal("dangerous_warn_only"),
|
||||
]);
|
||||
|
||||
export function parseSecurityViolationHandling(input: unknown): SecurityViolationHandling {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { resolve } from "node:path";
|
||||
import { loadConfig, type AppConfig } from "../config.js";
|
||||
import type { SecurityViolationHandling } from "../security/index.js";
|
||||
import { parseEnvFile, writeEnvFileUpdates } from "./env-store.js";
|
||||
|
||||
export type RuntimeNotificationSettings = {
|
||||
@@ -9,7 +10,7 @@ export type RuntimeNotificationSettings = {
|
||||
};
|
||||
|
||||
export type SecurityPolicySettings = {
|
||||
violationMode: "hard_abort" | "validation_fail";
|
||||
violationMode: SecurityViolationHandling;
|
||||
allowedBinaries: string[];
|
||||
commandTimeoutMs: number;
|
||||
inheritedEnv: string[];
|
||||
@@ -23,6 +24,7 @@ export type LimitSettings = {
|
||||
topologyMaxDepth: number;
|
||||
topologyMaxRetries: number;
|
||||
relationshipMaxChildren: number;
|
||||
mergeConflictMaxAttempts: number;
|
||||
portBase: number;
|
||||
portBlockSize: number;
|
||||
portBlockCount: number;
|
||||
@@ -38,6 +40,7 @@ export type UiConfigSnapshot = {
|
||||
stateRoot: string;
|
||||
projectContextPath: string;
|
||||
runtimeEventLogPath: string;
|
||||
claudeTraceLogPath: string;
|
||||
securityAuditLogPath: string;
|
||||
};
|
||||
};
|
||||
@@ -88,6 +91,7 @@ function toLimits(config: Readonly<AppConfig>): LimitSettings {
|
||||
topologyMaxDepth: config.orchestration.maxDepth,
|
||||
topologyMaxRetries: config.orchestration.maxRetries,
|
||||
relationshipMaxChildren: config.orchestration.maxChildren,
|
||||
mergeConflictMaxAttempts: config.orchestration.mergeConflictMaxAttempts,
|
||||
portBase: config.provisioning.portRange.basePort,
|
||||
portBlockSize: config.provisioning.portRange.blockSize,
|
||||
portBlockCount: config.provisioning.portRange.blockCount,
|
||||
@@ -105,6 +109,7 @@ function toSnapshot(config: Readonly<AppConfig>, envFilePath: string): UiConfigS
|
||||
stateRoot: config.orchestration.stateRoot,
|
||||
projectContextPath: config.orchestration.projectContextPath,
|
||||
runtimeEventLogPath: config.runtimeEvents.logPath,
|
||||
claudeTraceLogPath: config.provider.claudeObservability.logPath,
|
||||
securityAuditLogPath: config.security.auditLogPath,
|
||||
},
|
||||
};
|
||||
@@ -170,6 +175,7 @@ export class UiConfigStore {
|
||||
AGENT_TOPOLOGY_MAX_DEPTH: String(input.topologyMaxDepth),
|
||||
AGENT_TOPOLOGY_MAX_RETRIES: String(input.topologyMaxRetries),
|
||||
AGENT_RELATIONSHIP_MAX_CHILDREN: String(input.relationshipMaxChildren),
|
||||
AGENT_MERGE_CONFLICT_MAX_ATTEMPTS: String(input.mergeConflictMaxAttempts),
|
||||
AGENT_PORT_BASE: String(input.portBase),
|
||||
AGENT_PORT_BLOCK_SIZE: String(input.portBlockSize),
|
||||
AGENT_PORT_BLOCK_COUNT: String(input.portBlockCount),
|
||||
821
src/ui/claude-observability.ts
Normal file
821
src/ui/claude-observability.ts
Normal file
@@ -0,0 +1,821 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { appendFile, mkdir } from "node:fs/promises";
|
||||
import { dirname, resolve } from "node:path";
|
||||
import type { Options, SDKMessage } from "@anthropic-ai/claude-agent-sdk";
|
||||
import type {
|
||||
ClaudeObservabilityMode,
|
||||
ClaudeObservabilityRuntimeConfig,
|
||||
ClaudeObservabilityVerbosity,
|
||||
} from "../config.js";
|
||||
import type { JsonObject, JsonValue } from "../agents/types.js";
|
||||
|
||||
const MAX_STRING_LENGTH = 320;
|
||||
const MAX_ARRAY_ITEMS = 20;
|
||||
const MAX_OBJECT_KEYS = 60;
|
||||
const MAX_DEPTH = 6;
|
||||
|
||||
const NON_SECRET_TOKEN_KEYS = new Set([
|
||||
"input_tokens",
|
||||
"output_tokens",
|
||||
"total_tokens",
|
||||
"cache_creation_input_tokens",
|
||||
"cache_read_input_tokens",
|
||||
"ephemeral_1h_input_tokens",
|
||||
"ephemeral_5m_input_tokens",
|
||||
"token_input",
|
||||
"token_output",
|
||||
"token_total",
|
||||
"tokencount",
|
||||
"token_count",
|
||||
"tool_use_id",
|
||||
"parent_tool_use_id",
|
||||
"task_id",
|
||||
"session_id",
|
||||
]);
|
||||
|
||||
type ClaudeTraceContext = {
|
||||
sessionId: string;
|
||||
nodeId: string;
|
||||
attempt: number;
|
||||
depth: number;
|
||||
};
|
||||
|
||||
type ClaudeTraceRecord = {
|
||||
id: string;
|
||||
timestamp: string;
|
||||
source: "claude_sdk";
|
||||
stage:
|
||||
| "query.started"
|
||||
| "query.message"
|
||||
| "query.stderr"
|
||||
| "query.completed"
|
||||
| "query.error";
|
||||
message: string;
|
||||
sessionId: string;
|
||||
nodeId: string;
|
||||
attempt: number;
|
||||
depth: number;
|
||||
sdkSessionId?: string;
|
||||
sdkMessageType?: string;
|
||||
sdkMessageSubtype?: string;
|
||||
data?: JsonObject;
|
||||
};
|
||||
|
||||
function truncate(value: string, maxLength = MAX_STRING_LENGTH): string {
|
||||
if (value.length <= maxLength) {
|
||||
return value;
|
||||
}
|
||||
return `${value.slice(0, maxLength)}...`;
|
||||
}
|
||||
|
||||
function isSensitiveKey(key: string): boolean {
|
||||
const normalized = key.trim().toLowerCase();
|
||||
if (!normalized) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (NON_SECRET_TOKEN_KEYS.has(normalized)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (/(api[_-]?key|secret|password|authorization|cookie)/i.test(key)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (/(auth[_-]?token|access[_-]?token|refresh[_-]?token|id[_-]?token|oauth)/i.test(key)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return normalized === "token";
|
||||
}
|
||||
|
||||
function toJsonPrimitive(value: unknown): JsonValue {
|
||||
if (value === null) {
|
||||
return null;
|
||||
}
|
||||
if (typeof value === "string") {
|
||||
return truncate(value);
|
||||
}
|
||||
if (typeof value === "number") {
|
||||
return Number.isFinite(value) ? value : String(value);
|
||||
}
|
||||
if (typeof value === "boolean") {
|
||||
return value;
|
||||
}
|
||||
if (typeof value === "bigint") {
|
||||
return String(value);
|
||||
}
|
||||
if (typeof value === "undefined") {
|
||||
return null;
|
||||
}
|
||||
return truncate(String(value));
|
||||
}
|
||||
|
||||
/**
 * Recursively convert an arbitrary value into a JSON-safe, size-bounded
 * representation for trace logging.
 *
 * - Recursion stops at MAX_DEPTH, emitting the "[depth_limit]" marker.
 * - Arrays are capped at MAX_ARRAY_ITEMS with a trailing "[+N more]" entry.
 * - Objects are capped at MAX_OBJECT_KEYS; the overflow count is reported
 *   under __truncated_keys.
 * - Keys matching isSensitiveKey() are replaced with "[redacted]".
 * - Primitives are normalized/truncated via toJsonPrimitive().
 */
function sanitizeJsonValue(value: unknown, depth = 0): JsonValue {
  if (depth >= MAX_DEPTH) {
    return "[depth_limit]";
  }

  if (
    value === null ||
    typeof value === "string" ||
    typeof value === "number" ||
    typeof value === "boolean" ||
    typeof value === "bigint" ||
    typeof value === "undefined"
  ) {
    return toJsonPrimitive(value);
  }

  if (Array.isArray(value)) {
    const output = value.slice(0, MAX_ARRAY_ITEMS).map((entry) => sanitizeJsonValue(entry, depth + 1));
    if (value.length > MAX_ARRAY_ITEMS) {
      // Record how many items were dropped instead of silently truncating.
      output.push(`[+${String(value.length - MAX_ARRAY_ITEMS)} more]`);
    }
    return output;
  }

  if (typeof value === "object") {
    const output: JsonObject = {};
    const entries = Object.entries(value as Record<string, unknown>);
    const limited = entries.slice(0, MAX_OBJECT_KEYS);
    for (const [key, entryValue] of limited) {
      // Redact likely-secret keys instead of recursing into their values.
      if (isSensitiveKey(key)) {
        output[key] = "[redacted]";
        continue;
      }
      output[key] = sanitizeJsonValue(entryValue, depth + 1);
    }
    if (entries.length > MAX_OBJECT_KEYS) {
      output.__truncated_keys = entries.length - MAX_OBJECT_KEYS;
    }
    return output;
  }

  // Functions, symbols, etc. fall back to a truncated string rendering.
  return truncate(String(value));
}
|
||||
|
||||
function readString(value: unknown): string | undefined {
|
||||
if (typeof value !== "string") {
|
||||
return undefined;
|
||||
}
|
||||
const trimmed = value.trim();
|
||||
return trimmed.length > 0 ? trimmed : undefined;
|
||||
}
|
||||
|
||||
function readNumber(value: unknown): number | undefined {
|
||||
return typeof value === "number" && Number.isFinite(value) ? value : undefined;
|
||||
}
|
||||
|
||||
function readBoolean(value: unknown): boolean | undefined {
|
||||
return typeof value === "boolean" ? value : undefined;
|
||||
}
|
||||
|
||||
function toMessageRecord(message: SDKMessage): Record<string, unknown> {
|
||||
return message as unknown as Record<string, unknown>;
|
||||
}
|
||||
|
||||
function toMessageSubtype(message: SDKMessage): string | undefined {
|
||||
return readString(toMessageRecord(message).subtype);
|
||||
}
|
||||
|
||||
function toMessageSessionId(message: SDKMessage): string | undefined {
|
||||
return readString(toMessageRecord(message).session_id);
|
||||
}
|
||||
|
||||
/**
 * Build the summary line and structured data for a "task_notification"
 * SDK message. `status` defaults to "unknown"; optional fields (task_id,
 * summary, output_file, usage) are included only when present, with the
 * free-text summary truncated and usage sanitized.
 */
function toTaskNotificationSummary(message: SDKMessage): {
  summary: string;
  data?: JsonObject;
} {
  const raw = toMessageRecord(message);
  const status = readString(raw.status) ?? "unknown";
  const data: JsonObject = {
    status,
  };

  const taskId = readString(raw.task_id);
  if (taskId) {
    data.taskId = taskId;
  }

  const summaryText = readString(raw.summary);
  if (summaryText) {
    data.summary = truncate(summaryText);
  }

  const outputFile = readString(raw.output_file);
  if (outputFile) {
    data.outputFile = outputFile;
  }

  if (raw.usage !== undefined) {
    data.usage = sanitizeJsonValue(raw.usage);
  }

  return {
    summary: `Task notification: ${status}.`,
    data,
  };
}
|
||||
|
||||
function toTaskStartedSummary(message: SDKMessage): {
|
||||
summary: string;
|
||||
data?: JsonObject;
|
||||
} {
|
||||
const raw = toMessageRecord(message);
|
||||
const data: JsonObject = {};
|
||||
|
||||
const taskId = readString(raw.task_id);
|
||||
if (taskId) {
|
||||
data.taskId = taskId;
|
||||
}
|
||||
|
||||
const description = readString(raw.description);
|
||||
if (description) {
|
||||
data.description = truncate(description);
|
||||
}
|
||||
|
||||
const taskType = readString(raw.task_type);
|
||||
if (taskType) {
|
||||
data.taskType = taskType;
|
||||
}
|
||||
|
||||
const toolUseId = readString(raw.tool_use_id);
|
||||
if (toolUseId) {
|
||||
data.toolUseId = toolUseId;
|
||||
}
|
||||
|
||||
return {
|
||||
summary: "Task started.",
|
||||
...(Object.keys(data).length > 0 ? { data } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Map an SDK message to a one-line summary plus optional structured data,
 * dispatching on message.type (and, for system messages, on subtype).
 * Free-text fields are truncated and nested payloads sanitized; unknown
 * message types fall through to a generic summary with no data.
 */
function toMessageSummary(message: SDKMessage): {
  summary: string;
  data?: JsonObject;
} {
  const subtype = toMessageSubtype(message);
  const raw = toMessageRecord(message);

  // Terminal result message: success and non-success carry the same core
  // stats; failures additionally include sanitized error details.
  if (message.type === "result") {
    if (message.subtype === "success") {
      return {
        summary: "Claude query result success.",
        data: {
          stopReason: message.stop_reason ?? null,
          numTurns: message.num_turns,
          usage: sanitizeJsonValue(message.usage) as JsonObject,
          totalCostUsd: message.total_cost_usd,
        },
      };
    }

    return {
      summary: `Claude query result ${message.subtype}.`,
      data: {
        stopReason: message.stop_reason ?? null,
        numTurns: message.num_turns,
        usage: sanitizeJsonValue(message.usage) as JsonObject,
        totalCostUsd: message.total_cost_usd,
        errors: sanitizeJsonValue(message.errors),
      },
    };
  }

  // Periodic progress ping for a running tool invocation.
  if (message.type === "tool_progress") {
    return {
      summary: `Tool progress: ${message.tool_name}.`,
      data: {
        toolName: message.tool_name,
        toolUseId: message.tool_use_id,
        elapsedTimeSeconds: message.elapsed_time_seconds,
        parentToolUseId: message.parent_tool_use_id ?? null,
        ...(message.task_id ? { taskId: message.task_id } : {}),
      },
    };
  }

  if (message.type === "tool_use_summary") {
    return {
      summary: "Tool use summary emitted.",
      data: {
        summary: truncate(message.summary),
        precedingToolUseIds: sanitizeJsonValue(message.preceding_tool_use_ids),
      },
    };
  }

  // Partial streaming event; only the event type and parent tool id are
  // kept, and only when present.
  if (message.type === "stream_event") {
    const data: JsonObject = {};
    const eventType = readString((raw.event as Record<string, unknown> | undefined)?.type);
    if (eventType) {
      data.eventType = eventType;
    }
    const parentToolUseId = readString(raw.parent_tool_use_id);
    if (parentToolUseId) {
      data.parentToolUseId = parentToolUseId;
    }
    return {
      summary: "Partial assistant stream event emitted.",
      ...(Object.keys(data).length > 0 ? { data } : {}),
    };
  }

  if (message.type === "auth_status") {
    return {
      summary: message.isAuthenticating ? "Authentication in progress." : "Authentication status update.",
      data: {
        isAuthenticating: message.isAuthenticating,
        output: sanitizeJsonValue(message.output),
        ...(message.error ? { error: truncate(message.error) } : {}),
      },
    };
  }

  if (message.type === "assistant") {
    return {
      summary: "Assistant message emitted.",
      data: {
        parentToolUseId: message.parent_tool_use_id ?? null,
        ...(message.error ? { error: message.error } : {}),
      },
    };
  }

  if (message.type === "user") {
    const data: JsonObject = {
      parentToolUseId: (message as { parent_tool_use_id?: string | null }).parent_tool_use_id ?? null,
    };
    const isSynthetic = readBoolean(raw.isSynthetic);
    if (isSynthetic !== undefined) {
      data.isSynthetic = isSynthetic;
    }
    const isReplay = readBoolean(raw.isReplay);
    if (isReplay !== undefined) {
      data.isReplay = isReplay;
    }
    return {
      summary: "User message emitted.",
      data,
    };
  }

  // Task lifecycle subtypes are delegated to dedicated summarizers.
  if (subtype === "task_notification") {
    return toTaskNotificationSummary(message);
  }

  if (subtype === "task_started") {
    return toTaskStartedSummary(message);
  }

  // System subtype handlers: only counts are logged for persisted files.
  if (message.type === "system" && subtype === "files_persisted") {
    const files = Array.isArray(raw.files) ? raw.files : [];
    const failed = Array.isArray(raw.failed) ? raw.failed : [];
    return {
      summary: "System event: files_persisted.",
      data: {
        persistedFileCount: files.length,
        failedFileCount: failed.length,
      },
    };
  }

  if (message.type === "system" && subtype === "compact_boundary") {
    return {
      summary: "System event: compact_boundary.",
      data: {
        compactMetadata: sanitizeJsonValue(raw.compact_metadata),
      },
    };
  }

  if (message.type === "system" && subtype === "status") {
    const data: JsonObject = {
      status: readString(raw.status) ?? "none",
    };
    const permissionMode = readString(raw.permissionMode);
    if (permissionMode) {
      data.permissionMode = permissionMode;
    }
    return {
      summary: "System event: status.",
      data,
    };
  }

  // Hook lifecycle events share one shape; absent fields are omitted.
  if (message.type === "system" && (subtype === "hook_started" || subtype === "hook_progress" || subtype === "hook_response")) {
    const data: JsonObject = {
      ...(subtype ? { subtype } : {}),
      ...(readString(raw.hook_id) ? { hookId: readString(raw.hook_id) } : {}),
      ...(readString(raw.hook_name) ? { hookName: readString(raw.hook_name) } : {}),
      ...(readString(raw.hook_event) ? { hookEvent: readString(raw.hook_event) } : {}),
      ...(readString(raw.outcome) ? { outcome: readString(raw.outcome) } : {}),
    };
    if (raw.exit_code !== undefined) {
      data.exitCode = sanitizeJsonValue(raw.exit_code);
    }
    return {
      summary: `System event: ${subtype}.`,
      data,
    };
  }

  // Any other system event: report the subtype when one exists.
  if (message.type === "system") {
    return {
      summary: subtype ? `System event: ${subtype}.` : "System event emitted.",
      data: subtype ? { subtype } : undefined,
    };
  }

  if (message.type === "rate_limit") {
    return {
      summary: "Rate limit event emitted.",
      data: sanitizeJsonValue(raw) as JsonObject,
    };
  }

  if (message.type === "prompt_suggestion") {
    const data: JsonObject = {
      ...(readString(raw.prompt) ? { prompt: truncate(readString(raw.prompt) as string) } : {}),
      ...(readString(raw.suggestion) ? { suggestion: truncate(readString(raw.suggestion) as string) } : {}),
    };
    return {
      summary: "Prompt suggestion emitted.",
      ...(Object.keys(data).length > 0 ? { data } : {}),
    };
  }

  // Fallback for message types without a dedicated summarizer.
  return {
    summary: `Claude SDK message received (${message.type}).`,
  };
}
|
||||
|
||||
function toRecord(input: {
|
||||
stage: ClaudeTraceRecord["stage"];
|
||||
message: string;
|
||||
context: ClaudeTraceContext;
|
||||
sdkMessageType?: string;
|
||||
sdkMessageSubtype?: string;
|
||||
sdkSessionId?: string;
|
||||
data?: JsonObject;
|
||||
}): ClaudeTraceRecord {
|
||||
return {
|
||||
id: randomUUID(),
|
||||
timestamp: new Date().toISOString(),
|
||||
source: "claude_sdk",
|
||||
stage: input.stage,
|
||||
message: input.message,
|
||||
sessionId: input.context.sessionId,
|
||||
nodeId: input.context.nodeId,
|
||||
attempt: input.context.attempt,
|
||||
depth: input.context.depth,
|
||||
...(input.sdkMessageType ? { sdkMessageType: input.sdkMessageType } : {}),
|
||||
...(input.sdkMessageSubtype ? { sdkMessageSubtype: input.sdkMessageSubtype } : {}),
|
||||
...(input.sdkSessionId ? { sdkSessionId: input.sdkSessionId } : {}),
|
||||
...(input.data ? { data: input.data } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Produce a log-friendly view of an SDK message.
 *
 * With "summary" verbosity the per-type summary data from toMessageSummary
 * is attached (when present); with "full" verbosity the entire SDK message
 * is sanitized and embedded under data.message instead.
 */
export function summarizeClaudeMessage(
  message: SDKMessage,
  verbosity: ClaudeObservabilityVerbosity,
): {
  messageType: string;
  messageSubtype?: string;
  sdkSessionId?: string;
  summary: string;
  data?: JsonObject;
} {
  const messageSubtype = toMessageSubtype(message);
  const sdkSessionId = toMessageSessionId(message);
  const summary = toMessageSummary(message);
  if (verbosity === "full") {
    return {
      messageType: message.type,
      ...(messageSubtype ? { messageSubtype } : {}),
      ...(sdkSessionId ? { sdkSessionId } : {}),
      summary: summary.summary,
      data: {
        // Full mode ships the whole message, sanitized and size-bounded.
        message: sanitizeJsonValue(message) as JsonObject,
      },
    };
  }

  return {
    messageType: message.type,
    ...(messageSubtype ? { messageSubtype } : {}),
    ...(sdkSessionId ? { sdkSessionId } : {}),
    summary: summary.summary,
    ...(summary.data ? { data: summary.data } : {}),
  };
}
|
||||
|
||||
/**
 * Emits Claude SDK trace records to stdout and/or an NDJSON log file,
 * rate-limiting noisy message types (tool_progress, stream_event) per
 * destination so the trace stays readable. File writes are fire-and-forget
 * but tracked so close() can await them.
 */
export class ClaudeObservabilityLogger {
  // Destination selector; "off" disables all recording (see isEnabled).
  private readonly mode: ClaudeObservabilityMode;
  // Payload detail level forwarded to summarizeClaudeMessage.
  private readonly verbosity: ClaudeObservabilityVerbosity;
  // Absolute NDJSON log path (resolved against the workspace root).
  private readonly logPath: string;
  // Whether partial stream_event messages are emitted at all.
  private readonly includePartialMessages: boolean;
  // User-requested SDK debug flag.
  private readonly debug: boolean;
  // Optional absolute path for the SDK's debugFile output.
  private readonly debugLogPath?: string;
  // In-flight append promises, awaited by close().
  private readonly pendingWrites = new Set<Promise<void>>();
  // Sampling state for tool_progress, keyed per destination/session/node/tool.
  private readonly stdoutProgressByKey = new Map<string, {
    lastEmittedAt: number;
    suppressed: number;
  }>();
  private readonly fileProgressByKey = new Map<string, {
    lastEmittedAt: number;
    suppressed: number;
  }>();
  // Sampling state for stream_event, keyed per destination/session/node.
  private readonly stdoutStreamByKey = new Map<string, {
    lastEmittedAt: number;
    suppressed: number;
  }>();
  private readonly fileStreamByKey = new Map<string, {
    lastEmittedAt: number;
    suppressed: number;
  }>();
  // Count of failed appendFile attempts, used to cap warning spam.
  private fileWriteFailureCount = 0;

  constructor(input: {
    workspaceRoot: string;
    config: ClaudeObservabilityRuntimeConfig;
  }) {
    this.mode = input.config.mode;
    this.verbosity = input.config.verbosity;
    // Resolve configured (possibly relative) paths against the workspace.
    this.logPath = resolve(input.workspaceRoot, input.config.logPath);
    this.includePartialMessages = input.config.includePartialMessages;
    this.debug = input.config.debug;
    this.debugLogPath = input.config.debugLogPath
      ? resolve(input.workspaceRoot, input.config.debugLogPath)
      : undefined;
  }

  /** True unless observability mode is "off". */
  isEnabled(): boolean {
    return this.mode !== "off";
  }

  /**
   * Build the SDK query Options overrides that wire this logger in:
   * partial-message inclusion, debug flags, and a stderr callback that
   * records sanitized stderr output as "query.stderr" trace records.
   */
  toOptionOverrides(input: {
    context: ClaudeTraceContext;
  }): Pick<Options, "includePartialMessages" | "debug" | "debugFile" | "stderr"> {
    return {
      includePartialMessages: this.includePartialMessages,
      // Debug is forced on whenever a debug file path was configured.
      debug: this.debug || this.debugLogPath !== undefined,
      ...(this.debugLogPath ? { debugFile: this.debugLogPath } : {}),
      stderr: (data: string): void => {
        this.record({
          stage: "query.stderr",
          message: "Claude SDK stderr output.",
          context: input.context,
          data: {
            stderr: sanitizeJsonValue(data),
          },
        });
      },
    };
  }

  /** Record that a Claude query has begun. */
  recordQueryStarted(input: {
    context: ClaudeTraceContext;
    data?: JsonObject;
  }): void {
    this.record({
      stage: "query.started",
      message: "Claude query started.",
      context: input.context,
      ...(input.data ? { data: input.data } : {}),
    });
  }

  /** Record one SDK message, summarized per the configured verbosity. */
  recordMessage(input: {
    context: ClaudeTraceContext;
    message: SDKMessage;
  }): void {
    const summarized = summarizeClaudeMessage(input.message, this.verbosity);
    this.record({
      stage: "query.message",
      message: summarized.summary,
      context: input.context,
      sdkMessageType: summarized.messageType,
      sdkMessageSubtype: summarized.messageSubtype,
      sdkSessionId: summarized.sdkSessionId,
      ...(summarized.data ? { data: summarized.data } : {}),
    });
  }

  /** Record that a Claude query finished normally. */
  recordQueryCompleted(input: {
    context: ClaudeTraceContext;
    data?: JsonObject;
  }): void {
    this.record({
      stage: "query.completed",
      message: "Claude query completed.",
      context: input.context,
      ...(input.data ? { data: input.data } : {}),
    });
  }

  /** Record a query failure; the error message is truncated for the log. */
  recordQueryError(input: {
    context: ClaudeTraceContext;
    error: unknown;
  }): void {
    const errorMessage = input.error instanceof Error ? input.error.message : String(input.error);
    this.record({
      stage: "query.error",
      message: "Claude query failed.",
      context: input.context,
      data: {
        error: truncate(errorMessage),
      },
    });
  }

  /** Wait for all in-flight file appends to settle. */
  async close(): Promise<void> {
    await Promise.all([...this.pendingWrites]);
  }

  /**
   * Core emit path: build the record, then route it to stdout and/or the
   * log file per mode, applying per-destination filtering/sampling. File
   * appends are asynchronous; failures are reported (capped) rather than
   * thrown.
   */
  private record(input: {
    stage: ClaudeTraceRecord["stage"];
    message: string;
    context: ClaudeTraceContext;
    sdkMessageType?: string;
    sdkMessageSubtype?: string;
    sdkSessionId?: string;
    data?: JsonObject;
  }): void {
    if (!this.isEnabled()) {
      return;
    }

    const record = toRecord(input);

    if (this.mode === "stdout" || this.mode === "both") {
      const stdoutRecord = this.toStdoutRecord(record);
      if (stdoutRecord) {
        console.log(`[claude-trace] ${JSON.stringify(stdoutRecord)}`);
      }
    }

    if (this.mode === "file" || this.mode === "both") {
      const fileRecord = this.toFileRecord(record);
      if (!fileRecord) {
        return;
      }
      const line = JSON.stringify(fileRecord);
      // Fire-and-forget append; tracked in pendingWrites so close() can
      // await completion. The log directory is (re)created on every write.
      const write = mkdir(dirname(this.logPath), { recursive: true })
        .then(() => appendFile(this.logPath, `${line}\n`, "utf8"))
        .catch((error: unknown) => {
          this.reportFileWriteFailure(error);
        })
        .finally(() => {
          this.pendingWrites.delete(write);
        });
      this.pendingWrites.add(write);
    }
  }

  private toStdoutRecord(record: ClaudeTraceRecord): ClaudeTraceRecord | undefined {
    return this.toFilteredMessageRecord(record, "stdout");
  }

  private toFileRecord(record: ClaudeTraceRecord): ClaudeTraceRecord | undefined {
    return this.toFilteredMessageRecord(record, "file");
  }

  /**
   * Decide whether a record should be emitted to the given destination.
   * Only "query.message" records are filtered: tool_progress and
   * stream_event are time-sampled, stream_event is dropped entirely when
   * partial messages are disabled, and auth_status is emitted only when it
   * reports an error or authentication has finished.
   */
  private toFilteredMessageRecord(
    record: ClaudeTraceRecord,
    destination: "stdout" | "file",
  ): ClaudeTraceRecord | undefined {
    if (record.stage !== "query.message") {
      return record;
    }

    if (!record.sdkMessageType) {
      return record;
    }

    if (record.sdkMessageType === "tool_progress") {
      return this.toSampledToolProgressRecord(record, destination);
    }

    if (record.sdkMessageType === "stream_event") {
      if (!this.includePartialMessages) {
        return undefined;
      }
      return this.toSampledStreamEventRecord(record, destination);
    }

    if (record.sdkMessageType === "auth_status") {
      const data = record.data;
      const isAuthenticating = data?.isAuthenticating === true;
      const hasError = typeof data?.error === "string" && data.error.trim().length > 0;
      if (hasError || !isAuthenticating) {
        return record;
      }
      // Suppress routine "still authenticating" noise.
      return undefined;
    }

    return record;
  }

  /**
   * Time-sample tool_progress records per destination/session/node/tool:
   * at most one record per interval (1s stdout, 2s file). When earlier
   * records were dropped, the emitted record carries the count under
   * data.suppressedSinceLastEmit.
   */
  private toSampledToolProgressRecord(
    record: ClaudeTraceRecord,
    destination: "stdout" | "file",
  ): ClaudeTraceRecord | undefined {
    const now = Date.now();
    const minIntervalMs = destination === "stdout" ? 1000 : 2000;
    const rawToolUseId = record.data?.toolUseId;
    const toolUseId = typeof rawToolUseId === "string" ? rawToolUseId : "unknown";
    const key = `${destination}:${record.sessionId}:${record.nodeId}:${toolUseId}`;
    const progressByKey = destination === "stdout" ? this.stdoutProgressByKey : this.fileProgressByKey;
    const state = progressByKey.get(key);

    // First record for this key always passes and starts the window.
    if (!state) {
      progressByKey.set(key, {
        lastEmittedAt: now,
        suppressed: 0,
      });
      return record;
    }

    if (now - state.lastEmittedAt < minIntervalMs) {
      state.suppressed += 1;
      return undefined;
    }

    state.lastEmittedAt = now;
    const suppressed = state.suppressed;
    state.suppressed = 0;

    if (suppressed < 1) {
      return record;
    }

    const nextData: JsonObject = {
      ...(record.data ?? {}),
      suppressedSinceLastEmit: suppressed,
    };

    return {
      ...record,
      data: nextData,
    };
  }

  /**
   * Time-sample stream_event records per destination/session/node (700ms
   * stdout, 1200ms file), annotating emitted records with the number of
   * dropped events under data.suppressedStreamEventsSinceLastEmit.
   */
  private toSampledStreamEventRecord(
    record: ClaudeTraceRecord,
    destination: "stdout" | "file",
  ): ClaudeTraceRecord | undefined {
    const now = Date.now();
    const minIntervalMs = destination === "stdout" ? 700 : 1200;
    const key = `${destination}:${record.sessionId}:${record.nodeId}:stream`;
    const streamByKey = destination === "stdout" ? this.stdoutStreamByKey : this.fileStreamByKey;
    const state = streamByKey.get(key);

    if (!state) {
      streamByKey.set(key, {
        lastEmittedAt: now,
        suppressed: 0,
      });
      return record;
    }

    if (now - state.lastEmittedAt < minIntervalMs) {
      state.suppressed += 1;
      return undefined;
    }

    state.lastEmittedAt = now;
    const suppressed = state.suppressed;
    state.suppressed = 0;

    if (suppressed < 1) {
      return record;
    }

    const nextData: JsonObject = {
      ...(record.data ?? {}),
      suppressedStreamEventsSinceLastEmit: suppressed,
    };

    return {
      ...record,
      data: nextData,
    };
  }

  /**
   * Warn about a failed log append, but only for the first five failures;
   * a single final notice marks further failures as suppressed.
   */
  private reportFileWriteFailure(error: unknown): void {
    this.fileWriteFailureCount += 1;
    if (this.fileWriteFailureCount <= 5) {
      const message = error instanceof Error ? error.message : String(error);
      console.warn(
        `[claude-trace] failed to append trace log to ${this.logPath}: ${truncate(message, 180)}`,
      );
      return;
    }

    if (this.fileWriteFailureCount === 6) {
      console.warn("[claude-trace] additional trace-log write failures suppressed.");
    }
  }
}
|
||||
85
src/ui/claude-trace-store.ts
Normal file
85
src/ui/claude-trace-store.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { readFile } from "node:fs/promises";
|
||||
import { resolve } from "node:path";
|
||||
|
||||
/**
 * One parsed line of the claude-trace NDJSON log. Only `timestamp` and
 * `message` are guaranteed by the parser (safeParseLine); all other fields
 * pass through from the raw JSON unchanged.
 */
export type ClaudeTraceEvent = {
  timestamp: string;
  message: string;
  stage?: string;
  sessionId?: string;
  sdkMessageType?: string;
  sdkMessageSubtype?: string;
  data?: unknown;
} & Record<string, unknown>;
|
||||
|
||||
/**
 * Filter options for trace events: an optional exact sessionId match and
 * an optional cap on the number of (most recent) events returned.
 */
type ClaudeTraceFilter = {
  sessionId?: string;
  limit?: number;
};
|
||||
|
||||
function safeParseLine(line: string): ClaudeTraceEvent | undefined {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(trimmed) as unknown;
|
||||
if (!parsed || typeof parsed !== "object") {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const record = parsed as Record<string, unknown>;
|
||||
if (typeof record.timestamp !== "string" || typeof record.message !== "string") {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return record as ClaudeTraceEvent;
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Read and parse the NDJSON trace log at `logPath`.
 *
 * Malformed lines are skipped (see safeParseLine) and the surviving events
 * are returned sorted by their timestamp strings — ISO-8601 timestamps
 * order chronologically under string comparison. A missing file yields an
 * empty list; any other read error is rethrown.
 */
export async function readClaudeTraceEvents(logPath: string): Promise<ClaudeTraceEvent[]> {
  const absolutePath = resolve(logPath);
  let content = "";

  try {
    content = await readFile(absolutePath, "utf8");
  } catch (error) {
    // A missing log file just means no events have been recorded yet.
    if ((error as NodeJS.ErrnoException).code === "ENOENT") {
      return [];
    }
    throw error;
  }

  const parsed: ClaudeTraceEvent[] = [];
  for (const line of content.split(/\r?\n/)) {
    const event = safeParseLine(line);
    if (event) {
      parsed.push(event);
    }
  }

  parsed.sort((left, right) => left.timestamp.localeCompare(right.timestamp));
  return parsed;
}
|
||||
|
||||
export function filterClaudeTraceEvents(
|
||||
events: readonly ClaudeTraceEvent[],
|
||||
filter: ClaudeTraceFilter,
|
||||
): ClaudeTraceEvent[] {
|
||||
const filtered: ClaudeTraceEvent[] = [];
|
||||
|
||||
for (const event of events) {
|
||||
if (filter.sessionId && event.sessionId !== filter.sessionId) {
|
||||
continue;
|
||||
}
|
||||
filtered.push(event);
|
||||
}
|
||||
|
||||
if (!filter.limit || filter.limit < 1 || filtered.length <= filter.limit) {
|
||||
return filtered;
|
||||
}
|
||||
|
||||
return filtered.slice(-filter.limit);
|
||||
}
|
||||
@@ -1,90 +0,0 @@
|
||||
import { createReadStream } from "node:fs";
|
||||
import { stat } from "node:fs/promises";
|
||||
import { extname, resolve } from "node:path";
|
||||
import type { IncomingMessage, ServerResponse } from "node:http";
|
||||
|
||||
// File-extension → MIME-type table for static asset responses; extensions
// not listed here fall back to application/octet-stream in serveStaticFile.
const CONTENT_TYPES: Record<string, string> = {
  ".html": "text/html; charset=utf-8",
  ".js": "text/javascript; charset=utf-8",
  ".css": "text/css; charset=utf-8",
  ".json": "application/json; charset=utf-8",
  ".svg": "image/svg+xml",
};
|
||||
|
||||
export function sendJson(response: ServerResponse, statusCode: number, body: unknown): void {
|
||||
const payload = JSON.stringify(body);
|
||||
response.statusCode = statusCode;
|
||||
response.setHeader("Content-Type", "application/json; charset=utf-8");
|
||||
response.end(payload);
|
||||
}
|
||||
|
||||
/**
 * Send `body` verbatim as a plain-text response with the given status code.
 */
export function sendText(response: ServerResponse, statusCode: number, body: string): void {
  response.statusCode = statusCode;
  response.setHeader("Content-Type", "text/plain; charset=utf-8");
  response.end(body);
}
|
||||
|
||||
export async function parseJsonBody<T>(request: IncomingMessage): Promise<T> {
|
||||
const chunks: Buffer[] = [];
|
||||
|
||||
await new Promise<void>((resolveBody, rejectBody) => {
|
||||
request.on("data", (chunk: Buffer) => {
|
||||
chunks.push(chunk);
|
||||
});
|
||||
request.on("end", () => resolveBody());
|
||||
request.on("error", rejectBody);
|
||||
});
|
||||
|
||||
const body = Buffer.concat(chunks).toString("utf8").trim();
|
||||
if (!body) {
|
||||
throw new Error("Request body is required.");
|
||||
}
|
||||
|
||||
return JSON.parse(body) as T;
|
||||
}
|
||||
|
||||
/**
 * Respond with HTTP 405 using the standard JSON error envelope.
 */
export function methodNotAllowed(response: ServerResponse): void {
  sendJson(response, 405, {
    ok: false,
    error: "Method not allowed.",
  });
}
|
||||
|
||||
/**
 * Respond with HTTP 404 using the standard JSON error envelope.
 */
export function notFound(response: ServerResponse): void {
  sendJson(response, 404, {
    ok: false,
    error: "Not found.",
  });
}
|
||||
|
||||
/**
 * Stream a file from disk to the response, with a content type chosen from
 * its extension via CONTENT_TYPES (octet-stream fallback).
 *
 * Returns true when the file was served; false when the path does not
 * exist or is not a regular file. Other filesystem/stream errors are
 * rethrown to the caller.
 */
export async function serveStaticFile(input: {
  response: ServerResponse;
  filePath: string;
}): Promise<boolean> {
  try {
    const absolutePath = resolve(input.filePath);
    const fileStats = await stat(absolutePath);
    if (!fileStats.isFile()) {
      // Directories and other non-file entries are treated as not found.
      return false;
    }

    const extension = extname(absolutePath).toLowerCase();
    const contentType = CONTENT_TYPES[extension] ?? "application/octet-stream";
    input.response.statusCode = 200;
    input.response.setHeader("Content-Type", contentType);

    // Pipe the file and wait for the stream to finish (or fail).
    await new Promise<void>((resolveStream, rejectStream) => {
      const stream = createReadStream(absolutePath);
      stream.on("error", rejectStream);
      stream.on("end", () => resolveStream());
      stream.pipe(input.response);
    });

    return true;
  } catch (error) {
    // ENOENT (file vanished or never existed) maps to "not served".
    if ((error as NodeJS.ErrnoException).code === "ENOENT") {
      return false;
    }
    throw error;
  }
}
|
||||
@@ -2,6 +2,7 @@ const state = {
|
||||
config: null,
|
||||
manifests: [],
|
||||
sessions: [],
|
||||
sessionMetadata: [],
|
||||
runs: [],
|
||||
selectedSessionId: "",
|
||||
selectedManifestPath: "",
|
||||
@@ -25,13 +26,22 @@ const dom = {
|
||||
runProvider: document.querySelector("#run-provider"),
|
||||
runTopologyHint: document.querySelector("#run-topology-hint"),
|
||||
runFlags: document.querySelector("#run-flags"),
|
||||
runRuntimeContext: document.querySelector("#run-runtime-context"),
|
||||
runValidationNodes: document.querySelector("#run-validation-nodes"),
|
||||
killRun: document.querySelector("#kill-run"),
|
||||
runStatus: document.querySelector("#run-status"),
|
||||
sessionForm: document.querySelector("#session-form"),
|
||||
sessionProjectPath: document.querySelector("#session-project-path"),
|
||||
sessionCreate: document.querySelector("#session-create"),
|
||||
sessionClose: document.querySelector("#session-close"),
|
||||
sessionCloseMerge: document.querySelector("#session-close-merge"),
|
||||
nodeInspector: document.querySelector("#node-inspector"),
|
||||
eventsLimit: document.querySelector("#events-limit"),
|
||||
eventsRefresh: document.querySelector("#events-refresh"),
|
||||
eventFeed: document.querySelector("#event-feed"),
|
||||
claudeEventsLimit: document.querySelector("#claude-events-limit"),
|
||||
claudeEventsRefresh: document.querySelector("#claude-events-refresh"),
|
||||
claudeEventFeed: document.querySelector("#claude-event-feed"),
|
||||
historyRefresh: document.querySelector("#history-refresh"),
|
||||
historyBody: document.querySelector("#history-body"),
|
||||
notificationsForm: document.querySelector("#notifications-form"),
|
||||
@@ -77,6 +87,7 @@ const dom = {
|
||||
cfgTopologyDepth: document.querySelector("#cfg-topology-depth"),
|
||||
cfgTopologyRetries: document.querySelector("#cfg-topology-retries"),
|
||||
cfgRelationshipChildren: document.querySelector("#cfg-relationship-children"),
|
||||
cfgMergeConflictAttempts: document.querySelector("#cfg-merge-conflict-attempts"),
|
||||
cfgPortBase: document.querySelector("#cfg-port-base"),
|
||||
cfgPortBlockSize: document.querySelector("#cfg-port-block-size"),
|
||||
cfgPortBlockCount: document.querySelector("#cfg-port-block-count"),
|
||||
@@ -111,10 +122,15 @@ const MANIFEST_EVENT_TRIGGERS = [
|
||||
"requirements_defined",
|
||||
"tasks_planned",
|
||||
"code_committed",
|
||||
"task_ready_for_review",
|
||||
"task_blocked",
|
||||
"validation_passed",
|
||||
"validation_failed",
|
||||
"branch_merged",
|
||||
"merge_conflict_detected",
|
||||
"merge_conflict_resolved",
|
||||
"merge_conflict_unresolved",
|
||||
"merge_retry_started",
|
||||
];
|
||||
|
||||
const RUN_MANIFEST_EDITOR_VALUE = "__editor__";
|
||||
@@ -129,8 +145,12 @@ const LABEL_HELP_BY_CONTROL = Object.freeze({
|
||||
"run-provider": "Choose which model provider backend handles provider-mode runs.",
|
||||
"run-topology-hint": "Optional hint that nudges orchestration toward a topology strategy.",
|
||||
"run-flags": "Optional JSON object passed in as initial run flags.",
|
||||
"run-runtime-context": "Optional JSON object of template values injected into persona prompts (for example repo or ticket).",
|
||||
"run-validation-nodes": "Optional comma-separated node IDs to simulate validation outcomes for.",
|
||||
"session-project-path": "Absolute project path used when creating an explicit managed session.",
|
||||
"session-close-merge": "When enabled, close will merge the session base branch back into the project branch.",
|
||||
"events-limit": "Set how many recent runtime events are loaded per refresh.",
|
||||
"claude-events-limit": "Set how many Claude SDK trace records are loaded per refresh.",
|
||||
"cfg-webhook-url": "Webhook endpoint that receives runtime event notifications.",
|
||||
"cfg-webhook-severity": "Minimum severity level that triggers webhook notifications.",
|
||||
"cfg-webhook-always": "Event types that should always notify, regardless of severity.",
|
||||
@@ -145,6 +165,7 @@ const LABEL_HELP_BY_CONTROL = Object.freeze({
|
||||
"cfg-topology-depth": "Maximum orchestration graph depth permitted by topology rules.",
|
||||
"cfg-topology-retries": "Maximum retry expansions allowed by topology orchestration.",
|
||||
"cfg-relationship-children": "Maximum children each persona relationship can spawn.",
|
||||
"cfg-merge-conflict-attempts": "Maximum merge-conflict resolution attempts before emitting unresolved conflict events.",
|
||||
"cfg-port-base": "Starting port number for provisioning port allocations.",
|
||||
"cfg-port-block-size": "Number of ports reserved per allocated block.",
|
||||
"cfg-port-block-count": "Number of port blocks available for allocation.",
|
||||
@@ -1029,6 +1050,7 @@ async function loadConfig() {
|
||||
dom.cfgTopologyDepth.value = String(limits.topologyMaxDepth);
|
||||
dom.cfgTopologyRetries.value = String(limits.topologyMaxRetries);
|
||||
dom.cfgRelationshipChildren.value = String(limits.relationshipMaxChildren);
|
||||
dom.cfgMergeConflictAttempts.value = String(limits.mergeConflictMaxAttempts);
|
||||
dom.cfgPortBase.value = String(limits.portBase);
|
||||
dom.cfgPortBlockSize.value = String(limits.portBlockSize);
|
||||
dom.cfgPortBlockCount.value = String(limits.portBlockCount);
|
||||
@@ -1060,11 +1082,28 @@ function statusChipClass(status) {
|
||||
return `status-chip status-${status || "unknown"}`;
|
||||
}
|
||||
|
||||
function getSessionLifecycleStatus(sessionId) {
|
||||
const metadata = state.sessionMetadata.find((entry) => entry?.sessionId === sessionId);
|
||||
if (!metadata) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const status = metadata.sessionStatus;
|
||||
if (status === "active" || status === "suspended" || status === "closed" || status === "closed_with_conflicts") {
|
||||
return status;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function renderRunsAndSessionsTable() {
|
||||
const rows = [];
|
||||
|
||||
for (const session of state.sessions) {
|
||||
const sessionStatus = session.status || "unknown";
|
||||
const lifecycleStatus = getSessionLifecycleStatus(session.sessionId);
|
||||
const sessionStatus =
|
||||
lifecycleStatus === "closed" || lifecycleStatus === "closed_with_conflicts"
|
||||
? lifecycleStatus
|
||||
: session.status || lifecycleStatus || "unknown";
|
||||
rows.push(`
|
||||
<tr data-session-id="${escapeHtml(session.sessionId)}">
|
||||
<td>${escapeHtml(session.sessionId)}</td>
|
||||
@@ -1092,6 +1131,7 @@ function renderRunsAndSessionsTable() {
|
||||
async function loadSessions() {
|
||||
const payload = await apiRequest("/api/sessions");
|
||||
state.sessions = payload.sessions || [];
|
||||
state.sessionMetadata = payload.sessionMetadata || [];
|
||||
state.runs = payload.runs || [];
|
||||
|
||||
if (!state.selectedSessionId && state.sessions.length > 0) {
|
||||
@@ -1457,6 +1497,43 @@ function renderEventFeed(events) {
|
||||
dom.eventFeed.innerHTML = rows || '<div class="event-row"><div class="event-time">-</div><div class="event-type">-</div><div>No runtime events.</div></div>';
|
||||
}
|
||||
|
||||
function toClaudeRowSeverity(event) {
|
||||
const stage = String(event?.stage || "");
|
||||
const type = String(event?.sdkMessageType || "");
|
||||
if (stage === "query.error") {
|
||||
return "critical";
|
||||
}
|
||||
if (stage === "query.stderr" || (type === "result" && String(event?.sdkMessageSubtype || "").startsWith("error_"))) {
|
||||
return "warning";
|
||||
}
|
||||
return "info";
|
||||
}
|
||||
|
||||
function renderClaudeTraceFeed(events) {
|
||||
const rows = [...events]
|
||||
.reverse()
|
||||
.map((event) => {
|
||||
const ts = new Date(event.timestamp).toLocaleTimeString();
|
||||
const stage = String(event.stage || "query.message");
|
||||
const sdkMessageType = String(event.sdkMessageType || "");
|
||||
const sdkMessageSubtype = String(event.sdkMessageSubtype || "");
|
||||
const typeLabel = sdkMessageType
|
||||
? `${stage}/${sdkMessageType}${sdkMessageSubtype ? `:${sdkMessageSubtype}` : ""}`
|
||||
: stage;
|
||||
const message = typeof event.message === "string" ? event.message : JSON.stringify(event.message || "");
|
||||
return `
|
||||
<div class="event-row ${escapeHtml(toClaudeRowSeverity(event))}">
|
||||
<div class="event-time">${escapeHtml(ts)}</div>
|
||||
<div class="event-type">${escapeHtml(typeLabel)}</div>
|
||||
<div>${escapeHtml(message)}</div>
|
||||
</div>
|
||||
`;
|
||||
})
|
||||
.join("");
|
||||
|
||||
dom.claudeEventFeed.innerHTML = rows || '<div class="event-row"><div class="event-time">-</div><div class="event-type">-</div><div>No Claude trace events.</div></div>';
|
||||
}
|
||||
|
||||
async function refreshEvents() {
|
||||
const limit = Number(dom.eventsLimit.value || "150");
|
||||
const params = new URLSearchParams({
|
||||
@@ -1471,6 +1548,20 @@ async function refreshEvents() {
|
||||
renderEventFeed(payload.events || []);
|
||||
}
|
||||
|
||||
async function refreshClaudeTrace() {
|
||||
const limit = Number(dom.claudeEventsLimit.value || "150");
|
||||
const params = new URLSearchParams({
|
||||
limit: String(limit),
|
||||
});
|
||||
|
||||
if (state.selectedSessionId) {
|
||||
params.set("sessionId", state.selectedSessionId);
|
||||
}
|
||||
|
||||
const payload = await apiRequest(`/api/claude-trace?${params.toString()}`);
|
||||
renderClaudeTraceFeed(payload.events || []);
|
||||
}
|
||||
|
||||
async function startRun(event) {
|
||||
event.preventDefault();
|
||||
|
||||
@@ -1486,6 +1577,12 @@ async function startRun(event) {
|
||||
return;
|
||||
}
|
||||
|
||||
const runtimeContext = parseJsonSafe(dom.runRuntimeContext.value, {});
|
||||
if (typeof runtimeContext !== "object" || Array.isArray(runtimeContext) || !runtimeContext) {
|
||||
showRunStatus("Runtime Context Overrides must be a JSON object.", true);
|
||||
return;
|
||||
}
|
||||
|
||||
const manifestSelection = dom.runManifestSelect.value.trim();
|
||||
|
||||
const payload = {
|
||||
@@ -1494,9 +1591,21 @@ async function startRun(event) {
|
||||
provider: dom.runProvider.value,
|
||||
topologyHint: dom.runTopologyHint.value.trim() || undefined,
|
||||
initialFlags: flags,
|
||||
runtimeContextOverrides: runtimeContext,
|
||||
simulateValidationNodeIds: fromCsv(dom.runValidationNodes.value),
|
||||
};
|
||||
|
||||
const selectedSessionMetadata = state.sessionMetadata.find(
|
||||
(entry) => entry?.sessionId === state.selectedSessionId,
|
||||
);
|
||||
if (
|
||||
selectedSessionMetadata &&
|
||||
(selectedSessionMetadata.sessionStatus === "active" ||
|
||||
selectedSessionMetadata.sessionStatus === "suspended")
|
||||
) {
|
||||
payload.sessionId = selectedSessionMetadata.sessionId;
|
||||
}
|
||||
|
||||
if (manifestSelection === RUN_MANIFEST_EDITOR_VALUE) {
|
||||
const manifestFromEditor = parseJsonSafe(dom.manifestEditor.value, null);
|
||||
if (!manifestFromEditor) {
|
||||
@@ -1527,6 +1636,7 @@ async function startRun(event) {
|
||||
dom.sessionSelect.value = run.sessionId;
|
||||
await refreshGraph();
|
||||
await refreshEvents();
|
||||
await refreshClaudeTrace();
|
||||
} catch (error) {
|
||||
showRunStatus(error instanceof Error ? error.message : String(error), true);
|
||||
}
|
||||
@@ -1547,6 +1657,67 @@ async function cancelActiveRun() {
|
||||
await loadSessions();
|
||||
await refreshGraph();
|
||||
await refreshEvents();
|
||||
await refreshClaudeTrace();
|
||||
} catch (error) {
|
||||
showRunStatus(error instanceof Error ? error.message : String(error), true);
|
||||
}
|
||||
}
|
||||
|
||||
async function createSessionFromUi() {
|
||||
const projectPath = dom.sessionProjectPath.value.trim();
|
||||
if (!projectPath) {
|
||||
showRunStatus("Project path is required to create a session.", true);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const payload = await apiRequest("/api/sessions", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
projectPath,
|
||||
}),
|
||||
});
|
||||
|
||||
const created = payload.session;
|
||||
if (created?.sessionId) {
|
||||
state.selectedSessionId = created.sessionId;
|
||||
showRunStatus(`Session ${created.sessionId} created.`);
|
||||
} else {
|
||||
showRunStatus("Session created.");
|
||||
}
|
||||
await loadSessions();
|
||||
if (state.selectedSessionId) {
|
||||
dom.sessionSelect.value = state.selectedSessionId;
|
||||
await refreshGraph();
|
||||
await refreshEvents();
|
||||
await refreshClaudeTrace();
|
||||
}
|
||||
} catch (error) {
|
||||
showRunStatus(error instanceof Error ? error.message : String(error), true);
|
||||
}
|
||||
}
|
||||
|
||||
async function closeSelectedSessionFromUi() {
|
||||
const sessionId = state.selectedSessionId || dom.sessionSelect.value;
|
||||
if (!sessionId) {
|
||||
showRunStatus("Select a session before closing.", true);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const payload = await apiRequest(`/api/sessions/${encodeURIComponent(sessionId)}/close`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
mergeToProject: dom.sessionCloseMerge.checked,
|
||||
}),
|
||||
});
|
||||
|
||||
const nextStatus = payload?.session?.sessionStatus || "closed";
|
||||
showRunStatus(`Session ${sessionId} closed with status ${nextStatus}.`);
|
||||
await loadSessions();
|
||||
await refreshGraph();
|
||||
await refreshEvents();
|
||||
await refreshClaudeTrace();
|
||||
} catch (error) {
|
||||
showRunStatus(error instanceof Error ? error.message : String(error), true);
|
||||
}
|
||||
@@ -1597,6 +1768,7 @@ async function saveLimits(event) {
|
||||
topologyMaxDepth: Number(dom.cfgTopologyDepth.value),
|
||||
topologyMaxRetries: Number(dom.cfgTopologyRetries.value),
|
||||
relationshipMaxChildren: Number(dom.cfgRelationshipChildren.value),
|
||||
mergeConflictMaxAttempts: Number(dom.cfgMergeConflictAttempts.value),
|
||||
portBase: Number(dom.cfgPortBase.value),
|
||||
portBlockSize: Number(dom.cfgPortBlockSize.value),
|
||||
portBlockCount: Number(dom.cfgPortBlockCount.value),
|
||||
@@ -1695,6 +1867,7 @@ function bindUiEvents() {
|
||||
state.selectedSessionId = dom.sessionSelect.value;
|
||||
await refreshGraph();
|
||||
await refreshEvents();
|
||||
await refreshClaudeTrace();
|
||||
});
|
||||
|
||||
dom.graphManifestSelect.addEventListener("change", async () => {
|
||||
@@ -1714,15 +1887,26 @@ function bindUiEvents() {
|
||||
await refreshEvents();
|
||||
});
|
||||
|
||||
dom.claudeEventsRefresh.addEventListener("click", async () => {
|
||||
await refreshClaudeTrace();
|
||||
});
|
||||
|
||||
dom.historyRefresh.addEventListener("click", async () => {
|
||||
await loadSessions();
|
||||
await refreshGraph();
|
||||
await refreshClaudeTrace();
|
||||
});
|
||||
|
||||
dom.runForm.addEventListener("submit", startRun);
|
||||
dom.killRun.addEventListener("click", () => {
|
||||
void cancelActiveRun();
|
||||
});
|
||||
dom.sessionCreate.addEventListener("click", () => {
|
||||
void createSessionFromUi();
|
||||
});
|
||||
dom.sessionClose.addEventListener("click", () => {
|
||||
void closeSelectedSessionFromUi();
|
||||
});
|
||||
|
||||
dom.notificationsForm.addEventListener("submit", (event) => {
|
||||
void saveNotifications(event);
|
||||
@@ -1830,6 +2014,7 @@ async function refreshAll() {
|
||||
|
||||
await refreshGraph();
|
||||
await refreshEvents();
|
||||
await refreshClaudeTrace();
|
||||
}
|
||||
|
||||
async function initialize() {
|
||||
@@ -1860,6 +2045,10 @@ async function initialize() {
|
||||
void refreshEvents();
|
||||
}, 3000);
|
||||
|
||||
setInterval(() => {
|
||||
void refreshClaudeTrace();
|
||||
}, 3000);
|
||||
|
||||
setInterval(() => {
|
||||
void refreshGraph();
|
||||
}, 7000);
|
||||
|
||||
@@ -75,6 +75,10 @@
|
||||
Initial Flags (JSON)
|
||||
<textarea id="run-flags" rows="3" placeholder='{"needs_bootstrap": true}'></textarea>
|
||||
</label>
|
||||
<label>
|
||||
Runtime Context Overrides (JSON)
|
||||
<textarea id="run-runtime-context" rows="3" placeholder='{"repo":"ai_ops","ticket":"AIOPS-123"}'></textarea>
|
||||
</label>
|
||||
<label>
|
||||
Simulate Validation Nodes (CSV)
|
||||
<input id="run-validation-nodes" type="text" placeholder="coder-1,qa-1" />
|
||||
@@ -86,6 +90,23 @@
|
||||
</form>
|
||||
<div id="run-status" class="subtle"></div>
|
||||
|
||||
<div class="divider"></div>
|
||||
<h3>Session Controls</h3>
|
||||
<form id="session-form" class="stacked-form">
|
||||
<label>
|
||||
Project Path (absolute)
|
||||
<input id="session-project-path" type="text" placeholder="/abs/path/to/project" />
|
||||
</label>
|
||||
<label class="inline-checkbox">
|
||||
<input id="session-close-merge" type="checkbox" />
|
||||
Merge base into project when closing selected session
|
||||
</label>
|
||||
<div class="inline-actions">
|
||||
<button id="session-create" type="button">Create Session</button>
|
||||
<button id="session-close" type="button">Close Selected Session</button>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
<div class="divider"></div>
|
||||
<h3>Node Inspector</h3>
|
||||
<div id="node-inspector" class="inspector empty">Select a graph node.</div>
|
||||
@@ -109,6 +130,24 @@
|
||||
<div id="event-feed" class="event-feed"></div>
|
||||
</section>
|
||||
|
||||
<section class="panel claude-panel">
|
||||
<div class="panel-head">
|
||||
<h2>Claude Trace</h2>
|
||||
<div class="panel-actions">
|
||||
<label>
|
||||
Limit
|
||||
<select id="claude-events-limit">
|
||||
<option value="80">80</option>
|
||||
<option value="150" selected>150</option>
|
||||
<option value="300">300</option>
|
||||
</select>
|
||||
</label>
|
||||
<button id="claude-events-refresh" type="button">Refresh</button>
|
||||
</div>
|
||||
</div>
|
||||
<div id="claude-event-feed" class="event-feed claude-event-feed"></div>
|
||||
</section>
|
||||
|
||||
<section class="panel history-panel">
|
||||
<div class="panel-head">
|
||||
<h2>Run History</h2>
|
||||
@@ -163,6 +202,7 @@
|
||||
<select id="cfg-security-mode">
|
||||
<option value="hard_abort">hard_abort</option>
|
||||
<option value="validation_fail">validation_fail</option>
|
||||
<option value="dangerous_warn_only">dangerous_warn_only</option>
|
||||
</select>
|
||||
</label>
|
||||
<label>
|
||||
@@ -192,6 +232,7 @@
|
||||
<label>AGENT_TOPOLOGY_MAX_DEPTH<input id="cfg-topology-depth" type="number" min="1" /></label>
|
||||
<label>AGENT_TOPOLOGY_MAX_RETRIES<input id="cfg-topology-retries" type="number" min="0" /></label>
|
||||
<label>AGENT_RELATIONSHIP_MAX_CHILDREN<input id="cfg-relationship-children" type="number" min="1" /></label>
|
||||
<label>AGENT_MERGE_CONFLICT_MAX_ATTEMPTS<input id="cfg-merge-conflict-attempts" type="number" min="1" /></label>
|
||||
<label>AGENT_PORT_BASE<input id="cfg-port-base" type="number" min="1" /></label>
|
||||
<label>AGENT_PORT_BLOCK_SIZE<input id="cfg-port-block-size" type="number" min="1" /></label>
|
||||
<label>AGENT_PORT_BLOCK_COUNT<input id="cfg-port-block-count" type="number" min="1" /></label>
|
||||
|
||||
@@ -79,7 +79,8 @@ p {
|
||||
grid-template-columns: minmax(0, 2fr) minmax(280px, 1fr);
|
||||
grid-template-areas:
|
||||
"graph side"
|
||||
"feed history"
|
||||
"feed claude"
|
||||
"history history"
|
||||
"config config";
|
||||
}
|
||||
|
||||
@@ -129,6 +130,10 @@ p {
|
||||
grid-area: history;
|
||||
}
|
||||
|
||||
.claude-panel {
|
||||
grid-area: claude;
|
||||
}
|
||||
|
||||
.config-panel {
|
||||
grid-area: config;
|
||||
}
|
||||
@@ -142,6 +147,12 @@ label {
|
||||
letter-spacing: 0.015em;
|
||||
}
|
||||
|
||||
label.inline-checkbox {
|
||||
flex-direction: row;
|
||||
align-items: center;
|
||||
gap: 0.45rem;
|
||||
}
|
||||
|
||||
input,
|
||||
select,
|
||||
textarea,
|
||||
@@ -308,6 +319,14 @@ button.danger {
|
||||
color: var(--critical);
|
||||
}
|
||||
|
||||
.claude-event-feed .event-row {
|
||||
grid-template-columns: 110px 150px 1fr;
|
||||
}
|
||||
|
||||
.claude-event-feed .event-type {
|
||||
font-size: 0.7rem;
|
||||
}
|
||||
|
||||
.history-table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
@@ -353,6 +372,22 @@ button.danger {
|
||||
border-color: rgba(255, 201, 74, 0.6);
|
||||
}
|
||||
|
||||
.status-active {
|
||||
color: var(--accent-cool);
|
||||
border-color: rgba(86, 195, 255, 0.6);
|
||||
}
|
||||
|
||||
.status-suspended,
|
||||
.status-closed_with_conflicts {
|
||||
color: var(--warn);
|
||||
border-color: rgba(255, 201, 74, 0.6);
|
||||
}
|
||||
|
||||
.status-closed {
|
||||
color: var(--muted);
|
||||
border-color: rgba(155, 184, 207, 0.45);
|
||||
}
|
||||
|
||||
.status-unknown {
|
||||
color: var(--muted);
|
||||
border-color: rgba(155, 184, 207, 0.45);
|
||||
@@ -463,6 +498,7 @@ button.danger {
|
||||
"graph"
|
||||
"side"
|
||||
"feed"
|
||||
"claude"
|
||||
"history"
|
||||
"config";
|
||||
}
|
||||
|
||||
840
src/ui/server.ts
840
src/ui/server.ts
@@ -1,27 +1,21 @@
|
||||
import "dotenv/config";
|
||||
import { createServer, type IncomingMessage, type ServerResponse } from "node:http";
|
||||
import { pathToFileURL } from "node:url";
|
||||
import { resolve } from "node:path";
|
||||
import { buildSessionGraphInsight, buildSessionSummaries } from "./session-insights.js";
|
||||
import { UiConfigStore, type LimitSettings, type RuntimeNotificationSettings, type SecurityPolicySettings } from "./config-store.js";
|
||||
import { ManifestStore } from "./manifest-store.js";
|
||||
import { filterRuntimeEvents, readRuntimeEvents } from "./runtime-events-store.js";
|
||||
import { parseJsonBody, sendJson, methodNotAllowed, notFound, serveStaticFile } from "./http-utils.js";
|
||||
import { readRunMetaBySession, UiRunService, type RunExecutionMode } from "./run-service.js";
|
||||
import type { RunProvider } from "./provider-executor.js";
|
||||
|
||||
type StartRunRequest = {
|
||||
prompt: string;
|
||||
manifestPath?: string;
|
||||
manifest?: unknown;
|
||||
sessionId?: string;
|
||||
topologyHint?: string;
|
||||
initialFlags?: Record<string, boolean>;
|
||||
runtimeContextOverrides?: Record<string, string | number | boolean>;
|
||||
simulateValidationNodeIds?: string[];
|
||||
executionMode?: RunExecutionMode;
|
||||
provider?: RunProvider;
|
||||
};
|
||||
import express from "express";
|
||||
import cors from "cors";
|
||||
import { z } from "zod";
|
||||
import { buildSessionGraphInsight, buildSessionSummaries } from "../telemetry/session-insights.js";
|
||||
import {
|
||||
UiConfigStore,
|
||||
type LimitSettings,
|
||||
type RuntimeNotificationSettings,
|
||||
type SecurityPolicySettings,
|
||||
} from "../store/config-store.js";
|
||||
import { ManifestStore } from "../agents/manifest-store.js";
|
||||
import { filterRuntimeEvents, readRuntimeEvents } from "../telemetry/runtime-events-store.js";
|
||||
import { filterClaudeTraceEvents, readClaudeTraceEvents } from "./claude-trace-store.js";
|
||||
import { readRunMetaBySession, UiRunService, type RunExecutionMode } from "../runs/run-service.js";
|
||||
import type { RunProvider } from "../agents/provider-executor.js";
|
||||
|
||||
function parsePort(value: string | undefined): number {
|
||||
const parsed = Number(value ?? "4317");
|
||||
@@ -31,7 +25,7 @@ function parsePort(value: string | undefined): number {
|
||||
return parsed;
|
||||
}
|
||||
|
||||
function parseLimit(value: string | null, fallback: number): number {
|
||||
function parseLimit(value: string | null | undefined, fallback: number): number {
|
||||
if (!value) {
|
||||
return fallback;
|
||||
}
|
||||
@@ -42,448 +36,52 @@ function parseLimit(value: string | null, fallback: number): number {
|
||||
return parsed;
|
||||
}
|
||||
|
||||
function toRelativePathFromApi(urlPath: string): string {
|
||||
return decodeURIComponent(urlPath);
|
||||
}
|
||||
|
||||
function ensureBooleanRecord(value: unknown): Record<string, boolean> {
|
||||
if (!value || typeof value !== "object") {
|
||||
return {};
|
||||
function ensureNonEmptyString(value: unknown, field: string): string {
|
||||
if (typeof value !== "string" || value.trim().length === 0) {
|
||||
throw new Error(`Field "${field}" is required.`);
|
||||
}
|
||||
|
||||
const output: Record<string, boolean> = {};
|
||||
for (const [key, raw] of Object.entries(value as Record<string, unknown>)) {
|
||||
if (typeof raw === "boolean") {
|
||||
output[key] = raw;
|
||||
}
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
function ensureStringArray(value: unknown): string[] {
|
||||
if (!Array.isArray(value)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const output: string[] = [];
|
||||
for (const item of value) {
|
||||
if (typeof item !== "string") {
|
||||
continue;
|
||||
}
|
||||
const normalized = item.trim();
|
||||
if (!normalized) {
|
||||
continue;
|
||||
}
|
||||
output.push(normalized);
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
function ensureRuntimeContext(value: unknown): Record<string, string | number | boolean> {
|
||||
if (!value || typeof value !== "object") {
|
||||
return {};
|
||||
}
|
||||
|
||||
const output: Record<string, string | number | boolean> = {};
|
||||
for (const [key, raw] of Object.entries(value as Record<string, unknown>)) {
|
||||
if (typeof raw === "string" || typeof raw === "number" || typeof raw === "boolean") {
|
||||
output[key] = raw;
|
||||
}
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
function ensureExecutionMode(value: unknown): RunExecutionMode {
|
||||
return value === "provider" ? "provider" : "mock";
|
||||
}
|
||||
|
||||
function ensureProvider(value: unknown): RunProvider {
|
||||
return value === "claude" ? "claude" : "codex";
|
||||
return value.trim();
|
||||
}
|
||||
|
||||
async function readRuntimePaths(configStore: UiConfigStore, workspaceRoot: string): Promise<{
|
||||
stateRoot: string;
|
||||
runtimeEventLogPath: string;
|
||||
claudeTraceLogPath: string;
|
||||
}> {
|
||||
const snapshot = await configStore.readSnapshot();
|
||||
return {
|
||||
stateRoot: resolve(workspaceRoot, snapshot.paths.stateRoot),
|
||||
runtimeEventLogPath: resolve(workspaceRoot, snapshot.paths.runtimeEventLogPath),
|
||||
claudeTraceLogPath: resolve(workspaceRoot, snapshot.paths.claudeTraceLogPath),
|
||||
};
|
||||
}
|
||||
|
||||
async function handleApiRequest(input: {
|
||||
request: IncomingMessage;
|
||||
response: ServerResponse;
|
||||
workspaceRoot: string;
|
||||
configStore: UiConfigStore;
|
||||
const StartRunSchema = z.object({
|
||||
prompt: z.string().min(1, 'Field "prompt" is required.'),
|
||||
manifestPath: z.string().optional(),
|
||||
manifest: z.unknown().optional(),
|
||||
sessionId: z.string().optional(),
|
||||
topologyHint: z.string().optional(),
|
||||
initialFlags: z.record(z.string(), z.boolean()).optional(),
|
||||
runtimeContextOverrides: z.record(z.string(), z.union([z.string(), z.number(), z.boolean()])).optional(),
|
||||
simulateValidationNodeIds: z.array(z.string()).optional(),
|
||||
executionMode: z.enum(["mock", "provider"]).optional(),
|
||||
provider: z.enum(["claude", "codex"]).optional(),
|
||||
});
|
||||
|
||||
type StartRunBody = z.infer<typeof StartRunSchema>;
|
||||
|
||||
async function resolveManifestFromRunRequest(input: {
|
||||
body: StartRunBody;
|
||||
manifestStore: ManifestStore;
|
||||
runService: UiRunService;
|
||||
}): Promise<boolean> {
|
||||
const { request, response, workspaceRoot, configStore, manifestStore, runService } = input;
|
||||
const requestUrl = new URL(request.url ?? "/", `http://${request.headers.host ?? "localhost"}`);
|
||||
const { pathname } = requestUrl;
|
||||
const method = request.method ?? "GET";
|
||||
|
||||
if (!pathname.startsWith("/api/")) {
|
||||
return false;
|
||||
}): Promise<unknown> {
|
||||
if (input.body.manifest !== undefined) {
|
||||
return input.body.manifest;
|
||||
}
|
||||
|
||||
try {
|
||||
if (pathname === "/api/health") {
|
||||
if (method !== "GET") {
|
||||
methodNotAllowed(response);
|
||||
return true;
|
||||
}
|
||||
sendJson(response, 200, {
|
||||
ok: true,
|
||||
now: new Date().toISOString(),
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
if (pathname === "/api/config") {
|
||||
if (method !== "GET") {
|
||||
methodNotAllowed(response);
|
||||
return true;
|
||||
}
|
||||
|
||||
const snapshot = await configStore.readSnapshot();
|
||||
sendJson(response, 200, {
|
||||
ok: true,
|
||||
config: snapshot,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
if (pathname === "/api/config/runtime-events") {
|
||||
if (method !== "PUT") {
|
||||
methodNotAllowed(response);
|
||||
return true;
|
||||
}
|
||||
|
||||
const body = await parseJsonBody<RuntimeNotificationSettings>(request);
|
||||
const snapshot = await configStore.updateRuntimeEvents(body);
|
||||
sendJson(response, 200, {
|
||||
ok: true,
|
||||
config: snapshot,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
if (pathname === "/api/config/security") {
|
||||
if (method !== "PUT") {
|
||||
methodNotAllowed(response);
|
||||
return true;
|
||||
}
|
||||
|
||||
const body = await parseJsonBody<SecurityPolicySettings>(request);
|
||||
const snapshot = await configStore.updateSecurityPolicy(body);
|
||||
sendJson(response, 200, {
|
||||
ok: true,
|
||||
config: snapshot,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
if (pathname === "/api/config/limits") {
|
||||
if (method !== "PUT") {
|
||||
methodNotAllowed(response);
|
||||
return true;
|
||||
}
|
||||
|
||||
const body = await parseJsonBody<LimitSettings>(request);
|
||||
const snapshot = await configStore.updateLimits(body);
|
||||
sendJson(response, 200, {
|
||||
ok: true,
|
||||
config: snapshot,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
if (pathname === "/api/manifests") {
|
||||
if (method !== "GET") {
|
||||
methodNotAllowed(response);
|
||||
return true;
|
||||
}
|
||||
|
||||
const listing = await manifestStore.list();
|
||||
sendJson(response, 200, {
|
||||
ok: true,
|
||||
manifests: listing.paths,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
if (pathname === "/api/manifests/read") {
|
||||
if (method !== "GET") {
|
||||
methodNotAllowed(response);
|
||||
return true;
|
||||
}
|
||||
|
||||
const manifestPath = requestUrl.searchParams.get("path");
|
||||
if (!manifestPath) {
|
||||
sendJson(response, 400, {
|
||||
ok: false,
|
||||
error: 'Query parameter "path" is required.',
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
const record = await manifestStore.read(manifestPath);
|
||||
sendJson(response, 200, {
|
||||
ok: true,
|
||||
manifest: record,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
if (pathname === "/api/manifests/validate") {
|
||||
if (method !== "POST") {
|
||||
methodNotAllowed(response);
|
||||
return true;
|
||||
}
|
||||
|
||||
const body = await parseJsonBody<{ manifest: unknown }>(request);
|
||||
const manifest = await manifestStore.validate(body.manifest);
|
||||
sendJson(response, 200, {
|
||||
ok: true,
|
||||
manifest,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
if (pathname === "/api/manifests/save") {
|
||||
if (method !== "PUT") {
|
||||
methodNotAllowed(response);
|
||||
return true;
|
||||
}
|
||||
|
||||
const body = await parseJsonBody<{ path: string; manifest: unknown }>(request);
|
||||
if (!body.path || typeof body.path !== "string") {
|
||||
sendJson(response, 400, {
|
||||
ok: false,
|
||||
error: 'Field "path" is required.',
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
const record = await manifestStore.save(body.path, body.manifest);
|
||||
sendJson(response, 200, {
|
||||
ok: true,
|
||||
manifest: record,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
if (pathname === "/api/runtime-events") {
|
||||
if (method !== "GET") {
|
||||
methodNotAllowed(response);
|
||||
return true;
|
||||
}
|
||||
|
||||
const { runtimeEventLogPath } = await readRuntimePaths(configStore, workspaceRoot);
|
||||
const limit = parseLimit(requestUrl.searchParams.get("limit"), 200);
|
||||
const sessionId = requestUrl.searchParams.get("sessionId") ?? undefined;
|
||||
const events = filterRuntimeEvents(await readRuntimeEvents(runtimeEventLogPath), {
|
||||
...(sessionId ? { sessionId } : {}),
|
||||
limit,
|
||||
});
|
||||
|
||||
sendJson(response, 200, {
|
||||
ok: true,
|
||||
events,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
if (pathname === "/api/sessions") {
|
||||
if (method !== "GET") {
|
||||
methodNotAllowed(response);
|
||||
return true;
|
||||
}
|
||||
|
||||
const { stateRoot, runtimeEventLogPath } = await readRuntimePaths(configStore, workspaceRoot);
|
||||
const sessions = await buildSessionSummaries({
|
||||
stateRoot,
|
||||
runtimeEventLogPath,
|
||||
});
|
||||
|
||||
sendJson(response, 200, {
|
||||
ok: true,
|
||||
sessions,
|
||||
runs: runService.listRuns(),
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
if (pathname === "/api/sessions/graph") {
|
||||
if (method !== "GET") {
|
||||
methodNotAllowed(response);
|
||||
return true;
|
||||
}
|
||||
|
||||
const sessionId = requestUrl.searchParams.get("sessionId") ?? "";
|
||||
if (!sessionId) {
|
||||
sendJson(response, 400, {
|
||||
ok: false,
|
||||
error: 'Query parameter "sessionId" is required.',
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
const { stateRoot, runtimeEventLogPath } = await readRuntimePaths(configStore, workspaceRoot);
|
||||
const explicitManifestPath = requestUrl.searchParams.get("manifestPath");
|
||||
const runMeta = await readRunMetaBySession({ stateRoot, sessionId });
|
||||
const manifestPath = explicitManifestPath ?? runMeta?.manifestPath;
|
||||
|
||||
if (!manifestPath) {
|
||||
sendJson(response, 400, {
|
||||
ok: false,
|
||||
error: "No manifestPath available for this session. Provide one in query string.",
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
const manifestRecord = await manifestStore.read(manifestPath);
|
||||
const graph = await buildSessionGraphInsight({
|
||||
stateRoot,
|
||||
runtimeEventLogPath,
|
||||
sessionId,
|
||||
manifest: manifestRecord.manifest,
|
||||
});
|
||||
|
||||
sendJson(response, 200, {
|
||||
ok: true,
|
||||
graph,
|
||||
manifestPath: manifestRecord.path,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
if (pathname === "/api/runs") {
|
||||
if (method === "GET") {
|
||||
sendJson(response, 200, {
|
||||
ok: true,
|
||||
runs: runService.listRuns(),
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
if (method === "POST") {
|
||||
const body = await parseJsonBody<StartRunRequest>(request);
|
||||
if (typeof body.prompt !== "string" || body.prompt.trim().length === 0) {
|
||||
sendJson(response, 400, {
|
||||
ok: false,
|
||||
error: 'Field "prompt" is required.',
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
const manifestSource = (() => {
|
||||
if (body.manifest !== undefined) {
|
||||
return body.manifest;
|
||||
}
|
||||
if (typeof body.manifestPath === "string" && body.manifestPath.trim().length > 0) {
|
||||
return undefined;
|
||||
}
|
||||
return undefined;
|
||||
})();
|
||||
|
||||
const resolvedManifest = manifestSource ?? (() => {
|
||||
if (!body.manifestPath) {
|
||||
return undefined;
|
||||
}
|
||||
return body.manifestPath;
|
||||
})();
|
||||
|
||||
let manifest: unknown;
|
||||
if (typeof resolvedManifest === "string") {
|
||||
manifest = (await manifestStore.read(resolvedManifest)).source;
|
||||
} else if (resolvedManifest !== undefined) {
|
||||
manifest = resolvedManifest;
|
||||
}
|
||||
|
||||
if (!manifest) {
|
||||
sendJson(response, 400, {
|
||||
ok: false,
|
||||
error: "A manifest or manifestPath is required to start a run.",
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
const record = await runService.startRun({
|
||||
prompt: body.prompt,
|
||||
manifest,
|
||||
manifestPath: body.manifestPath,
|
||||
sessionId: body.sessionId,
|
||||
topologyHint: body.topologyHint,
|
||||
initialFlags: ensureBooleanRecord(body.initialFlags),
|
||||
runtimeContextOverrides: ensureRuntimeContext(body.runtimeContextOverrides),
|
||||
simulateValidationNodeIds: ensureStringArray(body.simulateValidationNodeIds),
|
||||
executionMode: ensureExecutionMode(body.executionMode),
|
||||
provider: ensureProvider(body.provider),
|
||||
});
|
||||
|
||||
sendJson(response, 202, {
|
||||
ok: true,
|
||||
run: record,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
methodNotAllowed(response);
|
||||
return true;
|
||||
}
|
||||
|
||||
if (pathname.startsWith("/api/runs/") && pathname.endsWith("/cancel")) {
|
||||
if (method !== "POST") {
|
||||
methodNotAllowed(response);
|
||||
return true;
|
||||
}
|
||||
|
||||
const runId = toRelativePathFromApi(pathname.slice("/api/runs/".length, -"/cancel".length));
|
||||
const run = await runService.cancelRun(runId);
|
||||
sendJson(response, 200, {
|
||||
ok: true,
|
||||
run,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
if (pathname.startsWith("/api/runs/")) {
|
||||
if (method !== "GET") {
|
||||
methodNotAllowed(response);
|
||||
return true;
|
||||
}
|
||||
|
||||
const runId = toRelativePathFromApi(pathname.slice("/api/runs/".length));
|
||||
const run = runService.getRun(runId);
|
||||
if (!run) {
|
||||
sendJson(response, 404, {
|
||||
ok: false,
|
||||
error: `Run \"${runId}\" was not found.`,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
sendJson(response, 200, {
|
||||
ok: true,
|
||||
run,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
notFound(response);
|
||||
return true;
|
||||
} catch (error) {
|
||||
sendJson(response, 400, {
|
||||
ok: false,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
return true;
|
||||
if (input.body.manifestPath) {
|
||||
return (await input.manifestStore.read(input.body.manifestPath)).source;
|
||||
}
|
||||
throw new Error("A manifest or manifestPath is required to start a run.");
|
||||
}
|
||||
|
||||
export async function startUiServer(input: {
|
||||
@@ -494,72 +92,352 @@ export async function startUiServer(input: {
|
||||
close: () => Promise<void>;
|
||||
}> {
|
||||
const workspaceRoot = resolve(input.workspaceRoot);
|
||||
const staticRoot = resolve(workspaceRoot, "src/ui/public");
|
||||
const staticRoot = resolve(workspaceRoot, "ui/dist");
|
||||
|
||||
const configStore = new UiConfigStore({ workspaceRoot });
|
||||
const manifestStore = new ManifestStore({ workspaceRoot });
|
||||
const runService = new UiRunService({ workspaceRoot });
|
||||
|
||||
const server = createServer(async (request, response) => {
|
||||
const handledApi = await handleApiRequest({
|
||||
request,
|
||||
response,
|
||||
workspaceRoot,
|
||||
configStore,
|
||||
manifestStore,
|
||||
runService,
|
||||
});
|
||||
const app = express();
|
||||
app.use(cors());
|
||||
app.use(express.json({ limit: "50mb" }));
|
||||
|
||||
if (handledApi) {
|
||||
app.get("/api/health", (req, res) => {
|
||||
res.json({ ok: true, now: new Date().toISOString() });
|
||||
});
|
||||
|
||||
app.get("/api/config", async (req, res, next) => {
|
||||
try {
|
||||
const snapshot = await configStore.readSnapshot();
|
||||
res.json({ ok: true, config: snapshot });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.put("/api/config/runtime-events", async (req, res, next) => {
|
||||
try {
|
||||
const snapshot = await configStore.updateRuntimeEvents(req.body as RuntimeNotificationSettings);
|
||||
res.json({ ok: true, config: snapshot });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.put("/api/config/security", async (req, res, next) => {
|
||||
try {
|
||||
const snapshot = await configStore.updateSecurityPolicy(req.body as SecurityPolicySettings);
|
||||
res.json({ ok: true, config: snapshot });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.put("/api/config/limits", async (req, res, next) => {
|
||||
try {
|
||||
const snapshot = await configStore.updateLimits(req.body as LimitSettings);
|
||||
res.json({ ok: true, config: snapshot });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.get("/api/manifests", async (req, res, next) => {
|
||||
try {
|
||||
const listing = await manifestStore.list();
|
||||
res.json({ ok: true, manifests: listing.paths });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.get("/api/manifests/read", async (req, res, next) => {
|
||||
try {
|
||||
const manifestPath = req.query.path as string | undefined;
|
||||
if (!manifestPath) {
|
||||
res.status(400).json({ ok: false, error: 'Query parameter "path" is required.' });
|
||||
return;
|
||||
}
|
||||
const record = await manifestStore.read(manifestPath);
|
||||
res.json({ ok: true, manifest: record });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.post("/api/manifests/validate", async (req, res, next) => {
|
||||
try {
|
||||
const manifest = await manifestStore.validate((req.body as { manifest?: unknown }).manifest);
|
||||
res.json({ ok: true, manifest });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.put("/api/manifests/save", async (req, res, next) => {
|
||||
try {
|
||||
const { path, manifest } = req.body as { path?: unknown; manifest?: unknown };
|
||||
if (!path || typeof path !== "string") {
|
||||
res.status(400).json({ ok: false, error: 'Field "path" is required.' });
|
||||
return;
|
||||
}
|
||||
const record = await manifestStore.save(path, manifest);
|
||||
res.json({ ok: true, manifest: record });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.get("/api/runtime-events", async (req, res, next) => {
|
||||
try {
|
||||
const { runtimeEventLogPath } = await readRuntimePaths(configStore, workspaceRoot);
|
||||
const limit = parseLimit(req.query.limit as string | undefined, 200);
|
||||
const sessionId = (req.query.sessionId as string) || undefined;
|
||||
const events = filterRuntimeEvents(await readRuntimeEvents(runtimeEventLogPath), {
|
||||
...(sessionId ? { sessionId } : {}),
|
||||
limit,
|
||||
});
|
||||
res.json({ ok: true, events });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.get("/api/claude-trace", async (req, res, next) => {
|
||||
try {
|
||||
const { claudeTraceLogPath } = await readRuntimePaths(configStore, workspaceRoot);
|
||||
const limit = parseLimit(req.query.limit as string | undefined, 200);
|
||||
const sessionId = (req.query.sessionId as string) || undefined;
|
||||
const events = filterClaudeTraceEvents(await readClaudeTraceEvents(claudeTraceLogPath), {
|
||||
...(sessionId ? { sessionId } : {}),
|
||||
limit,
|
||||
});
|
||||
res.json({ ok: true, events });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.post("/api/sessions", async (req, res, next) => {
|
||||
try {
|
||||
const projectPath = ensureNonEmptyString((req.body as { projectPath?: unknown }).projectPath, "projectPath");
|
||||
const session = await runService.createSession({ projectPath });
|
||||
res.status(201).json({ ok: true, session });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.get("/api/sessions", async (req, res, next) => {
|
||||
try {
|
||||
const { stateRoot, runtimeEventLogPath } = await readRuntimePaths(configStore, workspaceRoot);
|
||||
const sessions = await buildSessionSummaries({ stateRoot, runtimeEventLogPath });
|
||||
const metadata = await runService.listSessions();
|
||||
res.json({ ok: true, sessions, sessionMetadata: metadata, runs: runService.listRuns() });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.get("/api/sessions/graph", async (req, res, next) => {
|
||||
try {
|
||||
const sessionId = (req.query.sessionId as string) ?? "";
|
||||
if (!sessionId) {
|
||||
res.status(400).json({ ok: false, error: 'Query parameter "sessionId" is required.' });
|
||||
return;
|
||||
}
|
||||
|
||||
const { stateRoot, runtimeEventLogPath } = await readRuntimePaths(configStore, workspaceRoot);
|
||||
const explicitManifestPath = req.query.manifestPath as string | undefined;
|
||||
const runMeta = await readRunMetaBySession({ stateRoot, sessionId });
|
||||
const manifestPath = explicitManifestPath ?? runMeta?.manifestPath;
|
||||
|
||||
if (!manifestPath) {
|
||||
res.status(400).json({
|
||||
ok: false,
|
||||
error: "No manifestPath available for this session. Provide one in query string.",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const manifestRecord = await manifestStore.read(manifestPath);
|
||||
const graph = await buildSessionGraphInsight({
|
||||
stateRoot,
|
||||
runtimeEventLogPath,
|
||||
sessionId,
|
||||
manifest: manifestRecord.manifest,
|
||||
});
|
||||
|
||||
res.json({ ok: true, graph, manifestPath: manifestRecord.path });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.post("/api/sessions/:sessionId/run", async (req, res, next) => {
|
||||
try {
|
||||
const sessionId = req.params.sessionId;
|
||||
if (!sessionId) {
|
||||
res.status(400).json({ ok: false, error: "Session id is required." });
|
||||
return;
|
||||
}
|
||||
|
||||
const parseResult = StartRunSchema.safeParse(req.body);
|
||||
if (!parseResult.success) {
|
||||
res.status(400).json({ ok: false, error: parseResult.error.issues[0]?.message ?? "Invalid body" });
|
||||
return;
|
||||
}
|
||||
|
||||
const body = parseResult.data;
|
||||
const manifest = await resolveManifestFromRunRequest({ body, manifestStore });
|
||||
const record = await runService.startRun({
|
||||
prompt: body.prompt,
|
||||
manifest,
|
||||
manifestPath: body.manifestPath,
|
||||
sessionId,
|
||||
topologyHint: body.topologyHint,
|
||||
initialFlags: body.initialFlags ?? {},
|
||||
runtimeContextOverrides: body.runtimeContextOverrides ?? {},
|
||||
simulateValidationNodeIds: body.simulateValidationNodeIds ?? [],
|
||||
executionMode: (body.executionMode ?? "mock") as RunExecutionMode,
|
||||
provider: (body.provider ?? "codex") as RunProvider,
|
||||
});
|
||||
|
||||
res.status(202).json({ ok: true, run: record });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.post("/api/sessions/:sessionId/close", async (req, res, next) => {
|
||||
try {
|
||||
const sessionId = req.params.sessionId;
|
||||
if (!sessionId) {
|
||||
res.status(400).json({ ok: false, error: "Session id is required." });
|
||||
return;
|
||||
}
|
||||
|
||||
const mergeToProject =
|
||||
typeof (req.body as { mergeToProject?: unknown } | undefined)?.mergeToProject === "boolean"
|
||||
? ((req.body as { mergeToProject: boolean }).mergeToProject)
|
||||
: false;
|
||||
|
||||
const session = await runService.closeSession({
|
||||
sessionId,
|
||||
mergeToProject,
|
||||
});
|
||||
|
||||
res.json({ ok: true, session });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.get("/api/runs", (req, res) => {
|
||||
res.json({ ok: true, runs: runService.listRuns() });
|
||||
});
|
||||
|
||||
app.post("/api/runs", async (req, res, next) => {
|
||||
try {
|
||||
const parseResult = StartRunSchema.safeParse(req.body);
|
||||
if (!parseResult.success) {
|
||||
res.status(400).json({ ok: false, error: parseResult.error.issues[0]?.message ?? "Invalid body" });
|
||||
return;
|
||||
}
|
||||
|
||||
const body = parseResult.data;
|
||||
const manifest = await resolveManifestFromRunRequest({ body, manifestStore });
|
||||
const record = await runService.startRun({
|
||||
prompt: body.prompt,
|
||||
manifest,
|
||||
manifestPath: body.manifestPath,
|
||||
sessionId: body.sessionId,
|
||||
topologyHint: body.topologyHint,
|
||||
initialFlags: body.initialFlags ?? {},
|
||||
runtimeContextOverrides: body.runtimeContextOverrides ?? {},
|
||||
simulateValidationNodeIds: body.simulateValidationNodeIds ?? [],
|
||||
executionMode: (body.executionMode ?? "mock") as RunExecutionMode,
|
||||
provider: (body.provider ?? "codex") as RunProvider,
|
||||
});
|
||||
|
||||
res.status(202).json({ ok: true, run: record });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.post("/api/runs/:runId/cancel", async (req, res, next) => {
|
||||
try {
|
||||
const runId = req.params.runId;
|
||||
if (!runId) {
|
||||
res.status(400).json({ ok: false, error: "runId required" });
|
||||
return;
|
||||
}
|
||||
const run = await runService.cancelRun(runId);
|
||||
res.json({ ok: true, run });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.get("/api/runs/:runId", (req, res) => {
|
||||
const runId = req.params.runId;
|
||||
if (!runId) {
|
||||
res.status(400).json({ ok: false, error: "runId required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const requestUrl = new URL(request.url ?? "/", `http://${request.headers.host ?? "localhost"}`);
|
||||
const pathname = requestUrl.pathname === "/" ? "/index.html" : requestUrl.pathname;
|
||||
const cleanPath = pathname.replace(/^\//, "");
|
||||
if (cleanPath.includes("..")) {
|
||||
notFound(response);
|
||||
const run = runService.getRun(runId);
|
||||
if (!run) {
|
||||
res.status(404).json({ ok: false, error: `Run "${runId}" was not found.` });
|
||||
return;
|
||||
}
|
||||
res.json({ ok: true, run });
|
||||
});
|
||||
|
||||
const staticPath = resolve(staticRoot, cleanPath);
|
||||
const served = await serveStaticFile({
|
||||
response,
|
||||
filePath: staticPath,
|
||||
app.use("/api", (req, res) => {
|
||||
res.status(404).json({
|
||||
ok: false,
|
||||
error: `API route "${req.method} ${req.originalUrl}" was not found.`,
|
||||
});
|
||||
});
|
||||
|
||||
if (served) {
|
||||
return;
|
||||
}
|
||||
|
||||
const fallbackServed = await serveStaticFile({
|
||||
response,
|
||||
filePath: resolve(staticRoot, "index.html"),
|
||||
app.use((err: unknown, req: express.Request, res: express.Response, next: express.NextFunction) => {
|
||||
res.status(400).json({
|
||||
ok: false,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
});
|
||||
});
|
||||
|
||||
if (!fallbackServed) {
|
||||
notFound(response);
|
||||
}
|
||||
app.use(express.static(staticRoot));
|
||||
app.get(/(.*)/, (req, res) => {
|
||||
res.sendFile(resolve(staticRoot, "index.html"));
|
||||
});
|
||||
|
||||
const host = input.host ?? "127.0.0.1";
|
||||
const port = input.port ?? parsePort(process.env.AGENT_UI_PORT);
|
||||
|
||||
const server = app.listen(port, host);
|
||||
await new Promise<void>((resolveReady, rejectReady) => {
|
||||
server.once("error", rejectReady);
|
||||
server.listen(port, host, () => {
|
||||
server.once("listening", () => {
|
||||
server.off("error", rejectReady);
|
||||
console.log(`AI Ops UI listening at http://${host}:${String(port)}`);
|
||||
resolveReady();
|
||||
});
|
||||
});
|
||||
|
||||
console.log(`AI Ops UI listening at http://${host}:${String(port)}`);
|
||||
|
||||
return {
|
||||
close: async () => {
|
||||
if (!server.listening) {
|
||||
return;
|
||||
}
|
||||
await new Promise<void>((resolveClose, rejectClose) => {
|
||||
server.close((error) => {
|
||||
if (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === "ERR_SERVER_NOT_RUNNING") {
|
||||
resolveClose();
|
||||
return;
|
||||
}
|
||||
rejectClose(error);
|
||||
return;
|
||||
}
|
||||
|
||||
296
tests/claude-observability.test.ts
Normal file
296
tests/claude-observability.test.ts
Normal file
@@ -0,0 +1,296 @@
|
||||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
import { mkdtemp, readFile } from "node:fs/promises";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import type { SDKMessage } from "@anthropic-ai/claude-agent-sdk";
|
||||
import { ClaudeObservabilityLogger, summarizeClaudeMessage } from "../src/ui/claude-observability.js";
|
||||
|
||||
test("summarizeClaudeMessage returns compact result metadata in summary mode", () => {
|
||||
const message = {
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
stop_reason: "end_turn",
|
||||
num_turns: 1,
|
||||
total_cost_usd: 0.0012,
|
||||
usage: {
|
||||
input_tokens: 120,
|
||||
output_tokens: 40,
|
||||
},
|
||||
result: "{\"status\":\"success\"}",
|
||||
duration_ms: 40,
|
||||
duration_api_ms: 32,
|
||||
is_error: false,
|
||||
modelUsage: {},
|
||||
permission_denials: [],
|
||||
uuid: "uuid-1",
|
||||
session_id: "sdk-session-1",
|
||||
} as unknown as SDKMessage;
|
||||
|
||||
const summary = summarizeClaudeMessage(message, "summary");
|
||||
|
||||
assert.equal(summary.messageType, "result");
|
||||
assert.equal(summary.messageSubtype, "success");
|
||||
assert.equal(summary.sdkSessionId, "sdk-session-1");
|
||||
assert.equal(summary.summary, "Claude query result success.");
|
||||
assert.equal(summary.data?.numTurns, 1);
|
||||
const usage = summary.data?.usage as Record<string, unknown> | undefined;
|
||||
assert.equal(usage?.input_tokens, 120);
|
||||
});
|
||||
|
||||
test("summarizeClaudeMessage redacts sensitive fields in full mode", () => {
|
||||
const message = {
|
||||
type: "system",
|
||||
subtype: "init",
|
||||
session_id: "sdk-session-2",
|
||||
uuid: "uuid-2",
|
||||
apiKey: "top-secret",
|
||||
nested: {
|
||||
authToken: "really-secret",
|
||||
ok: true,
|
||||
},
|
||||
} as unknown as SDKMessage;
|
||||
|
||||
const summary = summarizeClaudeMessage(message, "full");
|
||||
const payload = summary.data?.message as Record<string, unknown> | undefined;
|
||||
const nested = payload?.nested as Record<string, unknown> | undefined;
|
||||
|
||||
assert.equal(summary.messageType, "system");
|
||||
assert.equal(summary.messageSubtype, "init");
|
||||
assert.equal(payload?.apiKey, "[redacted]");
|
||||
assert.equal(nested?.authToken, "[redacted]");
|
||||
assert.equal(nested?.ok, true);
|
||||
});
|
||||
|
||||
test("ClaudeObservabilityLogger samples tool_progress messages for stdout", () => {
|
||||
const lines: string[] = [];
|
||||
const originalLog = console.log;
|
||||
const originalNow = Date.now;
|
||||
let now = 1000;
|
||||
|
||||
console.log = (line?: unknown) => {
|
||||
lines.push(String(line ?? ""));
|
||||
};
|
||||
Date.now = () => now;
|
||||
|
||||
try {
|
||||
const logger = new ClaudeObservabilityLogger({
|
||||
workspaceRoot: process.cwd(),
|
||||
config: {
|
||||
mode: "stdout",
|
||||
verbosity: "summary",
|
||||
logPath: ".ai_ops/events/claude-trace.ndjson",
|
||||
includePartialMessages: false,
|
||||
debug: false,
|
||||
},
|
||||
});
|
||||
|
||||
const context = {
|
||||
sessionId: "session-a",
|
||||
nodeId: "node-a",
|
||||
attempt: 1,
|
||||
depth: 0,
|
||||
};
|
||||
|
||||
const makeMessage = (): SDKMessage =>
|
||||
({
|
||||
type: "tool_progress",
|
||||
tool_name: "Bash",
|
||||
tool_use_id: "tool-1",
|
||||
parent_tool_use_id: null,
|
||||
elapsed_time_seconds: 1,
|
||||
uuid: "uuid-tool",
|
||||
session_id: "sdk-session-tool",
|
||||
}) as unknown as SDKMessage;
|
||||
|
||||
logger.recordMessage({
|
||||
context,
|
||||
message: makeMessage(),
|
||||
});
|
||||
|
||||
now += 300;
|
||||
logger.recordMessage({
|
||||
context,
|
||||
message: makeMessage(),
|
||||
});
|
||||
|
||||
now += 1200;
|
||||
logger.recordMessage({
|
||||
context,
|
||||
message: makeMessage(),
|
||||
});
|
||||
|
||||
assert.equal(lines.length, 2);
|
||||
assert.match(lines[0] ?? "", /^\[claude-trace\] /);
|
||||
assert.match(lines[1] ?? "", /"suppressedSinceLastEmit":1/);
|
||||
} finally {
|
||||
console.log = originalLog;
|
||||
Date.now = originalNow;
|
||||
}
|
||||
});
|
||||
|
||||
test("ClaudeObservabilityLogger keeps assistant/user message records in file output", async () => {
|
||||
const workspace = await mkdtemp(join(tmpdir(), "claude-obsv-test-"));
|
||||
const logPath = ".ai_ops/events/claude-trace.ndjson";
|
||||
const logger = new ClaudeObservabilityLogger({
|
||||
workspaceRoot: workspace,
|
||||
config: {
|
||||
mode: "file",
|
||||
verbosity: "summary",
|
||||
logPath,
|
||||
includePartialMessages: false,
|
||||
debug: false,
|
||||
},
|
||||
});
|
||||
|
||||
const context = {
|
||||
sessionId: "session-file",
|
||||
nodeId: "node-file",
|
||||
attempt: 1,
|
||||
depth: 0,
|
||||
};
|
||||
|
||||
logger.recordQueryStarted({
|
||||
context,
|
||||
});
|
||||
logger.recordMessage({
|
||||
context,
|
||||
message: {
|
||||
type: "assistant",
|
||||
uuid: "assistant-1",
|
||||
session_id: "sdk-file-1",
|
||||
parent_tool_use_id: null,
|
||||
message: {} as never,
|
||||
} as unknown as SDKMessage,
|
||||
});
|
||||
logger.recordMessage({
|
||||
context,
|
||||
message: {
|
||||
type: "user",
|
||||
uuid: "user-1",
|
||||
session_id: "sdk-file-1",
|
||||
parent_tool_use_id: null,
|
||||
message: {} as never,
|
||||
} as unknown as SDKMessage,
|
||||
});
|
||||
logger.recordMessage({
|
||||
context,
|
||||
message: {
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
stop_reason: "end_turn",
|
||||
num_turns: 1,
|
||||
total_cost_usd: 0.0012,
|
||||
usage: {
|
||||
input_tokens: 100,
|
||||
output_tokens: 20,
|
||||
},
|
||||
result: "{}",
|
||||
duration_ms: 10,
|
||||
duration_api_ms: 9,
|
||||
is_error: false,
|
||||
modelUsage: {},
|
||||
permission_denials: [],
|
||||
uuid: "result-1",
|
||||
session_id: "sdk-file-1",
|
||||
} as unknown as SDKMessage,
|
||||
});
|
||||
logger.recordQueryCompleted({
|
||||
context,
|
||||
});
|
||||
|
||||
await logger.close();
|
||||
|
||||
const filePath = join(workspace, logPath);
|
||||
const content = await readFile(filePath, "utf8");
|
||||
const lines = content.split(/\r?\n/).filter((line) => line.trim().length > 0);
|
||||
const records = lines.map((line) => JSON.parse(line) as Record<string, unknown>);
|
||||
const messageTypes = records
|
||||
.map((record) => record.sdkMessageType)
|
||||
.filter((value) => typeof value === "string");
|
||||
|
||||
assert.equal(messageTypes.includes("assistant"), true);
|
||||
assert.equal(messageTypes.includes("user"), true);
|
||||
assert.equal(messageTypes.includes("result"), true);
|
||||
});
|
||||
|
||||
test("summarizeClaudeMessage maps task_notification system subtype", () => {
|
||||
const message = {
|
||||
type: "system",
|
||||
subtype: "task_notification",
|
||||
task_id: "task-1",
|
||||
status: "completed",
|
||||
output_file: "/tmp/out.txt",
|
||||
summary: "Task complete",
|
||||
uuid: "uuid-task",
|
||||
session_id: "sdk-session-task",
|
||||
} as unknown as SDKMessage;
|
||||
|
||||
const summary = summarizeClaudeMessage(message, "summary");
|
||||
|
||||
assert.equal(summary.messageType, "system");
|
||||
assert.equal(summary.messageSubtype, "task_notification");
|
||||
assert.equal(summary.summary, "Task notification: completed.");
|
||||
assert.equal(summary.data?.taskId, "task-1");
|
||||
});
|
||||
|
||||
test("ClaudeObservabilityLogger honors includePartialMessages for stream events", () => {
|
||||
const lines: string[] = [];
|
||||
const originalLog = console.log;
|
||||
console.log = (line?: unknown) => {
|
||||
lines.push(String(line ?? ""));
|
||||
};
|
||||
|
||||
try {
|
||||
const context = {
|
||||
sessionId: "session-stream",
|
||||
nodeId: "node-stream",
|
||||
attempt: 1,
|
||||
depth: 0,
|
||||
};
|
||||
const streamMessage = {
|
||||
type: "stream_event",
|
||||
event: {
|
||||
type: "content_block_delta",
|
||||
},
|
||||
parent_tool_use_id: null,
|
||||
uuid: "stream-1",
|
||||
session_id: "sdk-session-stream",
|
||||
} as unknown as SDKMessage;
|
||||
|
||||
const withoutPartial = new ClaudeObservabilityLogger({
|
||||
workspaceRoot: process.cwd(),
|
||||
config: {
|
||||
mode: "stdout",
|
||||
verbosity: "summary",
|
||||
logPath: ".ai_ops/events/claude-trace.ndjson",
|
||||
includePartialMessages: false,
|
||||
debug: false,
|
||||
},
|
||||
});
|
||||
withoutPartial.recordMessage({
|
||||
context,
|
||||
message: streamMessage,
|
||||
});
|
||||
|
||||
const withPartial = new ClaudeObservabilityLogger({
|
||||
workspaceRoot: process.cwd(),
|
||||
config: {
|
||||
mode: "stdout",
|
||||
verbosity: "summary",
|
||||
logPath: ".ai_ops/events/claude-trace.ndjson",
|
||||
includePartialMessages: true,
|
||||
debug: false,
|
||||
},
|
||||
});
|
||||
withPartial.recordMessage({
|
||||
context,
|
||||
message: streamMessage,
|
||||
});
|
||||
|
||||
assert.equal(lines.length, 1);
|
||||
assert.match(lines[0] ?? "", /\"sdkMessageType\":\"stream_event\"/);
|
||||
} finally {
|
||||
console.log = originalLog;
|
||||
}
|
||||
});
|
||||
42
tests/claude-trace-store.test.ts
Normal file
42
tests/claude-trace-store.test.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
import { mkdtemp, writeFile } from "node:fs/promises";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { filterClaudeTraceEvents, readClaudeTraceEvents } from "../src/ui/claude-trace-store.js";
|
||||
|
||||
test("readClaudeTraceEvents parses and sorts ndjson records", async () => {
|
||||
const workspace = await mkdtemp(join(tmpdir(), "claude-trace-store-"));
|
||||
const logPath = join(workspace, "claude-trace.ndjson");
|
||||
await writeFile(
|
||||
logPath,
|
||||
[
|
||||
'{"timestamp":"2026-02-24T17:27:05.000Z","message":"later","sessionId":"s1"}',
|
||||
'not-json',
|
||||
'{"timestamp":"2026-02-24T17:26:00.000Z","message":"earlier","sessionId":"s1"}',
|
||||
'{"message":"missing timestamp"}',
|
||||
].join("\n"),
|
||||
"utf8",
|
||||
);
|
||||
|
||||
const events = await readClaudeTraceEvents(logPath);
|
||||
assert.equal(events.length, 2);
|
||||
assert.equal(events[0]?.message, "earlier");
|
||||
assert.equal(events[1]?.message, "later");
|
||||
});
|
||||
|
||||
test("filterClaudeTraceEvents filters by session and limit", () => {
|
||||
const events = [
|
||||
{ timestamp: "2026-02-24T17:00:00.000Z", message: "a", sessionId: "s1" },
|
||||
{ timestamp: "2026-02-24T17:01:00.000Z", message: "b", sessionId: "s2" },
|
||||
{ timestamp: "2026-02-24T17:02:00.000Z", message: "c", sessionId: "s1" },
|
||||
];
|
||||
|
||||
const filtered = filterClaudeTraceEvents(events, {
|
||||
sessionId: "s1",
|
||||
limit: 1,
|
||||
});
|
||||
|
||||
assert.equal(filtered.length, 1);
|
||||
assert.equal(filtered[0]?.message, "c");
|
||||
});
|
||||
@@ -12,6 +12,7 @@ test("loads defaults and freezes config", () => {
|
||||
|
||||
assert.equal(config.agentManager.maxConcurrentAgents, 4);
|
||||
assert.equal(config.orchestration.maxDepth, 4);
|
||||
assert.equal(config.orchestration.mergeConflictMaxAttempts, 2);
|
||||
assert.equal(config.provisioning.portRange.basePort, 36000);
|
||||
assert.equal(config.discovery.fileRelativePath, ".agent-context/resources.json");
|
||||
assert.equal(config.security.violationHandling, "hard_abort");
|
||||
@@ -24,6 +25,12 @@ test("loads defaults and freezes config", () => {
|
||||
"session.failed",
|
||||
]);
|
||||
assert.equal(config.provider.openAiAuthMode, "auto");
|
||||
assert.equal(config.provider.claudeMaxTurns, 2);
|
||||
assert.equal(config.provider.claudeObservability.mode, "off");
|
||||
assert.equal(config.provider.claudeObservability.verbosity, "summary");
|
||||
assert.equal(config.provider.claudeObservability.logPath, ".ai_ops/events/claude-trace.ndjson");
|
||||
assert.equal(config.provider.claudeObservability.includePartialMessages, false);
|
||||
assert.equal(config.provider.claudeObservability.debug, false);
|
||||
assert.equal(Object.isFrozen(config), true);
|
||||
assert.equal(Object.isFrozen(config.orchestration), true);
|
||||
});
|
||||
@@ -49,6 +56,11 @@ test("validates security violation mode", () => {
|
||||
);
|
||||
});
|
||||
|
||||
test("loads dangerous_warn_only security violation mode", () => {
|
||||
const config = loadConfig({ AGENT_SECURITY_VIOLATION_MODE: "dangerous_warn_only" });
|
||||
assert.equal(config.security.violationHandling, "dangerous_warn_only");
|
||||
});
|
||||
|
||||
test("validates runtime discord severity mode", () => {
|
||||
assert.throws(
|
||||
() => loadConfig({ AGENT_RUNTIME_DISCORD_MIN_SEVERITY: "verbose" }),
|
||||
@@ -56,6 +68,45 @@ test("validates runtime discord severity mode", () => {
|
||||
);
|
||||
});
|
||||
|
||||
test("validates claude observability mode", () => {
|
||||
assert.throws(
|
||||
() => loadConfig({ CLAUDE_OBSERVABILITY_MODE: "stream" }),
|
||||
/CLAUDE_OBSERVABILITY_MODE must be one of/,
|
||||
);
|
||||
});
|
||||
|
||||
test("validates CLAUDE_MAX_TURNS bounds", () => {
|
||||
assert.throws(
|
||||
() => loadConfig({ CLAUDE_MAX_TURNS: "0" }),
|
||||
/CLAUDE_MAX_TURNS must be an integer >= 1/,
|
||||
);
|
||||
});
|
||||
|
||||
test("validates claude observability verbosity", () => {
|
||||
assert.throws(
|
||||
() => loadConfig({ CLAUDE_OBSERVABILITY_VERBOSITY: "verbose" }),
|
||||
/CLAUDE_OBSERVABILITY_VERBOSITY must be one of/,
|
||||
);
|
||||
});
|
||||
|
||||
test("loads claude observability settings", () => {
|
||||
const config = loadConfig({
|
||||
CLAUDE_OBSERVABILITY_MODE: "both",
|
||||
CLAUDE_OBSERVABILITY_VERBOSITY: "full",
|
||||
CLAUDE_OBSERVABILITY_LOG_PATH: ".ai_ops/debug/claude.ndjson",
|
||||
CLAUDE_OBSERVABILITY_INCLUDE_PARTIAL: "true",
|
||||
CLAUDE_OBSERVABILITY_DEBUG: "true",
|
||||
CLAUDE_OBSERVABILITY_DEBUG_LOG_PATH: ".ai_ops/debug/claude-sdk.log",
|
||||
});
|
||||
|
||||
assert.equal(config.provider.claudeObservability.mode, "both");
|
||||
assert.equal(config.provider.claudeObservability.verbosity, "full");
|
||||
assert.equal(config.provider.claudeObservability.logPath, ".ai_ops/debug/claude.ndjson");
|
||||
assert.equal(config.provider.claudeObservability.includePartialMessages, true);
|
||||
assert.equal(config.provider.claudeObservability.debug, true);
|
||||
assert.equal(config.provider.claudeObservability.debugLogPath, ".ai_ops/debug/claude-sdk.log");
|
||||
});
|
||||
|
||||
test("prefers CLAUDE_CODE_OAUTH_TOKEN over ANTHROPIC_API_KEY", () => {
|
||||
const config = loadConfig({
|
||||
CLAUDE_CODE_OAUTH_TOKEN: "oauth-token",
|
||||
@@ -104,3 +155,33 @@ test("resolveOpenAiApiKey prefers CODEX_API_KEY in auto mode", () => {
|
||||
|
||||
assert.equal(resolveOpenAiApiKey(config.provider), "codex-key");
|
||||
});
|
||||
|
||||
test("normalizes anthropic-prefixed CLAUDE_MODEL values", () => {
|
||||
const config = loadConfig({
|
||||
CLAUDE_MODEL: "anthropic/claude-sonnet-4-6",
|
||||
});
|
||||
|
||||
assert.equal(config.provider.claudeModel, "claude-sonnet-4-6");
|
||||
});
|
||||
|
||||
test("normalizes AGENT_WORKTREE_TARGET_PATH", () => {
|
||||
const config = loadConfig({
|
||||
AGENT_WORKTREE_TARGET_PATH: "./src/agents/",
|
||||
});
|
||||
|
||||
assert.equal(config.provisioning.gitWorktree.targetPath, "src/agents");
|
||||
});
|
||||
|
||||
test("validates AGENT_WORKTREE_TARGET_PATH against parent traversal", () => {
|
||||
assert.throws(
|
||||
() => loadConfig({ AGENT_WORKTREE_TARGET_PATH: "../secrets" }),
|
||||
/must not contain "\.\." path segments/,
|
||||
);
|
||||
});
|
||||
|
||||
test("validates AGENT_MERGE_CONFLICT_MAX_ATTEMPTS bounds", () => {
|
||||
assert.throws(
|
||||
() => loadConfig({ AGENT_MERGE_CONFLICT_MAX_ATTEMPTS: "0" }),
|
||||
/AGENT_MERGE_CONFLICT_MAX_ATTEMPTS must be an integer >= 1/,
|
||||
);
|
||||
});
|
||||
|
||||
@@ -3,7 +3,7 @@ import assert from "node:assert/strict";
|
||||
import { mkdtemp, readFile, writeFile } from "node:fs/promises";
|
||||
import { tmpdir } from "node:os";
|
||||
import { resolve } from "node:path";
|
||||
import { parseEnvFile, writeEnvFileUpdates } from "../src/ui/env-store.js";
|
||||
import { parseEnvFile, writeEnvFileUpdates } from "../src/store/env-store.js";
|
||||
|
||||
test("parseEnvFile handles missing files", async () => {
|
||||
const root = await mkdtemp(resolve(tmpdir(), "ai-ops-env-store-"));
|
||||
|
||||
@@ -148,7 +148,7 @@ test("rejects legacy edge trigger aliases", () => {
|
||||
|
||||
assert.throws(
|
||||
() => parseAgentManifest(manifest),
|
||||
/unsupported event "onValidationFail"/,
|
||||
/Invalid option/,
|
||||
);
|
||||
});
|
||||
|
||||
|
||||
@@ -380,6 +380,7 @@ test("injects resolved mcp/helpers and enforces Claude tool gate in actor execut
|
||||
);
|
||||
assert.deepEqual(allow, {
|
||||
behavior: "allow",
|
||||
updatedInput: {},
|
||||
toolUseID: "allow-1",
|
||||
});
|
||||
|
||||
@@ -614,6 +615,7 @@ test("runs parallel topology blocks concurrently and routes via domain-event edg
|
||||
projectContextPatch: {
|
||||
enqueueTasks: [
|
||||
{
|
||||
taskId: "task-integrate",
|
||||
id: "task-integrate",
|
||||
title: "Integrate feature branches",
|
||||
status: "pending",
|
||||
@@ -939,6 +941,169 @@ test("propagates abort signal into actor execution and stops the run", async ()
|
||||
assert.equal(observedAbort, true);
|
||||
});
|
||||
|
||||
test("createClaudeCanUseTool accepts tool casing differences from providers", async () => {
|
||||
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
|
||||
const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));
|
||||
const projectContextPath = resolve(stateRoot, "project-context.json");
|
||||
|
||||
const manifest = {
|
||||
schemaVersion: "1",
|
||||
topologies: ["sequential"],
|
||||
personas: [
|
||||
{
|
||||
id: "coder",
|
||||
displayName: "Coder",
|
||||
systemPromptTemplate: "Coder",
|
||||
toolClearance: {
|
||||
allowlist: ["bash"],
|
||||
banlist: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
relationships: [],
|
||||
topologyConstraints: {
|
||||
maxDepth: 2,
|
||||
maxRetries: 0,
|
||||
},
|
||||
pipeline: {
|
||||
entryNodeId: "case-node",
|
||||
nodes: [
|
||||
{
|
||||
id: "case-node",
|
||||
actorId: "case_actor",
|
||||
personaId: "coder",
|
||||
},
|
||||
],
|
||||
edges: [],
|
||||
},
|
||||
} as const;
|
||||
|
||||
const engine = new SchemaDrivenExecutionEngine({
|
||||
manifest,
|
||||
settings: {
|
||||
workspaceRoot,
|
||||
stateRoot,
|
||||
projectContextPath,
|
||||
maxChildren: 1,
|
||||
maxDepth: 2,
|
||||
maxRetries: 0,
|
||||
runtimeContext: {},
|
||||
},
|
||||
actorExecutors: {
|
||||
case_actor: async (input) => {
|
||||
const canUseTool = input.mcp.createClaudeCanUseTool();
|
||||
const allow = await canUseTool("Bash", {}, {
|
||||
signal: new AbortController().signal,
|
||||
toolUseID: "allow-bash",
|
||||
});
|
||||
assert.deepEqual(allow, {
|
||||
behavior: "allow",
|
||||
updatedInput: {},
|
||||
toolUseID: "allow-bash",
|
||||
});
|
||||
|
||||
return {
|
||||
status: "success",
|
||||
payload: {
|
||||
ok: true,
|
||||
},
|
||||
};
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const result = await engine.runSession({
|
||||
sessionId: "session-claude-tool-casing",
|
||||
initialPayload: {
|
||||
task: "verify tool casing",
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(result.status, "success");
|
||||
});
|
||||
|
||||
test("dangerous_warn_only allows tool use outside persona allowlist", async () => {
|
||||
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
|
||||
const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));
|
||||
const projectContextPath = resolve(stateRoot, "project-context.json");
|
||||
|
||||
const manifest = {
|
||||
schemaVersion: "1",
|
||||
topologies: ["sequential"],
|
||||
personas: [
|
||||
{
|
||||
id: "reader",
|
||||
displayName: "Reader",
|
||||
systemPromptTemplate: "Reader",
|
||||
toolClearance: {
|
||||
allowlist: ["read_file"],
|
||||
banlist: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
relationships: [],
|
||||
topologyConstraints: {
|
||||
maxDepth: 2,
|
||||
maxRetries: 0,
|
||||
},
|
||||
pipeline: {
|
||||
entryNodeId: "warn-node",
|
||||
nodes: [
|
||||
{
|
||||
id: "warn-node",
|
||||
actorId: "warn_actor",
|
||||
personaId: "reader",
|
||||
},
|
||||
],
|
||||
edges: [],
|
||||
},
|
||||
} as const;
|
||||
|
||||
const engine = new SchemaDrivenExecutionEngine({
|
||||
manifest,
|
||||
settings: {
|
||||
workspaceRoot,
|
||||
stateRoot,
|
||||
projectContextPath,
|
||||
maxChildren: 1,
|
||||
maxDepth: 2,
|
||||
maxRetries: 0,
|
||||
securityViolationHandling: "dangerous_warn_only",
|
||||
runtimeContext: {},
|
||||
},
|
||||
actorExecutors: {
|
||||
warn_actor: async (input) => {
|
||||
const canUseTool = input.mcp.createClaudeCanUseTool();
|
||||
const allow = await canUseTool("Bash", {}, {
|
||||
signal: new AbortController().signal,
|
||||
toolUseID: "allow-bash-warn",
|
||||
});
|
||||
assert.deepEqual(allow, {
|
||||
behavior: "allow",
|
||||
updatedInput: {},
|
||||
toolUseID: "allow-bash-warn",
|
||||
});
|
||||
|
||||
return {
|
||||
status: "success",
|
||||
payload: {
|
||||
ok: true,
|
||||
},
|
||||
};
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const result = await engine.runSession({
|
||||
sessionId: "session-dangerous-warn-only",
|
||||
initialPayload: {
|
||||
task: "verify warn-only bypass",
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(result.status, "success");
|
||||
});
|
||||
|
||||
test("hard-aborts pipeline on security violations by default", async () => {
|
||||
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
|
||||
const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));
|
||||
|
||||
@@ -28,6 +28,7 @@ test("project context store reads defaults and applies domain patches", async ()
|
||||
},
|
||||
enqueueTasks: [
|
||||
{
|
||||
taskId: "task-1",
|
||||
id: "task-1",
|
||||
title: "Build parser",
|
||||
status: "pending",
|
||||
@@ -38,11 +39,13 @@ test("project context store reads defaults and applies domain patches", async ()
|
||||
const updated = await store.patchState({
|
||||
upsertTasks: [
|
||||
{
|
||||
taskId: "task-1",
|
||||
id: "task-1",
|
||||
title: "Build parser",
|
||||
status: "in_progress",
|
||||
},
|
||||
{
|
||||
taskId: "task-2",
|
||||
id: "task-2",
|
||||
title: "Add tests",
|
||||
status: "pending",
|
||||
@@ -59,6 +62,35 @@ test("project context store reads defaults and applies domain patches", async ()
|
||||
assert.equal(updated.schemaVersion, 1);
|
||||
});
|
||||
|
||||
test("project context accepts conflict-aware task statuses", async () => {
|
||||
const root = await mkdtemp(resolve(tmpdir(), "ai-ops-project-context-conflict-"));
|
||||
const store = new FileSystemProjectContextStore({
|
||||
filePath: resolve(root, "project-context.json"),
|
||||
});
|
||||
|
||||
const updated = await store.patchState({
|
||||
upsertTasks: [
|
||||
{
|
||||
taskId: "task-conflict",
|
||||
id: "task-conflict",
|
||||
title: "Resolve merge conflict",
|
||||
status: "conflict",
|
||||
},
|
||||
{
|
||||
taskId: "task-resolving",
|
||||
id: "task-resolving",
|
||||
title: "Retry merge",
|
||||
status: "resolving_conflict",
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
assert.deepEqual(
|
||||
updated.taskQueue.map((task) => `${task.taskId}:${task.status}`),
|
||||
["task-conflict:conflict", "task-resolving:resolving_conflict"],
|
||||
);
|
||||
});
|
||||
|
||||
test("project context parser merges missing root keys with defaults", async () => {
|
||||
const root = await mkdtemp(resolve(tmpdir(), "ai-ops-project-context-"));
|
||||
const filePath = resolve(root, "project-context.json");
|
||||
@@ -70,6 +102,7 @@ test("project context parser merges missing root keys with defaults", async () =
|
||||
{
|
||||
taskQueue: [
|
||||
{
|
||||
taskId: "task-1",
|
||||
id: "task-1",
|
||||
title: "Migrate",
|
||||
status: "pending",
|
||||
|
||||
@@ -160,6 +160,7 @@ test("runClaudePrompt wires auth env, stream parsing, and output", async () => {
|
||||
ANTHROPIC_API_KEY: "legacy-api-key",
|
||||
CLAUDE_MODEL: "claude-sonnet-4-6",
|
||||
CLAUDE_CODE_PATH: "/usr/local/bin/claude",
|
||||
CLAUDE_MAX_TURNS: "5",
|
||||
});
|
||||
|
||||
let closed = false;
|
||||
@@ -229,6 +230,7 @@ test("runClaudePrompt wires auth env, stream parsing, and output", async () => {
|
||||
assert.equal(queryInput?.prompt, "augmented prompt");
|
||||
assert.equal(queryInput?.options?.model, "claude-sonnet-4-6");
|
||||
assert.equal(queryInput?.options?.pathToClaudeCodeExecutable, "/usr/local/bin/claude");
|
||||
assert.equal(queryInput?.options?.maxTurns, 5);
|
||||
assert.equal(queryInput?.options?.cwd, "/tmp/claude-worktree");
|
||||
assert.equal(queryInput?.options?.authToken, "oauth-token");
|
||||
assert.deepEqual(queryInput?.options?.mcpServers, sessionContext.mcp.claudeMcpServers);
|
||||
|
||||
@@ -1,6 +1,17 @@
|
||||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
import { parseActorExecutionResultFromModelOutput } from "../src/ui/provider-executor.js";
|
||||
import { mkdtemp } from "node:fs/promises";
|
||||
import { tmpdir } from "node:os";
|
||||
import { resolve } from "node:path";
|
||||
import { loadConfig } from "../src/config.js";
|
||||
import type { ActorExecutionInput } from "../src/agents/pipeline.js";
|
||||
import {
|
||||
buildProviderRuntimeEnv,
|
||||
createProviderRunRuntime,
|
||||
parseActorExecutionResultFromModelOutput,
|
||||
resolveProviderWorkingDirectory,
|
||||
type ProviderRunRuntime,
|
||||
} from "../src/agents/provider-executor.js";
|
||||
|
||||
test("parseActorExecutionResultFromModelOutput parses strict JSON payload", () => {
|
||||
const parsed = parseActorExecutionResultFromModelOutput({
|
||||
@@ -64,3 +75,110 @@ test("parseActorExecutionResultFromModelOutput falls back when response is not J
|
||||
assert.equal(parsed.status, "success");
|
||||
assert.equal(parsed.payload?.assistantResponse, "Implemented update successfully.");
|
||||
});
|
||||
|
||||
test("parseActorExecutionResultFromModelOutput preserves status when optional fields are malformed", () => {
|
||||
const parsed = parseActorExecutionResultFromModelOutput({
|
||||
rawText: JSON.stringify({
|
||||
status: "failure",
|
||||
payload: {
|
||||
reason: "hard failure",
|
||||
},
|
||||
stateFlags: {
|
||||
retryable: false,
|
||||
invalid_flag: "nope",
|
||||
},
|
||||
stateMetadata: "not-an-object",
|
||||
events: [
|
||||
{
|
||||
type: "validation_failed",
|
||||
payload: {
|
||||
summary: "failed",
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 123,
|
||||
},
|
||||
],
|
||||
failureKind: "not-valid",
|
||||
failureCode: 403,
|
||||
}),
|
||||
});
|
||||
|
||||
assert.equal(parsed.status, "failure");
|
||||
assert.equal(parsed.payload?.reason, "hard failure");
|
||||
assert.equal(parsed.stateFlags?.retryable, false);
|
||||
assert.equal(parsed.stateFlags && "invalid_flag" in parsed.stateFlags, false);
|
||||
assert.equal(parsed.stateMetadata, undefined);
|
||||
assert.equal(parsed.events?.length, 1);
|
||||
assert.equal(parsed.events?.[0]?.type, "validation_failed");
|
||||
assert.equal(parsed.failureKind, undefined);
|
||||
assert.equal(parsed.failureCode, undefined);
|
||||
});
|
||||
|
||||
test("resolveProviderWorkingDirectory reads cwd from actor execution context", () => {
|
||||
const actorInput = {
|
||||
executionContext: {
|
||||
security: {
|
||||
worktreePath: "/tmp/session/tasks/product-intake",
|
||||
},
|
||||
},
|
||||
} as unknown as ActorExecutionInput;
|
||||
|
||||
assert.equal(
|
||||
resolveProviderWorkingDirectory(actorInput),
|
||||
"/tmp/session/tasks/product-intake",
|
||||
);
|
||||
});
|
||||
|
||||
test("buildProviderRuntimeEnv scopes AGENT_WORKTREE_PATH to actor worktree and filters undefined auth", () => {
|
||||
const config = loadConfig({
|
||||
CLAUDE_CODE_OAUTH_TOKEN: "oauth-token",
|
||||
});
|
||||
const runtime = {
|
||||
provider: "claude",
|
||||
config,
|
||||
sharedEnv: {
|
||||
PATH: "/usr/bin",
|
||||
KEEP_ME: "1",
|
||||
},
|
||||
claudeObservability: {} as ProviderRunRuntime["claudeObservability"],
|
||||
close: async () => {},
|
||||
} satisfies ProviderRunRuntime;
|
||||
const actorInput = {
|
||||
executionContext: {
|
||||
security: {
|
||||
worktreePath: "/tmp/session/tasks/product-intake",
|
||||
},
|
||||
},
|
||||
} as unknown as ActorExecutionInput;
|
||||
|
||||
const env = buildProviderRuntimeEnv({
|
||||
runtime,
|
||||
actorInput,
|
||||
includeClaudeAuth: true,
|
||||
});
|
||||
|
||||
assert.equal(env.AGENT_WORKTREE_PATH, "/tmp/session/tasks/product-intake");
|
||||
assert.equal(env.CLAUDE_CODE_OAUTH_TOKEN, "oauth-token");
|
||||
assert.equal("ANTHROPIC_API_KEY" in env, false);
|
||||
assert.equal(env.KEEP_ME, "1");
|
||||
});
|
||||
|
||||
test("createProviderRunRuntime does not require session context provisioning", async () => {
|
||||
const observabilityRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-provider-runtime-"));
|
||||
const runtime = await createProviderRunRuntime({
|
||||
provider: "claude",
|
||||
config: loadConfig({}),
|
||||
observabilityRootPath: observabilityRoot,
|
||||
baseEnv: {
|
||||
PATH: "/usr/bin",
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
assert.equal(runtime.provider, "claude");
|
||||
assert.equal(runtime.sharedEnv.PATH, "/usr/bin");
|
||||
} finally {
|
||||
await runtime.close();
|
||||
}
|
||||
});
|
||||
|
||||
@@ -18,6 +18,7 @@ function parentSnapshot(): DiscoverySnapshot {
|
||||
worktreeRoot: "/repo/.ai_ops/worktrees",
|
||||
worktreePath: "/repo/.ai_ops/worktrees/parent",
|
||||
baseRef: "HEAD",
|
||||
targetPath: "src/agents",
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -55,6 +56,7 @@ test("builds deterministic child suballocation requests", () => {
|
||||
const gitRequest = requests.find((entry) => entry.kind === "git-worktree");
|
||||
assert.ok(gitRequest);
|
||||
assert.equal(typeof gitRequest.options?.rootDirectory, "string");
|
||||
assert.equal(gitRequest.options?.targetPath, "src/agents");
|
||||
|
||||
const portRequest = requests.find((entry) => entry.kind === "port-range");
|
||||
assert.ok(portRequest);
|
||||
|
||||
237
tests/run-service.test.ts
Normal file
237
tests/run-service.test.ts
Normal file
@@ -0,0 +1,237 @@
|
||||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
import { execFile } from "node:child_process";
|
||||
import { mkdtemp, mkdir, stat, writeFile } from "node:fs/promises";
|
||||
import { tmpdir } from "node:os";
|
||||
import { resolve } from "node:path";
|
||||
import { UiRunService, readRunMetaBySession } from "../src/runs/run-service.js";
|
||||
import { promisify } from "node:util";
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
async function waitForTerminalRun(
|
||||
runService: UiRunService,
|
||||
runId: string,
|
||||
): Promise<"success" | "failure" | "cancelled"> {
|
||||
const maxPolls = 100;
|
||||
for (let index = 0; index < maxPolls; index += 1) {
|
||||
const run = runService.getRun(runId);
|
||||
if (run && run.status !== "running") {
|
||||
return run.status;
|
||||
}
|
||||
await new Promise((resolveWait) => setTimeout(resolveWait, 20));
|
||||
}
|
||||
throw new Error("Run did not reach a terminal status within polling window.");
|
||||
}
|
||||
|
||||
test("run service persists failure when pipeline summary is failure", async () => {
|
||||
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-run-service-"));
|
||||
const stateRoot = resolve(workspaceRoot, "state");
|
||||
const projectContextPath = resolve(workspaceRoot, "project-context.json");
|
||||
const envPath = resolve(workspaceRoot, ".env");
|
||||
|
||||
await writeFile(
|
||||
envPath,
|
||||
[
|
||||
`AGENT_STATE_ROOT=${stateRoot}`,
|
||||
`AGENT_PROJECT_CONTEXT_PATH=${projectContextPath}`,
|
||||
].join("\n"),
|
||||
"utf8",
|
||||
);
|
||||
|
||||
const runService = new UiRunService({
|
||||
workspaceRoot,
|
||||
envFilePath: ".env",
|
||||
});
|
||||
|
||||
const manifest = {
|
||||
schemaVersion: "1",
|
||||
topologies: ["sequential"],
|
||||
personas: [
|
||||
{
|
||||
id: "writer",
|
||||
displayName: "Writer",
|
||||
systemPromptTemplate: "Write the draft",
|
||||
toolClearance: {
|
||||
allowlist: ["read_file", "write_file"],
|
||||
banlist: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
relationships: [],
|
||||
topologyConstraints: {
|
||||
maxDepth: 1,
|
||||
maxRetries: 0,
|
||||
},
|
||||
pipeline: {
|
||||
entryNodeId: "write-node",
|
||||
nodes: [
|
||||
{
|
||||
id: "write-node",
|
||||
actorId: "writer-actor",
|
||||
personaId: "writer",
|
||||
topology: {
|
||||
kind: "sequential",
|
||||
},
|
||||
constraints: {
|
||||
maxRetries: 0,
|
||||
},
|
||||
},
|
||||
],
|
||||
edges: [],
|
||||
},
|
||||
};
|
||||
|
||||
const started = await runService.startRun({
|
||||
prompt: "force validation failure on first attempt",
|
||||
manifest,
|
||||
executionMode: "mock",
|
||||
simulateValidationNodeIds: ["write-node"],
|
||||
});
|
||||
|
||||
const terminalStatus = await waitForTerminalRun(runService, started.runId);
|
||||
assert.equal(terminalStatus, "failure");
|
||||
|
||||
const persisted = await readRunMetaBySession({
|
||||
stateRoot,
|
||||
sessionId: started.sessionId,
|
||||
});
|
||||
assert.equal(persisted?.status, "failure");
|
||||
});
|
||||
|
||||
test("run service creates, runs, and closes explicit sessions", async () => {
|
||||
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-run-service-session-"));
|
||||
const stateRoot = resolve(workspaceRoot, "state");
|
||||
const envPath = resolve(workspaceRoot, ".env");
|
||||
const projectPath = resolve(workspaceRoot, "project");
|
||||
|
||||
await mkdir(projectPath, { recursive: true });
|
||||
await execFileAsync("git", ["init", projectPath], { encoding: "utf8" });
|
||||
await execFileAsync("git", ["-C", projectPath, "config", "user.name", "AI Ops"], { encoding: "utf8" });
|
||||
await execFileAsync("git", ["-C", projectPath, "config", "user.email", "ai-ops@example.local"], { encoding: "utf8" });
|
||||
await writeFile(resolve(projectPath, "README.md"), "# project\n", "utf8");
|
||||
await execFileAsync("git", ["-C", projectPath, "add", "README.md"], { encoding: "utf8" });
|
||||
await execFileAsync("git", ["-C", projectPath, "commit", "-m", "initial"], { encoding: "utf8" });
|
||||
|
||||
await writeFile(
|
||||
envPath,
|
||||
[
|
||||
`AGENT_STATE_ROOT=${stateRoot}`,
|
||||
"AGENT_WORKTREE_ROOT=.ai_ops/worktrees",
|
||||
"AGENT_WORKTREE_BASE_REF=HEAD",
|
||||
].join("\n"),
|
||||
"utf8",
|
||||
);
|
||||
|
||||
const runService = new UiRunService({
|
||||
workspaceRoot,
|
||||
envFilePath: ".env",
|
||||
});
|
||||
|
||||
const createdSession = await runService.createSession({
|
||||
projectPath,
|
||||
});
|
||||
assert.equal(createdSession.sessionStatus, "active");
|
||||
|
||||
const manifest = {
|
||||
schemaVersion: "1",
|
||||
topologies: ["sequential"],
|
||||
personas: [
|
||||
{
|
||||
id: "writer",
|
||||
displayName: "Writer",
|
||||
systemPromptTemplate: "Write draft",
|
||||
toolClearance: {
|
||||
allowlist: ["read_file", "write_file"],
|
||||
banlist: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
relationships: [],
|
||||
topologyConstraints: {
|
||||
maxDepth: 1,
|
||||
maxRetries: 0,
|
||||
},
|
||||
pipeline: {
|
||||
entryNodeId: "write-node",
|
||||
nodes: [
|
||||
{
|
||||
id: "write-node",
|
||||
actorId: "writer-actor",
|
||||
personaId: "writer",
|
||||
},
|
||||
],
|
||||
edges: [],
|
||||
},
|
||||
};
|
||||
|
||||
const started = await runService.startRun({
|
||||
prompt: "complete task",
|
||||
manifest,
|
||||
sessionId: createdSession.sessionId,
|
||||
executionMode: "mock",
|
||||
});
|
||||
|
||||
const terminalStatus = await waitForTerminalRun(runService, started.runId);
|
||||
assert.equal(terminalStatus, "success");
|
||||
|
||||
const closed = await runService.closeSession({
|
||||
sessionId: createdSession.sessionId,
|
||||
});
|
||||
assert.equal(closed.sessionStatus, "closed");
|
||||
|
||||
await assert.rejects(() => stat(createdSession.baseWorkspacePath), {
|
||||
code: "ENOENT",
|
||||
});
|
||||
});
|
||||
|
||||
test("run service marks session closed_with_conflicts when close merge conflicts", async () => {
|
||||
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-run-service-close-conflict-"));
|
||||
const stateRoot = resolve(workspaceRoot, "state");
|
||||
const envPath = resolve(workspaceRoot, ".env");
|
||||
const projectPath = resolve(workspaceRoot, "project");
|
||||
|
||||
await mkdir(projectPath, { recursive: true });
|
||||
await execFileAsync("git", ["init", projectPath], { encoding: "utf8" });
|
||||
await execFileAsync("git", ["-C", projectPath, "config", "user.name", "AI Ops"], { encoding: "utf8" });
|
||||
await execFileAsync("git", ["-C", projectPath, "config", "user.email", "ai-ops@example.local"], { encoding: "utf8" });
|
||||
await writeFile(resolve(projectPath, "README.md"), "base\n", "utf8");
|
||||
await execFileAsync("git", ["-C", projectPath, "add", "README.md"], { encoding: "utf8" });
|
||||
await execFileAsync("git", ["-C", projectPath, "commit", "-m", "initial"], { encoding: "utf8" });
|
||||
|
||||
await writeFile(
|
||||
envPath,
|
||||
[
|
||||
`AGENT_STATE_ROOT=${stateRoot}`,
|
||||
"AGENT_WORKTREE_ROOT=.ai_ops/worktrees",
|
||||
"AGENT_WORKTREE_BASE_REF=HEAD",
|
||||
].join("\n"),
|
||||
"utf8",
|
||||
);
|
||||
|
||||
const runService = new UiRunService({
|
||||
workspaceRoot,
|
||||
envFilePath: ".env",
|
||||
});
|
||||
|
||||
const createdSession = await runService.createSession({
|
||||
projectPath,
|
||||
});
|
||||
|
||||
await writeFile(resolve(createdSession.baseWorkspacePath, "README.md"), "base branch update\n", "utf8");
|
||||
await execFileAsync("git", ["-C", createdSession.baseWorkspacePath, "add", "README.md"], { encoding: "utf8" });
|
||||
await execFileAsync("git", ["-C", createdSession.baseWorkspacePath, "commit", "-m", "base update"], { encoding: "utf8" });
|
||||
|
||||
await writeFile(resolve(projectPath, "README.md"), "project branch update\n", "utf8");
|
||||
await execFileAsync("git", ["-C", projectPath, "add", "README.md"], { encoding: "utf8" });
|
||||
await execFileAsync("git", ["-C", projectPath, "commit", "-m", "project update"], { encoding: "utf8" });
|
||||
|
||||
const closed = await runService.closeSession({
|
||||
sessionId: createdSession.sessionId,
|
||||
mergeToProject: true,
|
||||
});
|
||||
|
||||
assert.equal(closed.sessionStatus, "closed_with_conflicts");
|
||||
const baseWorkspaceStats = await stat(createdSession.baseWorkspacePath);
|
||||
assert.equal(baseWorkspaceStats.isDirectory(), true);
|
||||
});
|
||||
@@ -111,6 +111,42 @@ test("rules engine enforces binary allowlist, tool policy, and path boundaries",
|
||||
);
|
||||
});
|
||||
|
||||
test("rules engine dangerous_warn_only logs but does not block violating shell commands", async () => {
|
||||
const worktreeRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-security-warn-worktree-"));
|
||||
const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-security-warn-state-"));
|
||||
const projectContextPath = resolve(stateRoot, "project-context.json");
|
||||
|
||||
const rules = new SecurityRulesEngine(
|
||||
{
|
||||
allowedBinaries: ["git"],
|
||||
worktreeRoot,
|
||||
protectedPaths: [stateRoot, projectContextPath],
|
||||
requireCwdWithinWorktree: true,
|
||||
rejectRelativePathTraversal: true,
|
||||
enforcePathBoundaryOnArguments: true,
|
||||
allowedEnvAssignments: [],
|
||||
blockedEnvAssignments: [],
|
||||
},
|
||||
undefined,
|
||||
{
|
||||
violationHandling: "dangerous_warn_only",
|
||||
},
|
||||
);
|
||||
|
||||
const validated = await rules.validateShellCommand({
|
||||
command: "unauthorized_bin --version",
|
||||
cwd: worktreeRoot,
|
||||
toolClearance: {
|
||||
allowlist: ["git"],
|
||||
banlist: [],
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(validated.cwd, worktreeRoot);
|
||||
assert.equal(validated.parsed.commandCount, 0);
|
||||
assert.deepEqual(validated.parsed.commands, []);
|
||||
});
|
||||
|
||||
test("secure executor runs with explicit env policy", async () => {
|
||||
const worktreeRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-security-exec-"));
|
||||
|
||||
@@ -155,3 +191,85 @@ test("secure executor runs with explicit env policy", async () => {
|
||||
assert.equal(result.stdout, "ok|\n");
|
||||
assert.equal(streamedStdout, result.stdout);
|
||||
});
|
||||
|
||||
test("rules engine carries session context in tool audit events", () => {
|
||||
const events: Array<Record<string, unknown>> = [];
|
||||
const rules = new SecurityRulesEngine(
|
||||
{
|
||||
allowedBinaries: ["git"],
|
||||
worktreeRoot: "/tmp",
|
||||
protectedPaths: [],
|
||||
requireCwdWithinWorktree: true,
|
||||
rejectRelativePathTraversal: true,
|
||||
enforcePathBoundaryOnArguments: true,
|
||||
allowedEnvAssignments: [],
|
||||
blockedEnvAssignments: [],
|
||||
},
|
||||
(event) => {
|
||||
events.push(event as unknown as Record<string, unknown>);
|
||||
},
|
||||
);
|
||||
|
||||
rules.assertToolInvocationAllowed({
|
||||
tool: "git",
|
||||
toolClearance: {
|
||||
allowlist: ["git"],
|
||||
banlist: [],
|
||||
},
|
||||
context: {
|
||||
sessionId: "session-ctx",
|
||||
nodeId: "node-ctx",
|
||||
attempt: 2,
|
||||
},
|
||||
});
|
||||
|
||||
const allowedEvent = events.find((event) => event.type === "tool.invocation_allowed");
|
||||
assert.ok(allowedEvent);
|
||||
assert.equal(allowedEvent.sessionId, "session-ctx");
|
||||
assert.equal(allowedEvent.nodeId, "node-ctx");
|
||||
assert.equal(allowedEvent.attempt, 2);
|
||||
});
|
||||
|
||||
test("rules engine applies tool clearance matching case-insensitively", () => {
|
||||
const rules = new SecurityRulesEngine({
|
||||
allowedBinaries: ["git"],
|
||||
worktreeRoot: "/tmp",
|
||||
protectedPaths: [],
|
||||
requireCwdWithinWorktree: true,
|
||||
rejectRelativePathTraversal: true,
|
||||
enforcePathBoundaryOnArguments: true,
|
||||
allowedEnvAssignments: [],
|
||||
blockedEnvAssignments: [],
|
||||
});
|
||||
|
||||
assert.doesNotThrow(() =>
|
||||
rules.assertToolInvocationAllowed({
|
||||
tool: "Bash",
|
||||
toolClearance: {
|
||||
allowlist: ["bash", "glob"],
|
||||
banlist: [],
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
assert.throws(
|
||||
() =>
|
||||
rules.assertToolInvocationAllowed({
|
||||
tool: "Glob",
|
||||
toolClearance: {
|
||||
allowlist: ["bash", "glob"],
|
||||
banlist: ["GLOB"],
|
||||
},
|
||||
}),
|
||||
(error: unknown) =>
|
||||
error instanceof SecurityViolationError && error.code === "TOOL_BANNED",
|
||||
);
|
||||
|
||||
assert.deepEqual(
|
||||
rules.filterAllowedTools(["Bash", "Glob", "Read"], {
|
||||
allowlist: ["bash", "glob"],
|
||||
banlist: ["gLoB"],
|
||||
}),
|
||||
["Bash"],
|
||||
);
|
||||
});
|
||||
|
||||
@@ -4,7 +4,7 @@ import { mkdir, writeFile } from "node:fs/promises";
|
||||
import { tmpdir } from "node:os";
|
||||
import { resolve } from "node:path";
|
||||
import { mkdtemp } from "node:fs/promises";
|
||||
import { buildSessionGraphInsight, buildSessionSummaries } from "../src/ui/session-insights.js";
|
||||
import { buildSessionGraphInsight, buildSessionSummaries } from "../src/telemetry/session-insights.js";
|
||||
import { parseAgentManifest } from "../src/agents/manifest.js";
|
||||
|
||||
function createManifest() {
|
||||
@@ -155,13 +155,13 @@ test("buildSessionGraphInsight maps attempts, edge visits, and sandbox payload",
|
||||
assert.equal(graph.status, "success");
|
||||
assert.equal(graph.nodes.length, 2);
|
||||
|
||||
const node2 = graph.nodes.find((node) => node.nodeId === "n2");
|
||||
const node2 = graph.nodes.find((node: any) => node.nodeId === "n2");
|
||||
assert.ok(node2);
|
||||
assert.equal(node2.attemptCount, 2);
|
||||
assert.equal(node2.subtaskCount, 1);
|
||||
assert.equal(node2.sandboxPayload?.phase, "n2");
|
||||
|
||||
const edge = graph.edges.find((entry) => entry.from === "n1" && entry.to === "n2");
|
||||
const edge = graph.edges.find((entry: any) => entry.from === "n1" && entry.to === "n2");
|
||||
assert.ok(edge);
|
||||
assert.equal(edge.visited, true);
|
||||
assert.equal(edge.trigger, "event:validation_failed");
|
||||
|
||||
287
tests/session-lifecycle.test.ts
Normal file
287
tests/session-lifecycle.test.ts
Normal file
@@ -0,0 +1,287 @@
|
||||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
import { execFile } from "node:child_process";
|
||||
import { mkdtemp, mkdir, readFile, rm, stat, writeFile } from "node:fs/promises";
|
||||
import { tmpdir } from "node:os";
|
||||
import { resolve } from "node:path";
|
||||
import { promisify } from "node:util";
|
||||
import {
|
||||
FileSystemSessionMetadataStore,
|
||||
SessionWorktreeManager,
|
||||
type SessionMetadata,
|
||||
} from "../src/agents/session-lifecycle.js";
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
async function git(args: string[]): Promise<string> {
|
||||
const { stdout } = await execFileAsync("git", args, {
|
||||
encoding: "utf8",
|
||||
});
|
||||
return stdout.trim();
|
||||
}
|
||||
|
||||
test("session metadata store persists and updates session metadata", async () => {
|
||||
const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-store-"));
|
||||
const store = new FileSystemSessionMetadataStore({ stateRoot });
|
||||
|
||||
const created = await store.createSession({
|
||||
sessionId: "session-abc",
|
||||
projectPath: resolve(stateRoot, "project"),
|
||||
baseWorkspacePath: resolve(stateRoot, "worktrees", "session-abc", "base"),
|
||||
});
|
||||
|
||||
assert.equal(created.sessionStatus, "active");
|
||||
assert.equal(created.sessionId, "session-abc");
|
||||
|
||||
const listed = await store.listSessions();
|
||||
assert.equal(listed.length, 1);
|
||||
assert.equal(listed[0]?.sessionId, "session-abc");
|
||||
|
||||
const updated = await store.updateSession("session-abc", {
|
||||
sessionStatus: "closed",
|
||||
});
|
||||
assert.equal(updated.sessionStatus, "closed");
|
||||
|
||||
const readBack = await store.readSession("session-abc");
|
||||
assert.equal(readBack?.sessionStatus, "closed");
|
||||
|
||||
const closedWithConflicts = await store.updateSession("session-abc", {
|
||||
sessionStatus: "closed_with_conflicts",
|
||||
});
|
||||
assert.equal(closedWithConflicts.sessionStatus, "closed_with_conflicts");
|
||||
});
|
||||
|
||||
test("session worktree manager provisions and merges task worktrees", async () => {
|
||||
const root = await mkdtemp(resolve(tmpdir(), "ai-ops-session-worktree-"));
|
||||
const projectPath = resolve(root, "project");
|
||||
const worktreeRoot = resolve(root, "worktrees");
|
||||
|
||||
await mkdir(projectPath, { recursive: true });
|
||||
await git(["init", projectPath]);
|
||||
await git(["-C", projectPath, "config", "user.name", "AI Ops"]);
|
||||
await git(["-C", projectPath, "config", "user.email", "ai-ops@example.local"]);
|
||||
await writeFile(resolve(projectPath, "README.md"), "# project\n", "utf8");
|
||||
await git(["-C", projectPath, "add", "README.md"]);
|
||||
await git(["-C", projectPath, "commit", "-m", "initial commit"]);
|
||||
|
||||
const manager = new SessionWorktreeManager({
|
||||
worktreeRoot,
|
||||
baseRef: "HEAD",
|
||||
});
|
||||
|
||||
const sessionId = "session-1";
|
||||
const baseWorkspacePath = manager.resolveBaseWorkspacePath(sessionId);
|
||||
|
||||
await manager.initializeSessionBaseWorkspace({
|
||||
sessionId,
|
||||
projectPath,
|
||||
baseWorkspacePath,
|
||||
});
|
||||
|
||||
const baseStats = await stat(baseWorkspacePath);
|
||||
assert.equal(baseStats.isDirectory(), true);
|
||||
|
||||
const taskWorktreePath = (
|
||||
await manager.ensureTaskWorktree({
|
||||
sessionId,
|
||||
taskId: "task-1",
|
||||
baseWorkspacePath,
|
||||
})
|
||||
).taskWorktreePath;
|
||||
|
||||
await writeFile(resolve(taskWorktreePath, "feature.txt"), "task output\n", "utf8");
|
||||
|
||||
const mergeOutcome = await manager.mergeTaskIntoBase({
|
||||
taskId: "task-1",
|
||||
baseWorkspacePath,
|
||||
taskWorktreePath,
|
||||
});
|
||||
assert.equal(mergeOutcome.kind, "success");
|
||||
|
||||
const mergedFile = await readFile(resolve(baseWorkspacePath, "feature.txt"), "utf8");
|
||||
assert.equal(mergedFile, "task output\n");
|
||||
|
||||
const session: SessionMetadata = {
|
||||
sessionId,
|
||||
projectPath,
|
||||
baseWorkspacePath,
|
||||
sessionStatus: "active",
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
const closeOutcome = await manager.closeSession({
|
||||
session,
|
||||
taskWorktreePaths: [],
|
||||
mergeBaseIntoProject: false,
|
||||
});
|
||||
assert.equal(closeOutcome.kind, "success");
|
||||
|
||||
await assert.rejects(() => stat(baseWorkspacePath), {
|
||||
code: "ENOENT",
|
||||
});
|
||||
});
|
||||
|
||||
test("session worktree manager returns conflict outcome instead of throwing", async () => {
|
||||
const root = await mkdtemp(resolve(tmpdir(), "ai-ops-session-worktree-conflict-"));
|
||||
const projectPath = resolve(root, "project");
|
||||
const worktreeRoot = resolve(root, "worktrees");
|
||||
|
||||
await mkdir(projectPath, { recursive: true });
|
||||
await git(["init", projectPath]);
|
||||
await git(["-C", projectPath, "config", "user.name", "AI Ops"]);
|
||||
await git(["-C", projectPath, "config", "user.email", "ai-ops@example.local"]);
|
||||
await writeFile(resolve(projectPath, "README.md"), "base\n", "utf8");
|
||||
await git(["-C", projectPath, "add", "README.md"]);
|
||||
await git(["-C", projectPath, "commit", "-m", "initial commit"]);
|
||||
|
||||
const manager = new SessionWorktreeManager({
|
||||
worktreeRoot,
|
||||
baseRef: "HEAD",
|
||||
});
|
||||
|
||||
const sessionId = "session-conflict-1";
|
||||
const baseWorkspacePath = manager.resolveBaseWorkspacePath(sessionId);
|
||||
|
||||
await manager.initializeSessionBaseWorkspace({
|
||||
sessionId,
|
||||
projectPath,
|
||||
baseWorkspacePath,
|
||||
});
|
||||
|
||||
const taskWorktreePath = (
|
||||
await manager.ensureTaskWorktree({
|
||||
sessionId,
|
||||
taskId: "task-conflict",
|
||||
baseWorkspacePath,
|
||||
})
|
||||
).taskWorktreePath;
|
||||
|
||||
await writeFile(resolve(baseWorkspacePath, "README.md"), "base branch change\n", "utf8");
|
||||
await git(["-C", baseWorkspacePath, "add", "README.md"]);
|
||||
await git(["-C", baseWorkspacePath, "commit", "-m", "base update"]);
|
||||
|
||||
await writeFile(resolve(taskWorktreePath, "README.md"), "task branch change\n", "utf8");
|
||||
|
||||
const mergeOutcome = await manager.mergeTaskIntoBase({
|
||||
taskId: "task-conflict",
|
||||
baseWorkspacePath,
|
||||
taskWorktreePath,
|
||||
});
|
||||
|
||||
assert.equal(mergeOutcome.kind, "conflict");
|
||||
if (mergeOutcome.kind !== "conflict") {
|
||||
throw new Error("Expected merge conflict outcome.");
|
||||
}
|
||||
assert.equal(mergeOutcome.taskId, "task-conflict");
|
||||
assert.equal(mergeOutcome.worktreePath, taskWorktreePath);
|
||||
assert.ok(mergeOutcome.conflictFiles.includes("README.md"));
|
||||
});
|
||||
|
||||
test("session worktree manager recreates a task worktree after stale metadata prune", async () => {
|
||||
const root = await mkdtemp(resolve(tmpdir(), "ai-ops-session-worktree-prune-"));
|
||||
const projectPath = resolve(root, "project");
|
||||
const worktreeRoot = resolve(root, "worktrees");
|
||||
|
||||
await mkdir(projectPath, { recursive: true });
|
||||
await git(["init", projectPath]);
|
||||
await git(["-C", projectPath, "config", "user.name", "AI Ops"]);
|
||||
await git(["-C", projectPath, "config", "user.email", "ai-ops@example.local"]);
|
||||
await writeFile(resolve(projectPath, "README.md"), "# project\n", "utf8");
|
||||
await git(["-C", projectPath, "add", "README.md"]);
|
||||
await git(["-C", projectPath, "commit", "-m", "initial commit"]);
|
||||
|
||||
const manager = new SessionWorktreeManager({
|
||||
worktreeRoot,
|
||||
baseRef: "HEAD",
|
||||
});
|
||||
|
||||
const sessionId = "session-prune-1";
|
||||
const taskId = "task-prune-1";
|
||||
const baseWorkspacePath = manager.resolveBaseWorkspacePath(sessionId);
|
||||
|
||||
await manager.initializeSessionBaseWorkspace({
|
||||
sessionId,
|
||||
projectPath,
|
||||
baseWorkspacePath,
|
||||
});
|
||||
|
||||
const initialTaskWorktreePath = (
|
||||
await manager.ensureTaskWorktree({
|
||||
sessionId,
|
||||
taskId,
|
||||
baseWorkspacePath,
|
||||
})
|
||||
).taskWorktreePath;
|
||||
|
||||
await rm(initialTaskWorktreePath, { recursive: true, force: true });
|
||||
|
||||
const recreatedTaskWorktreePath = (
|
||||
await manager.ensureTaskWorktree({
|
||||
sessionId,
|
||||
taskId,
|
||||
baseWorkspacePath,
|
||||
})
|
||||
).taskWorktreePath;
|
||||
|
||||
assert.equal(recreatedTaskWorktreePath, initialTaskWorktreePath);
|
||||
const stats = await stat(recreatedTaskWorktreePath);
|
||||
assert.equal(stats.isDirectory(), true);
|
||||
});
|
||||
|
||||
test("session worktree manager applies target path sparse checkout and task working directory", async () => {
|
||||
const root = await mkdtemp(resolve(tmpdir(), "ai-ops-session-worktree-target-"));
|
||||
const projectPath = resolve(root, "project");
|
||||
const worktreeRoot = resolve(root, "worktrees");
|
||||
|
||||
await mkdir(resolve(projectPath, "app", "src"), { recursive: true });
|
||||
await mkdir(resolve(projectPath, "infra"), { recursive: true });
|
||||
await git(["init", projectPath]);
|
||||
await git(["-C", projectPath, "config", "user.name", "AI Ops"]);
|
||||
await git(["-C", projectPath, "config", "user.email", "ai-ops@example.local"]);
|
||||
await writeFile(resolve(projectPath, "app", "src", "index.ts"), "export const app = true;\n", "utf8");
|
||||
await writeFile(resolve(projectPath, "infra", "notes.txt"), "infra\n", "utf8");
|
||||
await git(["-C", projectPath, "add", "."]);
|
||||
await git(["-C", projectPath, "commit", "-m", "initial commit"]);
|
||||
|
||||
const manager = new SessionWorktreeManager({
|
||||
worktreeRoot,
|
||||
baseRef: "HEAD",
|
||||
targetPath: "app",
|
||||
});
|
||||
|
||||
const sessionId = "session-target-1";
|
||||
const baseWorkspacePath = manager.resolveBaseWorkspacePath(sessionId);
|
||||
await manager.initializeSessionBaseWorkspace({
|
||||
sessionId,
|
||||
projectPath,
|
||||
baseWorkspacePath,
|
||||
});
|
||||
|
||||
const baseWorkingDirectory = manager.resolveWorkingDirectoryForWorktree(baseWorkspacePath);
|
||||
assert.equal(baseWorkingDirectory, resolve(baseWorkspacePath, "app"));
|
||||
const baseWorkingStats = await stat(baseWorkingDirectory);
|
||||
assert.equal(baseWorkingStats.isDirectory(), true);
|
||||
await assert.rejects(() => stat(resolve(baseWorkspacePath, "infra")), {
|
||||
code: "ENOENT",
|
||||
});
|
||||
|
||||
const ensured = await manager.ensureTaskWorktree({
|
||||
sessionId,
|
||||
taskId: "task-target-1",
|
||||
baseWorkspacePath,
|
||||
});
|
||||
assert.equal(ensured.taskWorkingDirectory, resolve(ensured.taskWorktreePath, "app"));
|
||||
|
||||
await writeFile(resolve(ensured.taskWorkingDirectory, "src", "feature.ts"), "export const feature = true;\n", "utf8");
|
||||
|
||||
const mergeOutcome = await manager.mergeTaskIntoBase({
|
||||
taskId: "task-target-1",
|
||||
baseWorkspacePath,
|
||||
taskWorktreePath: ensured.taskWorktreePath,
|
||||
});
|
||||
assert.equal(mergeOutcome.kind, "success");
|
||||
|
||||
const merged = await readFile(resolve(baseWorkingDirectory, "src", "feature.ts"), "utf8");
|
||||
assert.equal(merged, "export const feature = true;\n");
|
||||
});
|
||||
24
ui/.gitignore
vendored
Normal file
24
ui/.gitignore
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
pnpm-debug.log*
|
||||
lerna-debug.log*
|
||||
|
||||
node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
*.local
|
||||
|
||||
# Editor directories and files
|
||||
.vscode/*
|
||||
!.vscode/extensions.json
|
||||
.idea
|
||||
.DS_Store
|
||||
*.suo
|
||||
*.ntvs*
|
||||
*.njsproj
|
||||
*.sln
|
||||
*.sw?
|
||||
73
ui/README.md
Normal file
73
ui/README.md
Normal file
@@ -0,0 +1,73 @@
|
||||
# React + TypeScript + Vite
|
||||
|
||||
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
|
||||
|
||||
Currently, two official plugins are available:
|
||||
|
||||
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Babel](https://babeljs.io/) (or [oxc](https://oxc.rs) when used in [rolldown-vite](https://vite.dev/guide/rolldown)) for Fast Refresh
|
||||
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
|
||||
|
||||
## React Compiler
|
||||
|
||||
The React Compiler is not enabled on this template because of its impact on dev & build performances. To add it, see [this documentation](https://react.dev/learn/react-compiler/installation).
|
||||
|
||||
## Expanding the ESLint configuration
|
||||
|
||||
If you are developing a production application, we recommend updating the configuration to enable type-aware lint rules:
|
||||
|
||||
```js
|
||||
export default defineConfig([
|
||||
globalIgnores(['dist']),
|
||||
{
|
||||
files: ['**/*.{ts,tsx}'],
|
||||
extends: [
|
||||
// Other configs...
|
||||
|
||||
// Remove tseslint.configs.recommended and replace with this
|
||||
tseslint.configs.recommendedTypeChecked,
|
||||
// Alternatively, use this for stricter rules
|
||||
tseslint.configs.strictTypeChecked,
|
||||
// Optionally, add this for stylistic rules
|
||||
tseslint.configs.stylisticTypeChecked,
|
||||
|
||||
// Other configs...
|
||||
],
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
project: ['./tsconfig.node.json', './tsconfig.app.json'],
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
// other options...
|
||||
},
|
||||
},
|
||||
])
|
||||
```
|
||||
|
||||
You can also install [eslint-plugin-react-x](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-x) and [eslint-plugin-react-dom](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-dom) for React-specific lint rules:
|
||||
|
||||
```js
|
||||
// eslint.config.js
|
||||
import reactX from 'eslint-plugin-react-x'
|
||||
import reactDom from 'eslint-plugin-react-dom'
|
||||
|
||||
export default defineConfig([
|
||||
globalIgnores(['dist']),
|
||||
{
|
||||
files: ['**/*.{ts,tsx}'],
|
||||
extends: [
|
||||
// Other configs...
|
||||
// Enable lint rules for React
|
||||
reactX.configs['recommended-typescript'],
|
||||
// Enable lint rules for React DOM
|
||||
reactDom.configs.recommended,
|
||||
],
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
project: ['./tsconfig.node.json', './tsconfig.app.json'],
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
// other options...
|
||||
},
|
||||
},
|
||||
])
|
||||
```
|
||||
23
ui/eslint.config.js
Normal file
23
ui/eslint.config.js
Normal file
@@ -0,0 +1,23 @@
|
||||
import js from '@eslint/js'
|
||||
import globals from 'globals'
|
||||
import reactHooks from 'eslint-plugin-react-hooks'
|
||||
import reactRefresh from 'eslint-plugin-react-refresh'
|
||||
import tseslint from 'typescript-eslint'
|
||||
import { defineConfig, globalIgnores } from 'eslint/config'
|
||||
|
||||
export default defineConfig([
|
||||
globalIgnores(['dist']),
|
||||
{
|
||||
files: ['**/*.{ts,tsx}'],
|
||||
extends: [
|
||||
js.configs.recommended,
|
||||
tseslint.configs.recommended,
|
||||
reactHooks.configs.flat.recommended,
|
||||
reactRefresh.configs.vite,
|
||||
],
|
||||
languageOptions: {
|
||||
ecmaVersion: 2020,
|
||||
globals: globals.browser,
|
||||
},
|
||||
},
|
||||
])
|
||||
13
ui/index.html
Normal file
13
ui/index.html
Normal file
@@ -0,0 +1,13 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>ui</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/main.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
3354
ui/package-lock.json
generated
Normal file
3354
ui/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
32
ui/package.json
Normal file
32
ui/package.json
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"name": "ui",
|
||||
"private": true,
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "tsc -b && vite build",
|
||||
"lint": "eslint .",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"lucide-react": "^0.575.0",
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
"react-router-dom": "^7.13.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.39.1",
|
||||
"@types/node": "^24.10.1",
|
||||
"@types/react": "^19.2.7",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"@vitejs/plugin-react": "^5.1.1",
|
||||
"eslint": "^9.39.1",
|
||||
"eslint-plugin-react-hooks": "^7.0.1",
|
||||
"eslint-plugin-react-refresh": "^0.4.24",
|
||||
"globals": "^16.5.0",
|
||||
"typescript": "~5.9.3",
|
||||
"typescript-eslint": "^8.48.0",
|
||||
"vite": "^7.3.1"
|
||||
}
|
||||
}
|
||||
1
ui/public/vite.svg
Normal file
1
ui/public/vite.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>
|
||||
|
After Width: | Height: | Size: 1.5 KiB |
42
ui/src/App.css
Normal file
42
ui/src/App.css
Normal file
@@ -0,0 +1,42 @@
|
||||
#root {
|
||||
max-width: 1280px;
|
||||
margin: 0 auto;
|
||||
padding: 2rem;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.logo {
|
||||
height: 6em;
|
||||
padding: 1.5em;
|
||||
will-change: filter;
|
||||
transition: filter 300ms;
|
||||
}
|
||||
.logo:hover {
|
||||
filter: drop-shadow(0 0 2em #646cffaa);
|
||||
}
|
||||
.logo.react:hover {
|
||||
filter: drop-shadow(0 0 2em #61dafbaa);
|
||||
}
|
||||
|
||||
@keyframes logo-spin {
|
||||
from {
|
||||
transform: rotate(0deg);
|
||||
}
|
||||
to {
|
||||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-reduced-motion: no-preference) {
|
||||
a:nth-of-type(2) .logo {
|
||||
animation: logo-spin infinite 20s linear;
|
||||
}
|
||||
}
|
||||
|
||||
.card {
|
||||
padding: 2em;
|
||||
}
|
||||
|
||||
.read-the-docs {
|
||||
color: #888;
|
||||
}
|
||||
22
ui/src/App.tsx
Normal file
22
ui/src/App.tsx
Normal file
@@ -0,0 +1,22 @@
|
||||
|
||||
import { BrowserRouter, Routes, Route } from 'react-router-dom';
|
||||
import MainLayout from './layouts/MainLayout';
|
||||
import Dashboard from './pages/Dashboard';
|
||||
import Settings from './pages/Settings';
|
||||
import History from './pages/History';
|
||||
|
||||
function App() {
|
||||
return (
|
||||
<BrowserRouter>
|
||||
<Routes>
|
||||
<Route path="/" element={<MainLayout />}>
|
||||
<Route index element={<Dashboard />} />
|
||||
<Route path="history" element={<History />} />
|
||||
<Route path="settings" element={<Settings />} />
|
||||
</Route>
|
||||
</Routes>
|
||||
</BrowserRouter>
|
||||
);
|
||||
}
|
||||
|
||||
export default App;
|
||||
1
ui/src/assets/react.svg
Normal file
1
ui/src/assets/react.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 
13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 
44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>
|
||||
|
After Width: | Height: | Size: 4.0 KiB |
602
ui/src/index.css
Normal file
602
ui/src/index.css
Normal file
@@ -0,0 +1,602 @@
|
||||
:root {
|
||||
--bg-primary: #0a0a0c;
|
||||
--bg-secondary: #121216;
|
||||
--bg-tertiary: #1a1a20;
|
||||
--bg-highlight: #23232b;
|
||||
|
||||
--text-primary: #ffffff;
|
||||
--text-secondary: #94949e;
|
||||
--text-tertiary: #6b6b76;
|
||||
|
||||
--accent-primary: #818cf8;
|
||||
--accent-primary-hover: #6366f1;
|
||||
--accent-secondary: #c084fc;
|
||||
|
||||
--success: #34d399;
|
||||
--warning: #fbbf24;
|
||||
--danger: #ef4444;
|
||||
--info: #38bdf8;
|
||||
|
||||
--border-color: rgba(255, 255, 255, 0.08);
|
||||
--border-focus: rgba(129, 140, 248, 0.5);
|
||||
|
||||
--shadow-sm: 0 1px 2px 0 rgba(0, 0, 0, 0.05);
|
||||
--shadow-md: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06);
|
||||
--shadow-lg: 0 10px 15px -3px rgba(0, 0, 0, 0.1), 0 4px 6px -2px rgba(0, 0, 0, 0.05);
|
||||
--shadow-glow: 0 0 15px rgba(129, 140, 248, 0.3);
|
||||
|
||||
--radius-sm: 4px;
|
||||
--radius-md: 8px;
|
||||
--radius-lg: 12px;
|
||||
--radius-xl: 16px;
|
||||
--radius-full: 9999px;
|
||||
|
||||
--transition-fast: 150ms cubic-bezier(0.4, 0, 0.2, 1);
|
||||
--transition-normal: 250ms cubic-bezier(0.4, 0, 0.2, 1);
|
||||
--transition-slow: 350ms cubic-bezier(0.4, 0, 0.2, 1);
|
||||
}
|
||||
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: 'Inter', system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
|
||||
background-color: var(--bg-primary);
|
||||
color: var(--text-primary);
|
||||
line-height: 1.5;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
overflow: hidden;
|
||||
/* App takes full height */
|
||||
}
|
||||
|
||||
/* Typography elements */
|
||||
h1,
|
||||
h2,
|
||||
h3,
|
||||
h4,
|
||||
h5,
|
||||
h6 {
|
||||
font-weight: 600;
|
||||
line-height: 1.2;
|
||||
color: var(--text-primary);
|
||||
margin-bottom: 0.5rem;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 1.5rem;
|
||||
letter-spacing: -0.025em;
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: 1.25rem;
|
||||
letter-spacing: -0.025em;
|
||||
}
|
||||
|
||||
h3 {
|
||||
font-size: 1.125rem;
|
||||
}
|
||||
|
||||
p {
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
/* Form Elements */
|
||||
input,
|
||||
select,
|
||||
textarea,
|
||||
button {
|
||||
font-family: inherit;
|
||||
font-size: 0.875rem;
|
||||
background: var(--bg-tertiary);
|
||||
color: var(--text-primary);
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: var(--radius-md);
|
||||
padding: 0.5rem 0.75rem;
|
||||
transition: all var(--transition-fast);
|
||||
outline: none;
|
||||
}
|
||||
|
||||
input:focus,
|
||||
select:focus,
|
||||
textarea:focus {
|
||||
border-color: var(--accent-primary);
|
||||
box-shadow: 0 0 0 2px var(--border-focus);
|
||||
}
|
||||
|
||||
input:disabled,
|
||||
select:disabled,
|
||||
textarea:disabled,
|
||||
button:disabled {
|
||||
opacity: 0.6;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
button {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 0.5rem;
|
||||
font-weight: 500;
|
||||
cursor: pointer;
|
||||
background: var(--bg-highlight);
|
||||
border: 1px solid var(--border-color);
|
||||
}
|
||||
|
||||
button:hover:not(:disabled) {
|
||||
background: var(--bg-tertiary);
|
||||
border-color: rgba(255, 255, 255, 0.2);
|
||||
}
|
||||
|
||||
button:active:not(:disabled) {
|
||||
transform: translateY(1px);
|
||||
}
|
||||
|
||||
button.primary {
|
||||
background: var(--accent-primary);
|
||||
color: white;
|
||||
border-color: transparent;
|
||||
box-shadow: 0 2px 4px rgba(129, 140, 248, 0.2);
|
||||
}
|
||||
|
||||
button.primary:hover:not(:disabled) {
|
||||
background: var(--accent-primary-hover);
|
||||
box-shadow: 0 4px 12px rgba(129, 140, 248, 0.3);
|
||||
}
|
||||
|
||||
button.danger {
|
||||
color: var(--danger);
|
||||
border-color: rgba(239, 68, 68, 0.2);
|
||||
background: rgba(239, 68, 68, 0.05);
|
||||
}
|
||||
|
||||
button.danger:hover:not(:disabled) {
|
||||
background: rgba(239, 68, 68, 0.1);
|
||||
border-color: rgba(239, 68, 68, 0.3);
|
||||
}
|
||||
|
||||
/* App Shell Layout */
|
||||
#root {
|
||||
display: flex;
|
||||
height: 100vh;
|
||||
width: 100vw;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.sidebar {
|
||||
width: 260px;
|
||||
background-color: var(--bg-secondary);
|
||||
border-right: 1px solid var(--border-color);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
z-index: 10;
|
||||
}
|
||||
|
||||
.sidebar-header {
|
||||
padding: 1.5rem;
|
||||
border-bottom: 1px solid var(--border-color);
|
||||
}
|
||||
|
||||
.sidebar-header h1 {
|
||||
font-size: 1.125rem;
|
||||
font-weight: 700;
|
||||
background: linear-gradient(135deg, var(--accent-primary), var(--accent-secondary));
|
||||
-webkit-background-clip: text;
|
||||
-webkit-text-fill-color: transparent;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.sidebar-header p {
|
||||
font-size: 0.75rem;
|
||||
margin-top: 0.25rem;
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
.sidebar-nav {
|
||||
flex: 1;
|
||||
padding: 1rem 0;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
.nav-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.75rem;
|
||||
padding: 0.75rem 1.5rem;
|
||||
color: var(--text-secondary);
|
||||
text-decoration: none;
|
||||
font-size: 0.875rem;
|
||||
font-weight: 500;
|
||||
transition: all var(--transition-fast);
|
||||
border-left: 2px solid transparent;
|
||||
}
|
||||
|
||||
.nav-item:hover {
|
||||
background-color: var(--bg-highlight);
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.nav-item.active {
|
||||
background-color: rgba(129, 140, 248, 0.05);
|
||||
color: var(--accent-primary);
|
||||
border-left-color: var(--accent-primary);
|
||||
}
|
||||
|
||||
.nav-item svg {
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
opacity: 0.8;
|
||||
}
|
||||
|
||||
.nav-item.active svg {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.sidebar-footer {
|
||||
padding: 1.25rem;
|
||||
border-top: 1px solid var(--border-color);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.75rem;
|
||||
font-size: 0.75rem;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.status-dot {
|
||||
width: 8px;
|
||||
height: 8px;
|
||||
border-radius: 50%;
|
||||
background-color: var(--text-tertiary);
|
||||
box-shadow: 0 0 0 0 rgba(0, 0, 0, 0);
|
||||
transition: all var(--transition-normal);
|
||||
}
|
||||
|
||||
.status-dot.online {
|
||||
background-color: var(--success);
|
||||
box-shadow: 0 0 8px var(--success);
|
||||
animation: pulse 2s infinite;
|
||||
}
|
||||
|
||||
.status-dot.offline {
|
||||
background-color: var(--danger);
|
||||
box-shadow: 0 0 8px var(--danger);
|
||||
}
|
||||
|
||||
@keyframes pulse {
|
||||
0% {
|
||||
box-shadow: 0 0 0 0 rgba(52, 211, 153, 0.4);
|
||||
}
|
||||
|
||||
70% {
|
||||
box-shadow: 0 0 0 6px rgba(52, 211, 153, 0);
|
||||
}
|
||||
|
||||
100% {
|
||||
box-shadow: 0 0 0 0 rgba(52, 211, 153, 0);
|
||||
}
|
||||
}
|
||||
|
||||
.main-content {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
background-color: var(--bg-primary);
|
||||
overflow: hidden;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.top-bar {
|
||||
height: 60px;
|
||||
border-bottom: 1px solid var(--border-color);
|
||||
background-color: rgba(10, 10, 12, 0.8);
|
||||
backdrop-filter: blur(12px);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: 0 2rem;
|
||||
justify-content: space-between;
|
||||
z-index: 5;
|
||||
}
|
||||
|
||||
.page-title {
|
||||
font-size: 1.125rem;
|
||||
font-weight: 600;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.content-scroll {
|
||||
flex: 1;
|
||||
overflow-y: auto;
|
||||
padding: 2rem;
|
||||
scroll-behavior: smooth;
|
||||
}
|
||||
|
||||
/* Animations */
|
||||
@keyframes fadeIn {
|
||||
from {
|
||||
opacity: 0;
|
||||
transform: translateY(10px);
|
||||
}
|
||||
|
||||
to {
|
||||
opacity: 1;
|
||||
transform: translateY(0);
|
||||
}
|
||||
}
|
||||
|
||||
.animate-fade-in {
|
||||
animation: fadeIn 0.4s ease-out forwards;
|
||||
}
|
||||
|
||||
/* Panel Layout */
|
||||
.panel {
|
||||
background: var(--bg-secondary);
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: var(--radius-lg);
|
||||
padding: 1.5rem;
|
||||
margin-bottom: 1.5rem;
|
||||
box-shadow: var(--shadow-md);
|
||||
}
|
||||
|
||||
.panel-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
margin-bottom: 1.25rem;
|
||||
padding-bottom: 0.75rem;
|
||||
border-bottom: 1px solid var(--border-color);
|
||||
}
|
||||
|
||||
.panel-title {
|
||||
font-size: 1rem;
|
||||
font-weight: 600;
|
||||
margin: 0;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
/* Forms & Inputs specific to layout */
|
||||
.form-group {
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
.form-group label {
|
||||
display: block;
|
||||
font-size: 0.8rem;
|
||||
font-weight: 500;
|
||||
margin-bottom: 0.4rem;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.form-row {
|
||||
display: flex;
|
||||
gap: 1rem;
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
.form-row>* {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.divider {
|
||||
height: 1px;
|
||||
background-color: var(--border-color);
|
||||
margin: 1.5rem 0;
|
||||
}
|
||||
|
||||
/* Tag / Badge */
|
||||
.badge {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
padding: 0.125rem 0.5rem;
|
||||
border-radius: var(--radius-full);
|
||||
font-size: 0.7rem;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.05em;
|
||||
}
|
||||
|
||||
.badge.success {
|
||||
background: rgba(52, 211, 153, 0.15);
|
||||
color: var(--success);
|
||||
}
|
||||
|
||||
.badge.warning {
|
||||
background: rgba(251, 191, 36, 0.15);
|
||||
color: var(--warning);
|
||||
border: 1px solid rgba(251, 191, 36, 0.3);
|
||||
}
|
||||
|
||||
.badge.danger {
|
||||
background: rgba(239, 68, 68, 0.15);
|
||||
color: var(--danger);
|
||||
}
|
||||
|
||||
.badge.info {
|
||||
background: rgba(56, 189, 248, 0.15);
|
||||
color: var(--info);
|
||||
}
|
||||
|
||||
.badge.neutral {
|
||||
background: var(--bg-highlight);
|
||||
color: var(--text-secondary);
|
||||
border: 1px solid var(--border-color);
|
||||
}
|
||||
|
||||
/* Utility */
|
||||
.text-subtle {
|
||||
color: var(--text-tertiary);
|
||||
font-size: 0.8rem;
|
||||
}
|
||||
|
||||
.flex {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.justify-between {
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
.items-center {
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.gap-2 {
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
.gap-4 {
|
||||
gap: 1rem;
|
||||
}
|
||||
|
||||
.mt-4 {
|
||||
margin-top: 1rem;
|
||||
}
|
||||
|
||||
.mb-4 {
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
/* Grid Layout */
|
||||
.dashboard-grid {
|
||||
display: grid;
|
||||
grid-template-columns: 2fr 1fr;
|
||||
gap: 1.5rem;
|
||||
}
|
||||
|
||||
.config-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
|
||||
gap: 1.5rem;
|
||||
}
|
||||
|
||||
/* Utilities added for React flow */
|
||||
.flex-col {
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.flex-1 {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.w-full {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.h-full {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.h-64 {
|
||||
height: 16rem;
|
||||
}
|
||||
|
||||
.h-48 {
|
||||
height: 12rem;
|
||||
}
|
||||
|
||||
.justify-center {
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.text-center {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.p-2 {
|
||||
padding: 0.5rem;
|
||||
}
|
||||
|
||||
.p-3 {
|
||||
padding: 0.75rem;
|
||||
}
|
||||
|
||||
.p-4 {
|
||||
padding: 1rem;
|
||||
}
|
||||
|
||||
.py-8 {
|
||||
padding-top: 2rem;
|
||||
padding-bottom: 2rem;
|
||||
}
|
||||
|
||||
.ml-2 {
|
||||
margin-left: 0.5rem;
|
||||
}
|
||||
|
||||
.mb-0 {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.mb-2 {
|
||||
margin-bottom: 0.5rem;
|
||||
}
|
||||
|
||||
.border {
|
||||
border-width: 1px;
|
||||
border-style: solid;
|
||||
}
|
||||
|
||||
.border-b {
|
||||
border-bottom-width: 1px;
|
||||
border-style: solid;
|
||||
}
|
||||
|
||||
.border-dashed {
|
||||
border-style: dashed;
|
||||
}
|
||||
|
||||
.border-color {
|
||||
border-color: var(--border-color);
|
||||
}
|
||||
|
||||
.rounded-lg {
|
||||
border-radius: var(--radius-lg);
|
||||
}
|
||||
|
||||
.bg-tertiary {
|
||||
background-color: var(--bg-tertiary);
|
||||
}
|
||||
|
||||
.text-sm {
|
||||
font-size: 0.875rem;
|
||||
}
|
||||
|
||||
.text-accent {
|
||||
color: var(--accent-primary);
|
||||
}
|
||||
|
||||
.text-success {
|
||||
color: var(--success);
|
||||
}
|
||||
|
||||
.text-info {
|
||||
color: var(--info);
|
||||
}
|
||||
|
||||
.text-warning {
|
||||
color: var(--warning);
|
||||
}
|
||||
|
||||
.font-medium {
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
.font-mono {
|
||||
font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
|
||||
}
|
||||
|
||||
.opacity-75 {
|
||||
opacity: 0.75;
|
||||
}
|
||||
|
||||
.overflow-y-auto {
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
.overflow-x-auto {
|
||||
overflow-x: auto;
|
||||
}
|
||||
78
ui/src/layouts/MainLayout.tsx
Normal file
78
ui/src/layouts/MainLayout.tsx
Normal file
@@ -0,0 +1,78 @@
|
||||
import React, { useEffect, useState } from 'react';
|
||||
import { NavLink, Outlet, useLocation } from 'react-router-dom';
|
||||
import { LayoutDashboard, Settings, Activity, History } from 'lucide-react';
|
||||
|
||||
const MainLayout: React.FC = () => {
|
||||
const [isServerOnline, setIsServerOnline] = useState(false);
|
||||
const location = useLocation();
|
||||
|
||||
useEffect(() => {
|
||||
// Basic ping to check if our API is running
|
||||
fetch('/api/health')
|
||||
.then((res) => {
|
||||
if (res.ok) setIsServerOnline(true);
|
||||
})
|
||||
.catch(() => setIsServerOnline(false));
|
||||
|
||||
// Optional: add a polling interval
|
||||
const interval = setInterval(() => {
|
||||
fetch('/api/health')
|
||||
.then((res) => setIsServerOnline(res.ok))
|
||||
.catch(() => setIsServerOnline(false));
|
||||
}, 10000);
|
||||
|
||||
return () => clearInterval(interval);
|
||||
}, []);
|
||||
|
||||
const getPageTitle = (pathname: string) => {
|
||||
switch (pathname) {
|
||||
case '/': return 'Dashboard Overview';
|
||||
case '/settings': return 'Definitions & Policies';
|
||||
case '/history': return 'Run History';
|
||||
default: return 'AI Ops';
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div id="root">
|
||||
<aside className="sidebar">
|
||||
<div className="sidebar-header">
|
||||
<h1>AI Ops Control</h1>
|
||||
<p>Topology & Telemetry</p>
|
||||
</div>
|
||||
<nav className="sidebar-nav">
|
||||
<NavLink to="/" className={({ isActive }) => `nav-item ${isActive ? 'active' : ''}`}>
|
||||
<LayoutDashboard /> Dashboard
|
||||
</NavLink>
|
||||
<NavLink to="/history" className={({ isActive }) => `nav-item ${isActive ? 'active' : ''}`}>
|
||||
<History /> History
|
||||
</NavLink>
|
||||
<NavLink to="/settings" className={({ isActive }) => `nav-item ${isActive ? 'active' : ''}`}>
|
||||
<Settings /> Policies & Limits
|
||||
</NavLink>
|
||||
</nav>
|
||||
<div className="sidebar-footer">
|
||||
<div className={`status-dot ${isServerOnline ? 'online' : 'offline'}`} />
|
||||
{isServerOnline ? 'Server Online' : 'Connecting...'}
|
||||
</div>
|
||||
</aside>
|
||||
|
||||
<main className="main-content">
|
||||
<header className="top-bar">
|
||||
<h2 className="page-title">{getPageTitle(location.pathname)}</h2>
|
||||
<div>
|
||||
<div className="badge neutral flex items-center gap-2">
|
||||
<Activity size={14} /> Agent Ready
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<div className="content-scroll animate-fade-in" key={location.pathname}>
|
||||
<Outlet />
|
||||
</div>
|
||||
</main>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default MainLayout;
|
||||
10
ui/src/main.tsx
Normal file
10
ui/src/main.tsx
Normal file
@@ -0,0 +1,10 @@
|
||||
import React from 'react'
import ReactDOM from 'react-dom/client'
import App from './App.tsx'
import './index.css'

// Application entry point: mount <App /> under StrictMode on the #root element.
// The non-null assertion is safe as long as index.html provides <div id="root">.
ReactDOM.createRoot(document.getElementById('root')!).render(
  <React.StrictMode>
    <App />
  </React.StrictMode>,
)
|
||||
214
ui/src/pages/Dashboard.tsx
Normal file
214
ui/src/pages/Dashboard.tsx
Normal file
@@ -0,0 +1,214 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import { Play, Activity, Box } from 'lucide-react';
|
||||
|
||||
const Dashboard: React.FC = () => {
|
||||
const [sessions, setSessions] = useState<any[]>([]);
|
||||
const [manifests, setManifests] = useState<string[]>([]);
|
||||
const [selectedSession, setSelectedSession] = useState('');
|
||||
|
||||
// Job Form State
|
||||
const [prompt, setPrompt] = useState('');
|
||||
const [runManifest, setRunManifest] = useState('');
|
||||
const [provider, setProvider] = useState('codex');
|
||||
const [mode, setMode] = useState('provider');
|
||||
const [topologyHint, setTopologyHint] = useState('');
|
||||
const [runStatus, setRunStatus] = useState({ text: '', isError: false });
|
||||
|
||||
// Events State
|
||||
const [events, setEvents] = useState<any[]>([]);
|
||||
|
||||
// Graph State
|
||||
const [graphData, setGraphData] = useState<any>(null);
|
||||
const [graphLoading, setGraphLoading] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
fetch('/api/sessions').then(r => r.json()).then(d => {
|
||||
if (d.ok) setSessions(d.sessions || []);
|
||||
});
|
||||
fetch('/api/manifests').then(r => r.json()).then(d => {
|
||||
if (d.ok) {
|
||||
setManifests(d.manifests || []);
|
||||
if (d.manifests?.length) setRunManifest(d.manifests[0]);
|
||||
}
|
||||
});
|
||||
fetchEvents();
|
||||
}, []);
|
||||
|
||||
const fetchEvents = () => {
|
||||
fetch('/api/runtime-events?limit=50').then(r => r.json()).then(d => {
|
||||
if (d.ok) {
|
||||
setEvents(d.events || []);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const handleGraphRefresh = async () => {
|
||||
if (!selectedSession || !runManifest) return;
|
||||
setGraphLoading(true);
|
||||
try {
|
||||
const res = await fetch(`/api/sessions/graph?sessionId=${encodeURIComponent(selectedSession)}&manifestPath=${encodeURIComponent(runManifest)}`);
|
||||
const data = await res.json();
|
||||
if (data.ok) {
|
||||
setGraphData(data.graph);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
} finally {
|
||||
setGraphLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleStartRun = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
setRunStatus({ text: 'Starting...', isError: false });
|
||||
try {
|
||||
const res = await fetch('/api/runs', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
prompt,
|
||||
manifestPath: runManifest,
|
||||
provider,
|
||||
executionMode: mode,
|
||||
topologyHint: topologyHint || undefined
|
||||
})
|
||||
});
|
||||
const data = await res.json();
|
||||
if (res.ok && data.ok) {
|
||||
setRunStatus({ text: 'Run started successfully.', isError: false });
|
||||
setPrompt('');
|
||||
fetchEvents();
|
||||
} else {
|
||||
setRunStatus({ text: data.error || 'Failed to start run.', isError: true });
|
||||
}
|
||||
} catch (err: any) {
|
||||
setRunStatus({ text: err.message, isError: true });
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="dashboard-grid fade-in">
|
||||
{/* Left Column */}
|
||||
<div className="flex flex-col gap-4">
|
||||
{/* Graph Visualizer Panel */}
|
||||
<section className="panel">
|
||||
<div className="panel-header">
|
||||
<h2 className="panel-title">
|
||||
<Network size={18} className="text-accent" /> Graph Visualizer
|
||||
</h2>
|
||||
<div className="flex gap-4">
|
||||
<label className="text-subtle">
|
||||
Session
|
||||
<select className="ml-2 bg-tertiary" value={selectedSession} onChange={e => setSelectedSession(e.target.value)}>
|
||||
<option value="">Select Session</option>
|
||||
{sessions.map(s => (
|
||||
<option key={s.id} value={s.id}>{s.id}</option>
|
||||
))}
|
||||
</select>
|
||||
</label>
|
||||
<button className="primary" type="button" onClick={handleGraphRefresh} disabled={graphLoading}>
|
||||
{graphLoading ? 'Loading...' : 'Refresh'}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<div className="h-64 flex items-center justify-center border border-dashed border-color rounded-lg text-subtle overflow-hidden relative">
|
||||
{graphData ? (
|
||||
<pre className="text-xs overflow-auto w-full h-full p-4 text-left">
|
||||
{JSON.stringify(graphData, null, 2)}
|
||||
</pre>
|
||||
) : (
|
||||
<span>Select a Session and Manifest to view execution graph structure</span>
|
||||
)}
|
||||
</div>
|
||||
</section>
|
||||
|
||||
{/* Live Event Feed Panel */}
|
||||
<section className="panel">
|
||||
<div className="panel-header">
|
||||
<h2 className="panel-title">
|
||||
<Activity size={18} className="text-info" /> Live Event Feed
|
||||
</h2>
|
||||
<button className="primary text-sm p-1" type="button" onClick={fetchEvents}>Refresh</button>
|
||||
</div>
|
||||
<div className="h-48 overflow-y-auto bg-tertiary rounded-lg p-2 text-sm font-mono text-subtle opacity-75 whitespace-pre-wrap">
|
||||
{events.length === 0 ? '[Live Feed] Waiting for events...' : events.map((ev, i) => (
|
||||
<div key={i}>[{new Date(ev.timestamp).toLocaleTimeString()}] {ev.type} - {ev.message}</div>
|
||||
))}
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
|
||||
{/* Right Column */}
|
||||
<div className="flex flex-col gap-4">
|
||||
{/* Job Trigger Panel */}
|
||||
<aside className="panel mb-0">
|
||||
<div className="panel-header">
|
||||
<h2 className="panel-title">
|
||||
<Play size={18} className="text-success" /> Job Trigger
|
||||
</h2>
|
||||
</div>
|
||||
<form className="flex flex-col gap-4" onSubmit={handleStartRun}>
|
||||
<div className="form-group mb-0">
|
||||
<label>Prompt / Task</label>
|
||||
<textarea rows={3} placeholder="Describe the run objective..." className="w-full" value={prompt} onChange={e => setPrompt(e.target.value)} required></textarea>
|
||||
</div>
|
||||
|
||||
<div className="form-group mb-0">
|
||||
<label>Manifest</label>
|
||||
<select className="w-full" value={runManifest} onChange={e => setRunManifest(e.target.value)} required>
|
||||
{manifests.map(m => (
|
||||
<option key={m} value={m}>{m}</option>
|
||||
))}
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div className="form-row mb-0">
|
||||
<div className="form-group mb-0">
|
||||
<label>Provider</label>
|
||||
<select className="w-full" value={provider} onChange={e => setProvider(e.target.value)}>
|
||||
<option value="codex">codex</option>
|
||||
<option value="claude">claude</option>
|
||||
</select>
|
||||
</div>
|
||||
<div className="form-group mb-0">
|
||||
<label>Mode</label>
|
||||
<select className="w-full" value={mode} onChange={e => setMode(e.target.value)}>
|
||||
<option value="provider">provider</option>
|
||||
<option value="mock">mock</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="form-group mb-0">
|
||||
<label>Topology Hint</label>
|
||||
<input type="text" placeholder="sequential | parallel..." className="w-full" value={topologyHint} onChange={e => setTopologyHint(e.target.value)} />
|
||||
</div>
|
||||
|
||||
<div className="flex justify-between items-center mt-2">
|
||||
<button type="submit" className="primary w-full">Start Run</button>
|
||||
</div>
|
||||
{runStatus.text && (
|
||||
<div className={`text-sm ${runStatus.isError ? 'text-danger' : 'text-success'}`}>{runStatus.text}</div>
|
||||
)}
|
||||
</form>
|
||||
</aside>
|
||||
|
||||
{/* Node Inspector */}
|
||||
<aside className="panel">
|
||||
<div className="panel-header">
|
||||
<h2 className="panel-title">
|
||||
<Box size={18} className="text-warning" /> Node Inspector
|
||||
</h2>
|
||||
</div>
|
||||
<div className="p-4 bg-tertiary rounded-lg text-center text-subtle text-sm">
|
||||
Select a graph node to inspect details.
|
||||
</div>
|
||||
</aside>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
// Add missing lucide import that was used from layout
|
||||
import { Network } from 'lucide-react';
|
||||
export default Dashboard;
|
||||
88
ui/src/pages/History.tsx
Normal file
88
ui/src/pages/History.tsx
Normal file
@@ -0,0 +1,88 @@
|
||||
import React, { useEffect, useState } from 'react';
|
||||
import { History as HistoryIcon, RefreshCw } from 'lucide-react';
|
||||
|
||||
const History: React.FC = () => {
|
||||
const [runs, setRuns] = useState<any[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
const fetchHistory = async () => {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await fetch('/api/sessions');
|
||||
const data = await res.json();
|
||||
if (data.ok) {
|
||||
// Sort by descending update time (assuming default or simple sort needed)
|
||||
const sortedRuns = (data.runs || []).sort((a: any, b: any) => {
|
||||
return new Date(b.updatedAt || 0).getTime() - new Date(a.updatedAt || 0).getTime();
|
||||
});
|
||||
setRuns(sortedRuns);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('Failed to fetch history', e);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
fetchHistory();
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<div className="fade-in">
|
||||
<section className="panel">
|
||||
<div className="panel-header">
|
||||
<h2 className="panel-title">
|
||||
<HistoryIcon size={18} className="text-secondary" /> Run History
|
||||
</h2>
|
||||
<button className="primary" onClick={fetchHistory} disabled={loading}>
|
||||
<RefreshCw size={14} className={loading ? 'animate-spin' : ''} /> Refresh
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="overflow-x-auto">
|
||||
<table className="w-full text-left border-collapse">
|
||||
<thead>
|
||||
<tr className="border-b border-color text-subtle">
|
||||
<th className="p-3 font-medium">Session ID</th>
|
||||
<th className="p-3 font-medium">Status</th>
|
||||
<th className="p-3 font-medium">Attempts</th>
|
||||
<th className="p-3 font-medium">Updated</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{runs.length === 0 ? (
|
||||
<tr>
|
||||
<td className="p-3" colSpan={4}>
|
||||
<div className="text-center py-8 text-subtle">
|
||||
{loading ? 'Loading...' : 'No run history available.'}
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
) : (
|
||||
runs.map((run: any) => {
|
||||
const dateStr = run.updatedAt ? new Date(run.updatedAt).toLocaleString() : 'N/A';
|
||||
let statusColor = 'text-subtle';
|
||||
if (run.status === 'success') statusColor = 'text-success';
|
||||
if (run.status === 'failure' || run.status === 'cancelled') statusColor = 'text-danger';
|
||||
if (run.status === 'validation_fail') statusColor = 'text-warning';
|
||||
|
||||
return (
|
||||
<tr key={run.id} className="border-b border-color hover:bg-highlight transition-colors">
|
||||
<td className="p-3 font-mono text-sm">{run.sessionId || 'N/A'}</td>
|
||||
<td className={`p-3 font-medium ${statusColor}`}>{run.status || 'unknown'}</td>
|
||||
<td className="p-3">{run.attempts ?? 0}</td>
|
||||
<td className="p-3 text-sm text-subtle">{dateStr}</td>
|
||||
</tr>
|
||||
);
|
||||
})
|
||||
)}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default History;
|
||||
293
ui/src/pages/Settings.tsx
Normal file
293
ui/src/pages/Settings.tsx
Normal file
@@ -0,0 +1,293 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import { Shield, Bell, HardDrive, FileJson } from 'lucide-react';
|
||||
|
||||
const Settings: React.FC = () => {
|
||||
// Config Status
|
||||
const [configStatus, setConfigStatus] = useState({ text: '', isError: false, section: '' });
|
||||
|
||||
// Notifications State
|
||||
const [webhookUrl, setWebhookUrl] = useState('');
|
||||
const [severity, setSeverity] = useState('info');
|
||||
const [alwaysNotify, setAlwaysNotify] = useState('');
|
||||
|
||||
// Security State
|
||||
const [violationMode, setViolationMode] = useState('hard_abort');
|
||||
const [allowedBinaries, setAllowedBinaries] = useState('');
|
||||
|
||||
// Limits State
|
||||
const [maxConcurrent, setMaxConcurrent] = useState('');
|
||||
const [maxSession, setMaxSession] = useState('');
|
||||
|
||||
useEffect(() => {
|
||||
fetch('/api/config')
|
||||
.then(res => res.json())
|
||||
.then(data => {
|
||||
if (data.ok && data.config) {
|
||||
const cfg = data.config;
|
||||
setWebhookUrl(cfg.runtimeEvents?.discordWebhookUrl || '');
|
||||
setSeverity(cfg.runtimeEvents?.minSeverity || 'info');
|
||||
setAlwaysNotify(cfg.runtimeEvents?.alwaysNotifyTypes?.join(',') || '');
|
||||
|
||||
setViolationMode(cfg.securityPolicy?.violationMode || 'hard_abort');
|
||||
setAllowedBinaries(cfg.securityPolicy?.allowedBinaries?.join(',') || '');
|
||||
|
||||
setMaxConcurrent(cfg.limits?.maxConcurrentSessionLimit?.toString() || '');
|
||||
setMaxSession(cfg.limits?.maxSessionAgentLimit?.toString() || '');
|
||||
}
|
||||
})
|
||||
.catch(e => console.error("Failed to load config", e));
|
||||
}, []);
|
||||
|
||||
const handleSaveNotifications = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
setConfigStatus({ text: 'Saving notifications...', isError: false, section: 'notifications' });
|
||||
try {
|
||||
const res = await fetch('/api/config/runtime-events', {
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
discordWebhookUrl: webhookUrl || undefined,
|
||||
minSeverity: severity,
|
||||
alwaysNotifyTypes: alwaysNotify ? alwaysNotify.split(',').map(s => s.trim()) : undefined
|
||||
})
|
||||
});
|
||||
const data = await res.json();
|
||||
if (data.ok) setConfigStatus({ text: 'Saved successfully.', isError: false, section: 'notifications' });
|
||||
else setConfigStatus({ text: data.error || 'Failed to save.', isError: true, section: 'notifications' });
|
||||
} catch (err: any) {
|
||||
setConfigStatus({ text: err.message, isError: true, section: 'notifications' });
|
||||
}
|
||||
};
|
||||
|
||||
const handleSaveSecurity = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
setConfigStatus({ text: 'Saving security...', isError: false, section: 'security' });
|
||||
try {
|
||||
const res = await fetch('/api/config/security', {
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
violationMode,
|
||||
allowedBinaries: allowedBinaries ? allowedBinaries.split(',').map(s => s.trim()) : undefined
|
||||
})
|
||||
});
|
||||
const data = await res.json();
|
||||
if (data.ok) setConfigStatus({ text: 'Saved successfully.', isError: false, section: 'security' });
|
||||
else setConfigStatus({ text: data.error || 'Failed to save.', isError: true, section: 'security' });
|
||||
} catch (err: any) {
|
||||
setConfigStatus({ text: err.message, isError: true, section: 'security' });
|
||||
}
|
||||
};
|
||||
|
||||
const handleSaveLimits = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
setConfigStatus({ text: 'Saving limits...', isError: false, section: 'limits' });
|
||||
try {
|
||||
const res = await fetch('/api/config/limits', {
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
maxConcurrentSessionLimit: parseInt(maxConcurrent) || undefined,
|
||||
maxSessionAgentLimit: parseInt(maxSession) || undefined
|
||||
})
|
||||
});
|
||||
const data = await res.json();
|
||||
if (data.ok) setConfigStatus({ text: 'Saved successfully.', isError: false, section: 'limits' });
|
||||
else setConfigStatus({ text: data.error || 'Failed to save.', isError: true, section: 'limits' });
|
||||
} catch (err: any) {
|
||||
setConfigStatus({ text: err.message, isError: true, section: 'limits' });
|
||||
}
|
||||
};
|
||||
|
||||
const [manifestPath, setManifestPath] = useState('');
|
||||
const [manifestJson, setManifestJson] = useState('');
|
||||
const [manifestStatus, setManifestStatus] = useState({ text: '', isError: false });
|
||||
|
||||
const handleLoadManifest = async () => {
|
||||
if (!manifestPath) {
|
||||
setManifestStatus({ text: 'Please enter a manifest path.', isError: true });
|
||||
return;
|
||||
}
|
||||
setManifestStatus({ text: 'Loading...', isError: false });
|
||||
try {
|
||||
const res = await fetch(`/api/manifests/read?path=${encodeURIComponent(manifestPath)}`);
|
||||
const data = await res.json();
|
||||
if (data.ok) {
|
||||
setManifestJson(JSON.stringify(data.manifest.source || data.manifest.manifest, null, 2));
|
||||
setManifestStatus({ text: 'Manifest loaded successfully.', isError: false });
|
||||
} else {
|
||||
setManifestStatus({ text: data.error || 'Failed to load manifest.', isError: true });
|
||||
}
|
||||
} catch (e: any) {
|
||||
setManifestStatus({ text: e.message || 'Error loading manifest.', isError: true });
|
||||
}
|
||||
};
|
||||
|
||||
const handleValidateManifest = async () => {
|
||||
try {
|
||||
const parsed = JSON.parse(manifestJson);
|
||||
setManifestStatus({ text: 'Validating...', isError: false });
|
||||
const res = await fetch('/api/manifests/validate', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ manifest: parsed })
|
||||
});
|
||||
const data = await res.json();
|
||||
if (data.ok) {
|
||||
setManifestStatus({ text: 'Manifest is valid.', isError: false });
|
||||
} else {
|
||||
setManifestStatus({ text: data.error || 'Manifest validation failed.', isError: true });
|
||||
}
|
||||
} catch (e: any) {
|
||||
setManifestStatus({ text: 'Invalid JSON format.', isError: true });
|
||||
}
|
||||
};
|
||||
|
||||
const handleSaveManifest = async () => {
|
||||
if (!manifestPath) {
|
||||
setManifestStatus({ text: 'Please enter a manifest path.', isError: true });
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const parsed = JSON.parse(manifestJson);
|
||||
setManifestStatus({ text: 'Saving...', isError: false });
|
||||
const res = await fetch('/api/manifests/save', {
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ path: manifestPath, manifest: parsed })
|
||||
});
|
||||
const data = await res.json();
|
||||
if (data.ok) {
|
||||
setManifestStatus({ text: 'Manifest saved successfully.', isError: false });
|
||||
} else {
|
||||
setManifestStatus({ text: data.error || 'Failed to save manifest.', isError: true });
|
||||
}
|
||||
} catch (e: any) {
|
||||
setManifestStatus({ text: 'Invalid JSON format.', isError: true });
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="fade-in">
|
||||
<div className="config-grid">
|
||||
{/* Notifications */}
|
||||
<section className="panel">
|
||||
<div className="panel-header">
|
||||
<h2 className="panel-title">
|
||||
<Bell size={18} className="text-accent" /> Notifications
|
||||
</h2>
|
||||
</div>
|
||||
<form className="flex flex-col gap-3" onSubmit={handleSaveNotifications}>
|
||||
<div className="form-group mb-0">
|
||||
<label>Discord Webhook URL</label>
|
||||
<input type="text" className="w-full" placeholder="https://discord.com/api/webhooks/..." value={webhookUrl} onChange={(e) => setWebhookUrl(e.target.value)} />
|
||||
</div>
|
||||
<div className="form-group mb-0">
|
||||
<label>Min Severity</label>
|
||||
<select className="w-full" value={severity} onChange={(e) => setSeverity(e.target.value)}>
|
||||
<option value="info">info</option>
|
||||
<option value="warning">warning</option>
|
||||
<option value="critical">critical</option>
|
||||
</select>
|
||||
</div>
|
||||
<div className="form-group mb-0">
|
||||
<label>Always Notify Types (CSV)</label>
|
||||
<input type="text" className="w-full" value={alwaysNotify} onChange={(e) => setAlwaysNotify(e.target.value)} />
|
||||
</div>
|
||||
<button type="submit" className="primary mt-2">Save Notifications</button>
|
||||
{configStatus.section === 'notifications' && (
|
||||
<div className={`text-sm mt-1 ${configStatus.isError ? 'text-danger' : 'text-success'}`}>{configStatus.text}</div>
|
||||
)}
|
||||
</form>
|
||||
</section>
|
||||
|
||||
{/* Security */}
|
||||
<section className="panel">
|
||||
<div className="panel-header">
|
||||
<h2 className="panel-title">
|
||||
<Shield size={18} className="text-warning" /> Security Policy
|
||||
</h2>
|
||||
</div>
|
||||
<form className="flex flex-col gap-3" onSubmit={handleSaveSecurity}>
|
||||
<div className="form-group mb-0">
|
||||
<label>Violation Mode</label>
|
||||
<select className="w-full" value={violationMode} onChange={(e) => setViolationMode(e.target.value)}>
|
||||
<option value="hard_abort">hard_abort</option>
|
||||
<option value="validation_fail">validation_fail</option>
|
||||
</select>
|
||||
</div>
|
||||
<div className="form-group mb-0">
|
||||
<label>Allowed Binaries (CSV)</label>
|
||||
<input type="text" className="w-full" value={allowedBinaries} onChange={(e) => setAllowedBinaries(e.target.value)} />
|
||||
</div>
|
||||
<button type="submit" className="primary mt-2">Save Security</button>
|
||||
{configStatus.section === 'security' && (
|
||||
<div className={`text-sm mt-1 ${configStatus.isError ? 'text-danger' : 'text-success'}`}>{configStatus.text}</div>
|
||||
)}
|
||||
</form>
|
||||
</section>
|
||||
|
||||
{/* Limits */}
|
||||
<section className="panel">
|
||||
<div className="panel-header">
|
||||
<h2 className="panel-title">
|
||||
<HardDrive size={18} className="text-info" /> Platform Limits
|
||||
</h2>
|
||||
</div>
|
||||
<form className="flex flex-col gap-3" onSubmit={handleSaveLimits}>
|
||||
<div className="form-group mb-0">
|
||||
<label>AGENT_MAX_CONCURRENT</label>
|
||||
<input type="number" className="w-full" value={maxConcurrent} onChange={(e) => setMaxConcurrent(e.target.value)} />
|
||||
</div>
|
||||
<div className="form-group mb-0">
|
||||
<label>AGENT_MAX_SESSION</label>
|
||||
<input type="number" className="w-full" value={maxSession} onChange={(e) => setMaxSession(e.target.value)} />
|
||||
</div>
|
||||
<button type="submit" className="primary mt-2">Save Limits</button>
|
||||
{configStatus.section === 'limits' && (
|
||||
<div className={`text-sm mt-1 ${configStatus.isError ? 'text-danger' : 'text-success'}`}>{configStatus.text}</div>
|
||||
)}
|
||||
</form>
|
||||
</section>
|
||||
|
||||
{/* Manifest Builder */}
|
||||
<section className="panel md:col-span-2">
|
||||
<div className="panel-header">
|
||||
<h2 className="panel-title">
|
||||
<FileJson size={18} className="text-success" /> Manifest Builder
|
||||
</h2>
|
||||
</div>
|
||||
<div className="flex flex-col gap-4">
|
||||
<div className="flex gap-4 mb-2">
|
||||
<input
|
||||
type="text"
|
||||
className="flex-1"
|
||||
placeholder=".ai_ops/manifests/default.json"
|
||||
value={manifestPath}
|
||||
onChange={(e) => setManifestPath(e.target.value)}
|
||||
/>
|
||||
<button type="button" onClick={handleLoadManifest}>Load</button>
|
||||
<button type="button" onClick={handleValidateManifest}>Validate</button>
|
||||
<button type="button" className="primary" onClick={handleSaveManifest}>Save</button>
|
||||
</div>
|
||||
<textarea
|
||||
className="w-full font-mono text-sm bg-tertiary"
|
||||
rows={16}
|
||||
placeholder="{...}"
|
||||
value={manifestJson}
|
||||
onChange={(e) => setManifestJson(e.target.value)}
|
||||
spellCheck={false}
|
||||
></textarea>
|
||||
{manifestStatus.text && (
|
||||
<div className={`text-sm mt-2 ${manifestStatus.isError ? 'text-danger' : 'text-success'}`}>
|
||||
{manifestStatus.text}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default Settings;
|
||||
28
ui/tsconfig.app.json
Normal file
28
ui/tsconfig.app.json
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
|
||||
"target": "ES2022",
|
||||
"useDefineForClassFields": true,
|
||||
"lib": ["ES2022", "DOM", "DOM.Iterable"],
|
||||
"module": "ESNext",
|
||||
"types": ["vite/client"],
|
||||
"skipLibCheck": true,
|
||||
|
||||
/* Bundler mode */
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"moduleDetection": "force",
|
||||
"noEmit": true,
|
||||
"jsx": "react-jsx",
|
||||
|
||||
/* Linting */
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"erasableSyntaxOnly": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noUncheckedSideEffectImports": true
|
||||
},
|
||||
"include": ["src"]
|
||||
}
|
||||
7
ui/tsconfig.json
Normal file
7
ui/tsconfig.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"files": [],
|
||||
"references": [
|
||||
{ "path": "./tsconfig.app.json" },
|
||||
{ "path": "./tsconfig.node.json" }
|
||||
]
|
||||
}
|
||||
26
ui/tsconfig.node.json
Normal file
26
ui/tsconfig.node.json
Normal file
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
|
||||
"target": "ES2023",
|
||||
"lib": ["ES2023"],
|
||||
"module": "ESNext",
|
||||
"types": ["node"],
|
||||
"skipLibCheck": true,
|
||||
|
||||
/* Bundler mode */
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"moduleDetection": "force",
|
||||
"noEmit": true,
|
||||
|
||||
/* Linting */
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"erasableSyntaxOnly": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noUncheckedSideEffectImports": true
|
||||
},
|
||||
"include": ["vite.config.ts"]
|
||||
}
|
||||
15
ui/vite.config.ts
Normal file
15
ui/vite.config.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'

// https://vite.dev/config/
export default defineConfig({
  plugins: [react()],
  server: {
    proxy: {
      // Forward /api requests from the dev server to the backend on :4317,
      // so the UI can use same-origin fetch('/api/...') during development.
      '/api': {
        target: 'http://localhost:4317',
        changeOrigin: true,
      }
    }
  }
})
|
||||
Reference in New Issue
Block a user