4 Commits

Author SHA1 Message Date
35e3f81327 Merge origin/main with local UI refactor integration 2026-02-25 00:38:19 -05:00
659f3edcee Refactor UI modules and harden run/API behavior 2026-02-25 00:21:04 -05:00
90725eaae8 a 2026-02-24 18:57:45 -05:00
7727612ce9 a 2026-02-24 18:57:20 -05:00
56 changed files with 7388 additions and 1256 deletions

View File

@@ -16,6 +16,7 @@ CLAUDE_CODE_OAUTH_TOKEN=
ANTHROPIC_API_KEY=
CLAUDE_MODEL=
CLAUDE_CODE_PATH=
CLAUDE_MAX_TURNS=2
# Claude binary observability: off | stdout | file | both
CLAUDE_OBSERVABILITY_MODE=off
# CLAUDE_OBSERVABILITY_VERBOSITY: summary | full
@@ -52,7 +53,7 @@ AGENT_PORT_LOCK_DIR=.ai_ops/locks/ports
AGENT_DISCOVERY_FILE_RELATIVE_PATH=.agent-context/resources.json
# Security middleware
# AGENT_SECURITY_VIOLATION_MODE: hard_abort | validation_fail
# AGENT_SECURITY_VIOLATION_MODE: hard_abort | validation_fail | dangerous_warn_only
AGENT_SECURITY_VIOLATION_MODE=hard_abort
AGENT_SECURITY_ALLOWED_BINARIES=git,npm,node,cat,ls,pwd,echo,bash,sh
AGENT_SECURITY_COMMAND_TIMEOUT_MS=120000

View File

@@ -60,6 +60,7 @@ TypeScript runtime for deterministic multi-agent execution with:
```bash
npm install
npm --prefix ui install
cp .env.example .env
cp mcp.config.example.json mcp.config.json
```
@@ -86,6 +87,8 @@ Start the local UI server:
npm run ui
```
This script builds the React frontend from `ui/` before serving.
Then open:
- `http://127.0.0.1:4317` (default)
@@ -109,7 +112,9 @@ Provider mode notes:
- `provider=codex` uses existing OpenAI/Codex auth settings (`OPENAI_AUTH_MODE`, `CODEX_API_KEY`, `OPENAI_API_KEY`).
- `provider=claude` uses Claude auth resolution (`CLAUDE_CODE_OAUTH_TOKEN` preferred, otherwise `ANTHROPIC_API_KEY`, or existing Claude Code login state).
- `CLAUDE_MODEL` should be a Claude model id/alias recognized by Claude Code (for example `claude-sonnet-4-6`); `anthropic/...` prefixes are normalized automatically.
- `CLAUDE_MAX_TURNS` controls the per-query Claude turn budget (default `2`).
- Claude provider runs can emit Claude SDK/CLI internals to stdout and/or NDJSON with `CLAUDE_OBSERVABILITY_*` settings.
- UI session-mode provider runs execute directly in orchestration-assigned task/base worktrees; provider adapters do not allocate additional nested worktrees.
## Manifest Semantics
@@ -271,6 +276,7 @@ jq -c 'select(.severity=="critical")' .ai_ops/events/runtime-events.ndjson
- Pipeline behavior on `SecurityViolationError` is configurable:
- `hard_abort` (default)
- `validation_fail` (retry-unrolled remediation)
- `dangerous_warn_only` (logs violations and continues execution; high risk)
## Environment Variables
@@ -285,6 +291,7 @@ jq -c 'select(.severity=="critical")' .ai_ops/events/runtime-events.ndjson
- `ANTHROPIC_API_KEY` (used when `CLAUDE_CODE_OAUTH_TOKEN` is unset)
- `CLAUDE_MODEL`
- `CLAUDE_CODE_PATH`
- `CLAUDE_MAX_TURNS` (integer >= 1, defaults to `2`)
- `CLAUDE_OBSERVABILITY_MODE` (`off`, `stdout`, `file`, or `both`)
- `CLAUDE_OBSERVABILITY_VERBOSITY` (`summary` or `full`)
- `CLAUDE_OBSERVABILITY_LOG_PATH`
@@ -322,7 +329,7 @@ jq -c 'select(.severity=="critical")' .ai_ops/events/runtime-events.ndjson
### Security Middleware
- `AGENT_SECURITY_VIOLATION_MODE` (`hard_abort` or `validation_fail`)
- `AGENT_SECURITY_VIOLATION_MODE` (`hard_abort`, `validation_fail`, or `dangerous_warn_only`)
- `AGENT_SECURITY_ALLOWED_BINARIES`
- `AGENT_SECURITY_COMMAND_TIMEOUT_MS`
- `AGENT_SECURITY_AUDIT_LOG_PATH`

73
demo-manifest.json Normal file
View File

@@ -0,0 +1,73 @@
{
"schemaVersion": "1",
"topologies": [
"sequential"
],
"personas": [
{
"id": "researcher",
"displayName": "Researcher",
"systemPromptTemplate": "You are a researcher. Read the README.md file in the repository to understand the core architecture of the AI Ops platform. Once you understand it, output your summary.",
"toolClearance": {
"allowlist": [
"read_file",
"list_directory"
],
"banlist": []
}
},
{
"id": "writer",
"displayName": "Writer",
"systemPromptTemplate": "You are a writer. Take the summary provided by the researcher and write it to a new file called 'demo-summary.txt' in the root directory.",
"toolClearance": {
"allowlist": [
"write_file"
],
"banlist": []
}
}
],
"relationships": [
{
"parentPersonaId": "researcher",
"childPersonaId": "writer",
"constraints": {
"maxDepth": 1,
"maxChildren": 1
}
}
],
"topologyConstraints": {
"maxDepth": 5,
"maxRetries": 2
},
"pipeline": {
"entryNodeId": "research-node",
"nodes": [
{
"id": "research-node",
"actorId": "researcher_actor",
"personaId": "researcher",
"topology": {
"kind": "sequential"
}
},
{
"id": "write-node",
"actorId": "writer_actor",
"personaId": "writer",
"topology": {
"kind": "sequential"
}
}
],
"edges": [
{
"from": "research-node",
"to": "write-node",
"on": "success"
}
]
}
}

View File

@@ -37,6 +37,11 @@ Before each actor invocation, orchestration resolves an immutable `ResolvedExecu
This keeps orchestration policy resolution separate from executor enforcement. Executors do not need to parse manifests or MCP registry internals.
Worktree ownership invariant:
- In UI session mode, orchestration/session lifecycle is the single owner of git worktree allocation.
- Provider adapters (Codex/Claude runtime wrappers) must execute inside `ResolvedExecutionContext.security.worktreePath` and must not provision independent worktrees.
## Execution topology model
- Pipeline graph execution is DAG-based with ready-node frontiers.

View File

@@ -30,6 +30,7 @@ This middleware provides a first-pass hardening layer for agent-executed shell c
- `hard_abort` (default): fail fast and stop the pipeline.
- `validation_fail`: map violation to retry-unrolled behavior so the actor can attempt a compliant alternative.
- `dangerous_warn_only`: emit security audit/runtime events but continue execution. This is intentionally unsafe and should only be used for temporary unblock/debug workflows.
## MCP integration

View File

@@ -10,21 +10,137 @@
# in progress
there is some major ui issue. there is app/provider logic wrapped up in the ui which i didnt know about or understand and it has gotten out of hand. we need to rip it out and clean it up. additionally the work trees are still not working as intended after like 5 attempts to fix it so that has got to be officially spaghetti at this point
here is the takeaway from the ui app logic issue
- Keep orchestration core in src/agents.
- Move backend run/session/provider code out of src/ui into src/control-plane (or src/backend).
- Keep src/ui as static/frontend + API client only.
- Treat provider prompt shaping as an adapter concern (src/providers), not UI concern.
test results
session itself has a dir in worktrees that is a worktree
then there is a base dir and a tasks dir
base is also a worktree
inside of base, there is ANOTHER WORKTREE
inside of tasks is a product-intake??? directory
code is being written in both product-intake and the worktree in the base/worktrees/d3e411... directory
i dont think that the product guy is writing any files
fwiw, the dev agents are definitely making the app
log activity of claude code binary
WHY IS IT STILL NOT LOGGING WHAT IS ACTUALLY HAPPENING
it will not explain it, it just keeps adding different logs
test run
they are writing files!
# problem 1 - logging
logging is still fucking dog dick fuck ass shit
# problem 2 - worktree
the worktree shit is fucking insanity
they are getting confused because they see some of the orchestration infrastructure
they legit need to be in a clean room and know nothing about the world outside of their project going forward
# problem 3 - task management/product context being passed in its entirety
the dev agents for some reason have the entire fucking task list in their context
# Scheduled
So yes, the UI growing into “its own project” increases risk because orchestration logic leaks into UI-layer
services.
Best refactor target:
1. Make UI a thin transport layer (HTTP in/out, no resource ownership decisions).
2. Move run/session orchestration into one app-service module with a strict interface.
3. Enforce single-owner invariants in code (worktree owner = session lifecycle only).
4. Add contract tests around ownership boundaries (like the regression we just added).
what even is src/examples ????
clean up events/locks/ports (may not be needed with new session work?)
ui is gargantuan - needs a full rewrite in a different dir or something holy
the ais arent actually writing to the directory
the ui is fucking bad
it kinda slow
i think the banned command thing is kind of restrictive, idk if they will really be able to do anything
codex isnt working?
i dont even know if this runs on linux at all
wtf is even happening in the backend i dont see any logs for it anywhere
# identify potential design conflict dag vs things that happen not on the dag?
## linked to confusion around max env vars and what they do
- "This runtime enforces a DAG, so merge-conflict retry loops are multi-run (rerun the same managed session) rather than in-run graph cycles."
- what are the implications of this? is this a stop gap until we do stricter control at the ends (task manager + git worktree cleanup)? or do we have a design inconsistency that we need to address?
For structured-dev-workflow specifically
- Happy path is depth 4 (product -> task -> dev -> qa -> merge).
- QA-fail/rework path reaches depth ~6.
- If merge-conflict resolver nodes are used, path can reach ~7.
# confusion around max env vars and what they do
## linked to identify potential design conflict dag vs things that happen not on the dag?
PROBLEM
.env has AGENT_MAX_RECURSIVE_DEPTH=3, but structured-dev-workflow requires depth 4 (product -> task -> dev -> qa -> merge)
- does this mean that if qa sends a task back to dev that the depth is 5? or does it literally mean num of agents in a branch of the dag?
- if it blocks, problem because it will just fail if we have to pass back
- if it doesn't, problem because this does nothing - we are either allowing them to loop infinitely creating themselves OR we are relying on enforcing that elsewhere and we need to consolidate
RESPONSE
• Short answer: your concern is valid, and today the two depth limits are conflated more than they should be.
1. Does QA -> Dev make depth 5?
- In this system, depth is not “number of unique agents.” It is path depth from the entry node (edge distance), plus recursive child depth when retries/fanout happen.
- Back-edges like qa -> dev to the same upstream node are not allowed because pipeline must be a strict DAG (manifest.ts (/home/zman/projects/ai_ops/src/agents/manifest.ts)).
- So “send back” is modeled as downstream rework nodes (qa -> dev-rework -> qa-rework -> merge), which does increase depth.
2. If it blocks, that's bad; if not, does the limit do nothing?
- It does block today, and it also does still serve a purpose.
- PipelineExecutor increments node depth by edge traversal (pipeline.ts (/home/zman/projects/ai_ops/src/agents/pipeline.ts:748), pipeline.ts (/home/zman/projects/ai_ops/src/agents/pipeline.ts:815)).
- But that same depth is passed into AgentManager.runRecursive (pipeline.ts (/home/zman/projects/ai_ops/src/agents/pipeline.ts:875)), and AgentManager enforces maxRecursiveDepth on that depth (manager.ts (/home/zman/projects/ai_ops/
src/agents/manager.ts:343)).
- So AGENT_MAX_RECURSIVE_DEPTH is currently acting as a second pipeline-depth cap, not just a recursion cap.
For structured-dev-workflow specifically
- Happy path is depth 4 (product -> task -> dev -> qa -> merge).
- QA-fail/rework path reaches depth ~6.
- If merge-conflict resolver nodes are used, path can reach ~7.
- So with .env values like AGENT_MAX_RECURSIVE_DEPTH=3 and AGENT_TOPOLOGY_MAX_DEPTH=4, fallback branches will fail.
SOLUTION
2. Structural fix (better):
- Decouple meanings:
- AGENT_TOPOLOGY_MAX_DEPTH should gate DAG traversal depth only.
- AGENT_MAX_RECURSIVE_DEPTH should gate recursive fanout/retry depth only.
- In practice: stop passing pipeline queue depth into manager recursive depth; start recursive runs at a local depth baseline per node.
3. Safety/clarity guard:
- Add a preflight check that computes max possible DAG depth and warns/errors if env depth limits are below it.
# other scheduled
- persona definitions
@@ -556,3 +672,149 @@ Manifest Builder: A UI to visually build or edit the AgentManifest (Schema "1"),
Security Policy Management: An interface mapped to src/security/schemas.ts. This allows admins to define AGENT_SECURITY_ALLOWED_BINARIES, toggle AGENT_SECURITY_VIOLATION_MODE (hard_abort, validation_fail, or dangerous_warn_only), and manage MCP tool allowlists/banlists.
Environment & Resource Limits: Simple forms to configure agent manager limits (AGENT_MAX_CONCURRENT) and port block sizing without manually editing the .env file.
# Architecture Requirements: Session Isolation & Task-Scoped Worktrees
## Objective
Disentangle the `ai_ops` control plane from the target project data plane. Replace the implicit `process.cwd()` execution anchor with a formal Session lifecycle and dynamic, task-scoped Git worktrees. This ensures concurrent agents operate in isolated environments and prevents the runtime from mutating its own repository.
## 1. Domain Definitions
- **Target Project:** The absolute local path to the repository being operated on (e.g., `/home/user/target_repo`).
- **Session (The Clean Room):** A persistent orchestration context strictly bound to one Target Project. It maintains a "Base Workspace" (a localized Git checkout/branch) that represents the integrated, approved state of the current work period.
- **Task Worktree:** An ephemeral Git worktree branched from the Session's Base Workspace. It is scoped strictly to a `taskId`, enabling multi-agent handoffs (e.g., Coder $\rightarrow$ QA) within the same isolated environment before merging back to the Base Workspace.
## 2. Core Data Model Updates
Introduce explicit types to track project binding and resource ownership.
- **API Payloads:**
TypeScript
```
interface CreateSessionRequest {
projectPath: string; // Absolute local path to target repo
}
```
- **Session State (`AGENT_STATE_ROOT`):**
TypeScript
```
interface SessionMetadata {
sessionId: string;
projectPath: string;
sessionStatus: 'active' | 'suspended' | 'closed';
baseWorkspacePath: string; // e.g., ${AGENT_WORKTREE_ROOT}/${sessionId}/base
createdAt: string;
updatedAt: string;
}
```
- **Project Context (`src/agents/project-context.ts`):**
Update the `taskQueue` schema to act as the persistent ledger for worktree ownership.
TypeScript
```
interface TaskRecord {
taskId: string;
status: 'pending' | 'in_progress' | 'review' | 'merged' | 'failed';
worktreePath?: string; // e.g., ${AGENT_WORKTREE_ROOT}/${sessionId}/tasks/${taskId}
}
```
## 3. API & Control Plane (`src/ui/server.ts`)
Replace implicit session generation with an explicit lifecycle API.
- `POST /api/sessions`: Accepts `CreateSessionRequest`. Initializes the SessionMetadata and provisions the Base Workspace.
- `GET /api/sessions`: Returns existing sessions for resuming work across restarts.
- `POST /api/sessions/:id/run`: Triggers `SchemaDrivenExecutionEngine.runSession(...)`, passing the resolved `SessionMetadata`.
- `POST /api/sessions/:id/close`: Prunes all task worktrees, optionally merges the Base Workspace back to the original `projectPath`, and marks the session closed.
## 4. Provisioning Layer (`src/agents/provisioning.ts`)
Remove all fallback logic relying on `process.cwd()`.
- **Session Initialization:** Clone or create a primary worktree of `projectPath` into `baseWorkspacePath`.
- **Task Provisioning:** When a task begins execution, check out a new branch from the Base Workspace and provision it at `worktreePath`.
- **Security & MCP Isolation:** `SecureCommandExecutor` and MCP handler configurations must dynamically anchor their working directories to the specific `worktreePath` injected into the execution context, preventing traversal outside the task scope.
## 5. Orchestration & Routing (`src/agents/pipeline.ts`)
Implement the hybrid routing model: Domain Events for control flow, Project Context for resource lookup.
1. **The Signal (Domain Events):** When a Coder agent finishes, it emits a standard domain event (e.g., `task_ready_for_review` with the `taskId`). The pipeline routes this event to trigger the QA agent.
2. **The Map (Project Context):** Before initializing the QA agent's sandbox, the lifecycle observer/engine reads `project-context.ts` to look up the `worktreePath` associated with that `taskId`.
3. **The Execution:** The QA agent boots inside the exact same Task Worktree the Coder agent just vacated, preserving all uncommitted files and local state.
4. **The Merge:** Upon successful QA (e.g., `validation_passed`), the orchestration layer commits the Task Worktree, merges it into the Base Workspace, and deletes the Task Worktree.
# turning merge conflicts into first-class orchestration events instead of hard exceptions.
1. Add new domain events:
- merge_conflict_detected
- merge_conflict_resolved
- merge_conflict_unresolved (after max attempts)
- optionally merge_retry_started
2. Extend task state model with conflict-aware statuses:
- add conflict (and maybe resolving_conflict)
3. Change merge code path to return structured outcomes instead of throwing on conflict:
- success
- conflict (with conflictFiles, mergeBase, taskId, worktreePath)
- fatal_error
- only throw for truly fatal cases (repo corruption, missing worktree, etc.)
4. On conflict, patch project context + emit event:
- set task to conflict
- store conflict metadata in task.metadata
- emit merge_conflict_detected
5. Route conflict events to dedicated resolver personas in the pipeline:
- Coder/QA conflict-resolver agent opens same worktreePath
- resolves conflict markers, runs checks
- emits merge_conflict_resolved
6. Retry merge after resolution event:
- integration node attempts merge again
- if successful, emit branch_merged, mark merged, prune task worktree
- if still conflicting, loop with bounded retries
7. Add retry guardrails:
- max conflict-resolution attempts per task
- on exhaustion emit merge_conflict_unresolved and stop cleanly (not crash the whole session)
8. Apply same pattern to session close (base -> project) so close can become:
- conflict workflow or “closed_with_conflicts” state, rather than a hard failure.
This keeps the app stable and lets agents handle conflicts as part of normal orchestration.

968
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -10,7 +10,8 @@
"test": "node --import tsx/esm --test tests/**/*.test.ts",
"verify": "npm run check && npm run check:tests && npm run test && npm run build",
"dev": "node --import tsx/esm src/index.ts",
"ui": "node --import tsx/esm src/ui/server.ts",
"ui:build": "npm --prefix ui run build",
"ui": "npm run ui:build && node --import tsx/esm src/ui/server.ts",
"codex": "node --import tsx/esm src/examples/codex.ts",
"claude": "node --import tsx/esm src/examples/claude.ts",
"start": "node dist/index.js"
@@ -29,11 +30,16 @@
"dependencies": {
"@anthropic-ai/claude-agent-sdk": "^0.2.50",
"@openai/codex-sdk": "^0.104.0",
"cors": "^2.8.6",
"dotenv": "^17.3.1",
"express": "^5.2.1",
"lowdb": "^7.0.1",
"sh-syntax": "^0.5.8",
"zod": "^4.3.6"
},
"devDependencies": {
"@types/cors": "^2.8.19",
"@types/express": "^5.0.6",
"@types/node": "^25.3.0",
"tsx": "^4.21.0",
"typescript": "^5.9.3"

View File

@@ -1,398 +1,113 @@
import { isRecord } from "./types.js";
import { z } from "zod";
import { isDomainEventType, type DomainEventType } from "./domain-events.js";
import {
parseToolClearancePolicy,
toolClearancePolicySchema,
type ToolClearancePolicy as SecurityToolClearancePolicy,
} from "../security/schemas.js";
export type ToolClearancePolicy = SecurityToolClearancePolicy;
export type ManifestPersona = {
id: string;
displayName: string;
systemPromptTemplate: string;
modelConstraint?: string;
toolClearance: ToolClearancePolicy;
};
export const ManifestPersonaSchema = z.object({
id: z.string().trim().min(1, 'Manifest field "id" must be a non-empty string.'),
displayName: z.string().trim().min(1, 'Manifest field "displayName" must be a non-empty string.'),
systemPromptTemplate: z.string().trim().min(1, 'Manifest field "systemPromptTemplate" must be a non-empty string.'),
modelConstraint: z.string().trim().min(1, 'Manifest persona field "modelConstraint" must be a non-empty string when provided.').optional(),
toolClearance: toolClearancePolicySchema,
});
export type ManifestPersona = z.infer<typeof ManifestPersonaSchema>;
export type RelationshipConstraint = {
maxDepth?: number;
maxChildren?: number;
};
export const RelationshipConstraintSchema = z.object({
maxDepth: z.number().int().min(1, 'Manifest field "maxDepth" must be an integer >= 1.').optional(),
maxChildren: z.number().int().min(1, 'Manifest field "maxChildren" must be an integer >= 1.').optional(),
});
export type RelationshipConstraint = z.infer<typeof RelationshipConstraintSchema>;
export type RelationshipEdge = {
parentPersonaId: string;
childPersonaId: string;
constraints?: RelationshipConstraint;
};
export const RelationshipEdgeSchema = z.object({
parentPersonaId: z.string().trim().min(1, 'Manifest field "parentPersonaId" must be a non-empty string.'),
childPersonaId: z.string().trim().min(1, 'Manifest field "childPersonaId" must be a non-empty string.'),
constraints: RelationshipConstraintSchema.optional(),
});
export type RelationshipEdge = z.infer<typeof RelationshipEdgeSchema>;
export type RouteCondition =
| {
type: "always";
}
| {
type: "state_flag";
key: string;
equals: boolean;
}
| {
type: "history_has_event";
event: string;
}
| {
type: "file_exists";
path: string;
};
export const RouteConditionSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal("always") }),
z.object({ type: z.literal("state_flag"), key: z.string().trim().min(1), equals: z.boolean() }),
z.object({ type: z.literal("history_has_event"), event: z.string().trim().min(1) }),
z.object({ type: z.literal("file_exists"), path: z.string().trim().min(1) }),
]);
export type RouteCondition = z.infer<typeof RouteConditionSchema>;
export type PipelineConstraint = {
maxRetries?: number;
};
export const PipelineConstraintSchema = z.object({
maxRetries: z.number().int().min(0, 'Manifest field "maxRetries" must be an integer >= 0.').optional(),
});
export type PipelineConstraint = z.infer<typeof PipelineConstraintSchema>;
export type NodeTopologyKind = "sequential" | "parallel" | "hierarchical" | "retry-unrolled";
export const NodeTopologyKindSchema = z.enum(["sequential", "parallel", "hierarchical", "retry-unrolled"]);
export type NodeTopologyKind = z.infer<typeof NodeTopologyKindSchema>;
export type PipelineNodeTopology = {
kind: NodeTopologyKind;
blockId?: string;
};
export const PipelineNodeTopologySchema = z.object({
kind: NodeTopologyKindSchema,
blockId: z.string().trim().min(1, 'Pipeline node topology blockId must be a non-empty string when provided.').optional(),
});
export type PipelineNodeTopology = z.infer<typeof PipelineNodeTopologySchema>;
export type PipelineNode = {
id: string;
actorId: string;
personaId: string;
constraints?: PipelineConstraint;
topology?: PipelineNodeTopology;
};
export const PipelineNodeSchema = z.object({
id: z.string().trim().min(1),
actorId: z.string().trim().min(1),
personaId: z.string().trim().min(1),
constraints: PipelineConstraintSchema.optional(),
topology: PipelineNodeTopologySchema.optional(),
});
export type PipelineNode = z.infer<typeof PipelineNodeSchema>;
export type PipelineEdge = {
from: string;
to: string;
on?: "success" | "validation_fail" | "failure" | "always";
event?: DomainEventType;
when?: RouteCondition[];
};
export const PipelineEdgeSchema = z.object({
from: z.string().trim().min(1),
to: z.string().trim().min(1),
on: z.enum(["success", "validation_fail", "failure", "always"]).optional(),
event: z.string().refine((val): val is DomainEventType => isDomainEventType(val), {
message: "Pipeline edge field 'event' has unsupported domain event.",
}).optional(),
when: z.array(RouteConditionSchema).optional(),
}).refine((data) => {
if (!data.on && !data.event) return false;
if (data.on && data.event) return false;
return true;
}, {
message: 'Pipeline edge must provide either an "on" trigger or an "event" trigger, but not both.',
});
export type PipelineEdge = z.infer<typeof PipelineEdgeSchema>;
export type PipelineGraph = {
entryNodeId: string;
nodes: PipelineNode[];
edges: PipelineEdge[];
};
export const PipelineGraphSchema = z.object({
entryNodeId: z.string().trim().min(1),
nodes: z.array(PipelineNodeSchema).min(1, "Manifest pipeline.nodes must be a non-empty array."),
edges: z.array(PipelineEdgeSchema),
});
export type PipelineGraph = z.infer<typeof PipelineGraphSchema>;
export type TopologyKind = "hierarchical" | "parallel" | "retry-unrolled" | "sequential";
export const TopologyKindSchema = z.enum(["hierarchical", "parallel", "retry-unrolled", "sequential"]);
export type TopologyKind = z.infer<typeof TopologyKindSchema>;
export type TopologyConstraint = {
maxDepth: number;
maxRetries: number;
};
export const TopologyConstraintSchema = z.object({
maxDepth: z.number().int().min(1).default(4),
maxRetries: z.number().int().min(0).default(2),
});
export type TopologyConstraint = z.infer<typeof TopologyConstraintSchema>;
export type AgentManifest = {
schemaVersion: "1";
topologies: TopologyKind[];
personas: ManifestPersona[];
relationships: RelationshipEdge[];
pipeline: PipelineGraph;
topologyConstraints: TopologyConstraint;
};
/**
 * Reads a required string property from a manifest record.
 * Throws when the property is missing, not a string, or blank after
 * trimming; otherwise returns the trimmed value.
 */
function readString(record: Record<string, unknown>, key: string): string {
  const raw = record[key];
  if (typeof raw === "string") {
    const trimmed = raw.trim();
    if (trimmed.length > 0) {
      return trimmed;
    }
  }
  throw new Error(`Manifest field \"${key}\" must be a non-empty string.`);
}
/**
 * Reads an optional integer property from a manifest record.
 * Returns undefined when the property is absent; throws when it is
 * present but not an integer >= input.min.
 */
function readOptionalInteger(
  record: Record<string, unknown>,
  key: string,
  input: {
    min: number;
  },
): number | undefined {
  const raw = record[key];
  if (raw === undefined) {
    return undefined;
  }
  const isValid = typeof raw === "number" && Number.isInteger(raw) && raw >= input.min;
  if (!isValid) {
    throw new Error(`Manifest field \"${key}\" must be an integer >= ${String(input.min)}.`);
  }
  return raw;
}
/**
 * Reads a required array-of-strings property from a manifest record.
 * Every entry must be a non-blank string; entries are returned trimmed.
 */
function readStringArray(record: Record<string, unknown>, key: string): string[] {
  const raw = record[key];
  if (!Array.isArray(raw)) {
    throw new Error(`Manifest field \"${key}\" must be an array.`);
  }
  return raw.map((item) => {
    if (typeof item !== "string" || item.trim().length === 0) {
      throw new Error(`Manifest field \"${key}\" contains an invalid string.`);
    }
    return item.trim();
  });
}
// Wraps the security-layer tool-clearance parser so that validation
// failures surface with manifest-specific context in the error message.
// The underlying policy shape is owned by ../security/schemas.js.
function parseToolClearance(value: unknown): ToolClearancePolicy {
try {
return parseToolClearancePolicy(value);
} catch (error) {
// Preserve the original failure detail, whether or not it is an Error.
const detail = error instanceof Error ? error.message : String(error);
throw new Error(`Manifest persona toolClearance is invalid: ${detail}`);
}
}
/**
 * Parses a single manifest persona entry.
 * Validates the optional modelConstraint up front, then delegates the
 * required string fields to readString and the tool-clearance policy to
 * parseToolClearance. modelConstraint is omitted (not set to undefined)
 * when absent.
 */
function parsePersona(value: unknown): ManifestPersona {
  if (!isRecord(value)) {
    throw new Error("Manifest persona entry must be an object.");
  }
  const rawModel = value.modelConstraint;
  const hasModel = typeof rawModel === "string";
  if (rawModel !== undefined && (!hasModel || rawModel.trim().length === 0)) {
    throw new Error('Manifest persona field "modelConstraint" must be a non-empty string when provided.');
  }
  return {
    id: readString(value, "id"),
    displayName: readString(value, "displayName"),
    systemPromptTemplate: readString(value, "systemPromptTemplate"),
    ...(hasModel ? { modelConstraint: rawModel.trim() } : {}),
    toolClearance: parseToolClearance(value.toolClearance),
  };
}
/**
 * Parses one parent/child persona relationship edge, including its
 * optional maxDepth/maxChildren constraints block.
 */
function parseRelationship(value: unknown): RelationshipEdge {
  if (!isRecord(value)) {
    throw new Error("Manifest relationship entry must be an object.");
  }
  let constraints: RelationshipEdge["constraints"];
  if (isRecord(value.constraints)) {
    constraints = {
      maxDepth: readOptionalInteger(value.constraints, "maxDepth", { min: 1 }),
      maxChildren: readOptionalInteger(value.constraints, "maxChildren", { min: 1 }),
    };
  }
  return {
    parentPersonaId: readString(value, "parentPersonaId"),
    childPersonaId: readString(value, "childPersonaId"),
    constraints,
  };
}
/**
 * Parses a routing condition for pipeline edges.
 * Supported variants: always, state_flag (key + boolean equals),
 * history_has_event (event name), file_exists (path).
 */
function parseCondition(value: unknown): RouteCondition {
  if (!isRecord(value)) {
    throw new Error("Route condition must be an object.");
  }
  const type = readString(value, "type");
  switch (type) {
    case "always":
      return { type };
    case "state_flag": {
      // Validate key before equals to keep the original error precedence.
      const key = readString(value, "key");
      const equals = value.equals;
      if (typeof equals !== "boolean") {
        throw new Error('Route condition field "equals" must be a boolean.');
      }
      return { type, key, equals };
    }
    case "history_has_event":
      return { type, event: readString(value, "event") };
    case "file_exists":
      return { type, path: readString(value, "path") };
    default:
      throw new Error(`Unsupported route condition type \"${type}\".`);
  }
}
/**
 * Parses one pipeline node.
 * Validates the optional topology block (a supported kind plus an
 * optional non-blank blockId) and the optional maxRetries constraint,
 * then reads the required id/actorId/personaId strings.
 */
function parsePipelineNode(value: unknown): PipelineNode {
  if (!isRecord(value)) {
    throw new Error("Pipeline node must be an object.");
  }
  let parsedTopology: PipelineNodeTopology | undefined;
  const rawTopology = value.topology;
  if (rawTopology !== undefined) {
    if (!isRecord(rawTopology)) {
      throw new Error("Pipeline node topology must be an object when provided.");
    }
    const kind = readString(rawTopology, "kind");
    // Explicit comparisons (not an array lookup) so the compiler narrows
    // `kind` to NodeTopologyKind.
    if (
      kind !== "sequential" &&
      kind !== "parallel" &&
      kind !== "hierarchical" &&
      kind !== "retry-unrolled"
    ) {
      throw new Error(`Pipeline node topology kind "${kind}" is not supported.`);
    }
    const rawBlockId = rawTopology.blockId;
    if (rawBlockId !== undefined && (typeof rawBlockId !== "string" || rawBlockId.trim().length === 0)) {
      throw new Error("Pipeline node topology blockId must be a non-empty string when provided.");
    }
    parsedTopology = {
      kind,
      ...(typeof rawBlockId === "string" ? { blockId: rawBlockId.trim() } : {}),
    };
  }
  const constraints = isRecord(value.constraints)
    ? { maxRetries: readOptionalInteger(value.constraints, "maxRetries", { min: 0 }) }
    : undefined;
  return {
    id: readString(value, "id"),
    actorId: readString(value, "actorId"),
    personaId: readString(value, "personaId"),
    constraints,
    ...(parsedTopology ? { topology: parsedTopology } : {}),
  };
}
// Parses one pipeline edge. An edge must carry exactly one trigger:
// either a lifecycle trigger ("on") or a domain-event trigger ("event"),
// plus an optional list of "when" route conditions. Validation order is
// deliberate: "on" first, then "event", then the mutual-exclusion checks,
// then "when".
function parsePipelineEdge(value: unknown): PipelineEdge {
if (!isRecord(value)) {
throw new Error("Pipeline edge must be an object.");
}
// The closed set of lifecycle trigger values accepted for "on".
const validEvents: NonNullable<PipelineEdge["on"]>[] = [
"success",
"validation_fail",
"failure",
"always",
];
const rawOn = value.on;
let on: PipelineEdge["on"];
if (rawOn !== undefined) {
if (typeof rawOn !== "string" || !validEvents.includes(rawOn as NonNullable<PipelineEdge["on"]>)) {
throw new Error(`Pipeline edge field "on" has unsupported event "${String(rawOn)}".`);
}
on = rawOn as NonNullable<PipelineEdge["on"]>;
}
const rawDomainEvent = value.event;
let event: DomainEventType | undefined;
if (rawDomainEvent !== undefined) {
// isDomainEventType narrows the string to the DomainEventType union.
if (typeof rawDomainEvent !== "string" || !isDomainEventType(rawDomainEvent)) {
throw new Error(`Pipeline edge field "event" has unsupported domain event "${String(rawDomainEvent)}".`);
}
event = rawDomainEvent;
}
// Exactly one trigger kind is required: neither and both are errors.
if (!on && !event) {
throw new Error('Pipeline edge must provide either an "on" trigger or an "event" trigger.');
}
if (on && event) {
throw new Error('Pipeline edge cannot define both "on" and "event" triggers simultaneously.');
}
const rawWhen = value.when;
const when: RouteCondition[] = [];
if (rawWhen !== undefined) {
if (!Array.isArray(rawWhen)) {
throw new Error('Pipeline edge field "when" must be an array when provided.');
}
for (const condition of rawWhen) {
when.push(parseCondition(condition));
}
}
// Optional fields are omitted entirely (not set to undefined); an empty
// "when" list is dropped rather than emitted as [].
return {
from: readString(value, "from"),
to: readString(value, "to"),
...(on ? { on } : {}),
...(event ? { event } : {}),
...(when.length > 0 ? { when } : {}),
};
}
/**
 * Parses the manifest pipeline graph: a non-empty node list, an edge
 * list, and the required entryNodeId. Nodes and edges are validated
 * before entryNodeId to preserve the original error precedence.
 */
function parsePipeline(value: unknown): PipelineGraph {
  if (!isRecord(value)) {
    throw new Error("Manifest pipeline must be an object.");
  }
  const rawNodes = value.nodes;
  if (!Array.isArray(rawNodes) || rawNodes.length === 0) {
    throw new Error("Manifest pipeline.nodes must be a non-empty array.");
  }
  const rawEdges = value.edges;
  if (!Array.isArray(rawEdges)) {
    throw new Error("Manifest pipeline.edges must be an array.");
  }
  const nodes = rawNodes.map(parsePipelineNode);
  const edges = rawEdges.map(parsePipelineEdge);
  return {
    entryNodeId: readString(value, "entryNodeId"),
    nodes,
    edges,
  };
}
/**
 * Parses the manifest "topologies" list, accepting only the four supported
 * topology kinds.
 *
 * @throws Error when the value is not a non-empty array or contains an
 *   unsupported entry.
 */
function parseTopologies(value: unknown): TopologyKind[] {
  if (!Array.isArray(value) || value.length === 0) {
    throw new Error("Manifest topologies must be a non-empty array.");
  }
  const supported: ReadonlySet<string> = new Set([
    "hierarchical",
    "parallel",
    "retry-unrolled",
    "sequential",
  ]);
  return value.map((entry) => {
    if (typeof entry !== "string" || !supported.has(entry)) {
      throw new Error("Manifest topologies contains an unsupported topology kind.");
    }
    return entry as TopologyKind;
  });
}
/**
 * Parses the manifest "topologyConstraints" object, applying defaults when
 * fields are absent: maxDepth 4 (min 1), maxRetries 2 (min 0).
 *
 * @throws Error when the value is not an object or an integer bound fails.
 */
function parseTopologyConstraints(value: unknown): TopologyConstraint {
  if (!isRecord(value)) {
    throw new Error("Manifest topologyConstraints must be an object.");
  }
  return {
    maxDepth: readOptionalInteger(value, "maxDepth", { min: 1 }) ?? 4,
    maxRetries: readOptionalInteger(value, "maxRetries", { min: 0 }) ?? 2,
  };
}
// Zod schema for the versioned agent manifest document (schemaVersion "1").
// Structural validation only: cross-reference checks (duplicate ids, persona
// references, DAG shape) are enforced separately in parseAgentManifest.
export const AgentManifestSchema = z.object({
  schemaVersion: z.literal("1"),
  topologies: z.array(TopologyKindSchema).min(1, "Manifest topologies must be a non-empty array."),
  personas: z.array(ManifestPersonaSchema).min(1, "Manifest personas must be a non-empty array."),
  relationships: z.array(RelationshipEdgeSchema),
  pipeline: PipelineGraphSchema,
  topologyConstraints: TopologyConstraintSchema,
});
export type AgentManifest = z.infer<typeof AgentManifestSchema>;
function assertNoDuplicates(items: string[], label: string): void {
const seen = new Set<string>();
for (const item of items) {
if (seen.has(item)) {
throw new Error(`${label} contains duplicate id \"${item}\".`);
throw new Error(`${label} contains duplicate id "${item}".`);
}
seen.add(item);
}
@@ -409,20 +124,20 @@ function assertPipelineDag(pipeline: PipelineGraph): void {
}
if (!nodeIds.has(pipeline.entryNodeId)) {
throw new Error(`Pipeline entry node \"${pipeline.entryNodeId}\" is not defined.`);
throw new Error(`Pipeline entry node "${pipeline.entryNodeId}" is not defined.`);
}
for (const edge of pipeline.edges) {
if (!nodeIds.has(edge.from)) {
throw new Error(`Pipeline edge references unknown from node \"${edge.from}\".`);
throw new Error(`Pipeline edge references unknown from node "${edge.from}".`);
}
if (!nodeIds.has(edge.to)) {
throw new Error(`Pipeline edge references unknown to node \"${edge.to}\".`);
throw new Error(`Pipeline edge references unknown to node "${edge.to}".`);
}
const neighbors = adjacency.get(edge.from);
if (!neighbors) {
throw new Error(`Internal DAG error for node \"${edge.from}\".`);
throw new Error(`Internal DAG error for node "${edge.from}".`);
}
neighbors.push(edge.to);
const currentInDegree = indegree.get(edge.to);
@@ -503,34 +218,16 @@ function assertRelationshipDag(relationships: RelationshipEdge[]): void {
}
export function parseAgentManifest(input: unknown): AgentManifest {
if (!isRecord(input)) {
throw new Error("AgentManifest must be an object.");
let manifest: AgentManifest;
try {
manifest = AgentManifestSchema.parse(input);
} catch (error) {
if (error instanceof z.ZodError) {
throw new Error("Manifest invalid: " + error.issues.map((e: any) => e.message).join(", "));
}
throw error;
}
const schemaVersion = readString(input, "schemaVersion");
if (schemaVersion !== "1") {
throw new Error(`Unsupported AgentManifest schemaVersion \"${schemaVersion}\".`);
}
const personasValue = input.personas;
if (!Array.isArray(personasValue) || personasValue.length === 0) {
throw new Error("Manifest personas must be a non-empty array.");
}
const relationshipsValue = input.relationships;
if (!Array.isArray(relationshipsValue)) {
throw new Error("Manifest relationships must be an array.");
}
const manifest: AgentManifest = {
schemaVersion: "1",
topologies: parseTopologies(input.topologies),
personas: personasValue.map(parsePersona),
relationships: relationshipsValue.map(parseRelationship),
pipeline: parsePipeline(input.pipeline),
topologyConstraints: parseTopologyConstraints(input.topologyConstraints),
};
assertNoDuplicates(
manifest.personas.map((persona) => persona.id),
"Manifest personas",
@@ -545,12 +242,12 @@ export function parseAgentManifest(input: unknown): AgentManifest {
for (const relation of manifest.relationships) {
if (!personaIds.has(relation.parentPersonaId)) {
throw new Error(
`Relationship references unknown parent persona \"${relation.parentPersonaId}\".`,
`Relationship references unknown parent persona "${relation.parentPersonaId}".`,
);
}
if (!personaIds.has(relation.childPersonaId)) {
throw new Error(
`Relationship references unknown child persona \"${relation.childPersonaId}\".`,
`Relationship references unknown child persona "${relation.childPersonaId}".`,
);
}
}
@@ -559,7 +256,7 @@ export function parseAgentManifest(input: unknown): AgentManifest {
for (const node of manifest.pipeline.nodes) {
if (!personaIds.has(node.personaId)) {
throw new Error(`Pipeline node \"${node.id}\" references unknown persona \"${node.personaId}\".`);
throw new Error(`Pipeline node "${node.id}" references unknown persona "${node.personaId}".`);
}
if (node.topology && !manifest.topologies.includes(node.topology.kind as TopologyKind)) {

View File

@@ -26,6 +26,7 @@ import type { JsonObject } from "./types.js";
import { SessionWorktreeManager, type SessionMetadata } from "./session-lifecycle.js";
import {
SecureCommandExecutor,
type SecurityViolationHandling,
type SecurityAuditEvent,
type SecurityAuditSink,
SecurityRulesEngine,
@@ -46,7 +47,7 @@ export type OrchestrationSettings = {
maxRetries: number;
maxChildren: number;
mergeConflictMaxAttempts: number;
securityViolationHandling: "hard_abort" | "validation_fail";
securityViolationHandling: SecurityViolationHandling;
runtimeContext: Record<string, string | number | boolean>;
};
@@ -211,6 +212,9 @@ function createActorSecurityContext(input: {
blockedEnvAssignments: ["AGENT_STATE_ROOT", "AGENT_PROJECT_CONTEXT_PATH"],
},
auditSink,
{
violationHandling: input.settings.securityViolationHandling,
},
);
return {
@@ -342,6 +346,7 @@ export class SchemaDrivenExecutionEngine {
this.sessionWorktreeManager = new SessionWorktreeManager({
worktreeRoot: resolve(this.settings.workspaceRoot, this.config.provisioning.gitWorktree.rootDirectory),
baseRef: this.config.provisioning.gitWorktree.baseRef,
targetPath: this.config.provisioning.gitWorktree.targetPath,
});
this.actorExecutors = toExecutorMap(input.actorExecutors);
@@ -426,7 +431,11 @@ export class SchemaDrivenExecutionEngine {
}): Promise<PipelineRunSummary> {
const managerSessionId = `${input.sessionId}__pipeline`;
const managerSession = this.manager.createSession(managerSessionId);
const workspaceRoot = input.sessionMetadata?.baseWorkspacePath ?? this.settings.workspaceRoot;
const workspaceRoot = input.sessionMetadata
? this.sessionWorktreeManager.resolveWorkingDirectoryForWorktree(
input.sessionMetadata.baseWorkspacePath,
)
: this.settings.workspaceRoot;
const projectContextStore = input.sessionMetadata
? new FileSystemProjectContextStore({
filePath: resolveSessionProjectContextPath(this.settings.stateRoot, input.sessionId),
@@ -531,6 +540,7 @@ export class SchemaDrivenExecutionEngine {
return {
taskId,
workingDirectory: ensured.taskWorkingDirectory,
worktreePath: ensured.taskWorktreePath,
statusAtStart,
...(existing?.metadata ? { metadata: existing.metadata } : {}),

View File

@@ -63,6 +63,7 @@ export type ActorExecutionResult = {
export type ActorToolPermissionResult =
| {
behavior: "allow";
updatedInput?: Record<string, unknown>;
toolUseID?: string;
}
| {
@@ -171,6 +172,7 @@ export type ActorExecutionSecurityContext = {
export type TaskExecutionResolution = {
taskId: string;
workingDirectory: string;
worktreePath: string;
statusAtStart: string;
metadata?: JsonObject;
@@ -941,7 +943,7 @@ export class PipelineExecutor {
node,
toolClearance,
prompt,
worktreePathOverride: taskResolution?.worktreePath,
worktreePathOverride: taskResolution?.workingDirectory,
});
const result = await this.invokeActorExecutor({
@@ -970,6 +972,7 @@ export class PipelineExecutor {
...(taskResolution
? {
taskId: taskResolution.taskId,
workingDirectory: taskResolution.workingDirectory,
worktreePath: taskResolution.worktreePath,
}
: {}),
@@ -1309,6 +1312,7 @@ export class PipelineExecutor {
const createToolPermissionHandler = (): ActorToolPermissionHandler =>
this.createToolPermissionHandler({
allowedTools: executionContext.allowedTools,
violationMode: executionContext.security.violationMode,
sessionId: input.sessionId,
nodeId: input.nodeId,
attempt: input.attempt,
@@ -1326,6 +1330,7 @@ export class PipelineExecutor {
private createToolPermissionHandler(input: {
allowedTools: readonly string[];
violationMode: SecurityViolationHandling;
sessionId: string;
nodeId: string;
attempt: number;
@@ -1340,7 +1345,7 @@ export class PipelineExecutor {
attempt: input.attempt,
};
return async (toolName, _input, options) => {
return async (toolName, toolInput, options) => {
const toolUseID = options.toolUseID;
if (options.signal.aborted) {
return {
@@ -1358,11 +1363,28 @@ export class PipelineExecutor {
caseInsensitiveLookup: caseInsensitiveAllowLookup,
});
if (!allowMatch) {
rulesEngine?.assertToolInvocationAllowed({
tool: candidates[0] ?? toolName,
toolClearance: toolPolicy,
context: toolAuditContext,
});
if (rulesEngine) {
try {
rulesEngine.assertToolInvocationAllowed({
tool: candidates[0] ?? toolName,
toolClearance: toolPolicy,
context: toolAuditContext,
});
} catch (error) {
if (
!(input.violationMode === "dangerous_warn_only" && error instanceof SecurityViolationError)
) {
throw error;
}
}
}
if (input.violationMode === "dangerous_warn_only") {
return {
behavior: "allow",
updatedInput: toolInput,
...(toolUseID ? { toolUseID } : {}),
};
}
return {
behavior: "deny",
message: `Tool "${toolName}" is not in the resolved execution allowlist.`,
@@ -1379,6 +1401,7 @@ export class PipelineExecutor {
return {
behavior: "allow",
updatedInput: toolInput,
...(toolUseID ? { toolUseID } : {}),
};
};

View File

@@ -9,15 +9,15 @@ import {
import { isDomainEventType, type DomainEventEmission } from "../agents/domain-events.js";
import type { ActorExecutionInput, ActorExecutionResult, ActorExecutor } from "../agents/pipeline.js";
import { isRecord, type JsonObject, type JsonValue } from "../agents/types.js";
import { createSessionContext, type SessionContext } from "../examples/session-context.js";
import { ClaudeObservabilityLogger } from "./claude-observability.js";
import { ClaudeObservabilityLogger } from "../ui/claude-observability.js";
import { z } from "zod";
export type RunProvider = "codex" | "claude";
export type ProviderRunRuntime = {
provider: RunProvider;
config: Readonly<AppConfig>;
sessionContext: SessionContext;
sharedEnv: Record<string, string>;
claudeObservability: ClaudeObservabilityLogger;
close: () => Promise<void>;
};
@@ -30,6 +30,16 @@ type ProviderUsage = {
costUsd?: number;
};
/**
 * Returns a strictly string-valued copy of an environment map, dropping
 * every entry whose value is undefined (e.g. unset process.env keys).
 */
function sanitizeEnv(input: Record<string, string | undefined>): Record<string, string> {
  const definedEntries = Object.entries(input).filter(
    (pair): pair is [string, string] => typeof pair[1] === "string",
  );
  return Object.fromEntries(definedEntries);
}
const ACTOR_RESPONSE_SCHEMA = {
type: "object",
additionalProperties: true,
@@ -74,8 +84,15 @@ const CLAUDE_OUTPUT_FORMAT = {
schema: ACTOR_RESPONSE_SCHEMA,
} as const;
const CLAUDE_PROVIDER_MAX_TURNS = 2;
// Zod schema for the actor's structured model output. Only "status" is
// strictly validated here; the remaining fields are accepted as unknown and
// narrowed later (toJsonObject / toBooleanRecord / toEventEmissions) in
// parseActorExecutionResultFromModelOutput.
const ActorResponseSchema = z.object({
  status: z.enum(["success", "validation_fail", "failure"]),
  payload: z.unknown().optional(),
  stateFlags: z.unknown().optional(),
  stateMetadata: z.unknown().optional(),
  events: z.unknown().optional(),
  failureKind: z.unknown().optional(),
  failureCode: z.unknown().optional(),
});
function toErrorMessage(error: unknown): string {
if (error instanceof Error) {
return error.message;
@@ -83,6 +100,23 @@ function toErrorMessage(error: unknown): string {
return String(error);
}
/**
 * Working directory for provider-executed turns: the security-scoped
 * worktree path carried on the actor's execution context.
 */
export function resolveProviderWorkingDirectory(actorInput: ActorExecutionInput): string {
  const { security } = actorInput.executionContext;
  return security.worktreePath;
}
/**
 * Builds the environment passed to a provider subprocess/turn.
 *
 * Layers, in increasing precedence: the runtime's shared env, optional
 * Claude auth variables (when includeClaudeAuth is set), and the resolved
 * AGENT_WORKTREE_PATH. The merged map is sanitized so only defined string
 * values remain.
 */
export function buildProviderRuntimeEnv(input: {
  runtime: ProviderRunRuntime;
  actorInput: ActorExecutionInput;
  includeClaudeAuth?: boolean;
}): Record<string, string> {
  const { runtime, actorInput, includeClaudeAuth } = input;
  const authEnv = includeClaudeAuth ? buildClaudeAuthEnv(runtime.config.provider) : {};
  const merged = {
    ...runtime.sharedEnv,
    ...authEnv,
    AGENT_WORKTREE_PATH: resolveProviderWorkingDirectory(actorInput),
  };
  return sanitizeEnv(merged);
}
/**
 * Deep-copies a value into plain JSON data via a stringify/parse round trip.
 * NOTE(review): non-JSON-serializable inputs (undefined, bare functions)
 * make JSON.stringify return undefined and JSON.parse then throws — this
 * matches the original behavior.
 */
function toJsonValue(value: unknown): JsonValue {
  const serialized = JSON.stringify(value);
  return JSON.parse(serialized) as JsonValue;
}
@@ -238,8 +272,8 @@ function ensureUsageMetadata(input: {
result: ActorExecutionResult;
providerUsage: ProviderUsage;
}): ActorExecutionResult {
const stateMetadata = toJsonObject(input.result.stateMetadata) ?? {};
const existingUsage = toJsonObject(stateMetadata.usage) ?? {};
const stateMetadata = (input.result.stateMetadata as JsonObject | undefined) ?? {};
const existingUsage = (stateMetadata.usage as JsonObject | undefined) ?? {};
const usage: JsonObject = {
...existingUsage,
@@ -274,7 +308,9 @@ export function parseActorExecutionResultFromModelOutput(input: {
structuredOutput?: unknown;
}): ActorExecutionResult {
const parsed = tryParseResponseObject(input.rawText, input.structuredOutput);
if (!isRecord(parsed)) {
const result = ActorResponseSchema.safeParse(parsed);
if (!result.success) {
return {
status: "success",
payload: {
@@ -283,31 +319,22 @@ export function parseActorExecutionResultFromModelOutput(input: {
};
}
const status = parsed.status;
if (status !== "success" && status !== "validation_fail" && status !== "failure") {
return {
status: "success",
payload: {
assistantResponse: input.rawText.trim(),
},
};
}
const payload = toJsonObject(parsed.payload) ?? {
const { data } = result;
const payload = toJsonObject(data.payload) ?? {
assistantResponse: input.rawText.trim(),
};
const stateMetadata = toJsonObject(parsed.stateMetadata);
const stateFlags = toBooleanRecord(parsed.stateFlags);
const events = toEventEmissions(parsed.events);
const failureKind = parsed.failureKind === "soft" || parsed.failureKind === "hard"
? parsed.failureKind
const stateMetadata = toJsonObject(data.stateMetadata);
const stateFlags = toBooleanRecord(data.stateFlags);
const events = toEventEmissions(data.events);
const failureKind = data.failureKind === "soft" || data.failureKind === "hard"
? data.failureKind
: undefined;
const failureCode = typeof parsed.failureCode === "string"
? parsed.failureCode
const failureCode = typeof data.failureCode === "string"
? data.failureCode
: undefined;
return {
status,
status: data.status,
payload,
...(stateFlags ? { stateFlags } : {}),
...(stateMetadata ? { stateMetadata } : {}),
@@ -367,6 +394,7 @@ async function runCodexActor(input: {
const prompt = buildActorPrompt(actorInput);
const startedAt = Date.now();
const apiKey = resolveOpenAiApiKey(runtime.config.provider);
const workingDirectory = resolveProviderWorkingDirectory(actorInput);
const codex = new Codex({
...(apiKey ? { apiKey } : {}),
@@ -376,28 +404,29 @@ async function runCodexActor(input: {
...(actorInput.mcp.resolvedConfig.codexConfig
? { config: actorInput.mcp.resolvedConfig.codexConfig }
: {}),
env: runtime.sessionContext.runtimeInjection.env,
env: buildProviderRuntimeEnv({
runtime,
actorInput,
}),
});
const thread = codex.startThread({
workingDirectory: runtime.sessionContext.runtimeInjection.workingDirectory,
workingDirectory,
skipGitRepoCheck: runtime.config.provider.codexSkipGitCheck,
});
const turn = await runtime.sessionContext.runInSession(() =>
thread.run(prompt, {
signal: actorInput.signal,
outputSchema: ACTOR_RESPONSE_SCHEMA,
}),
);
const turn = await thread.run(prompt, {
signal: actorInput.signal,
outputSchema: ACTOR_RESPONSE_SCHEMA,
});
const usage: ProviderUsage = {
...(turn.usage
? {
tokenInput: turn.usage.input_tokens + turn.usage.cached_input_tokens,
tokenOutput: turn.usage.output_tokens,
tokenTotal: turn.usage.input_tokens + turn.usage.cached_input_tokens + turn.usage.output_tokens,
}
tokenInput: turn.usage.input_tokens + turn.usage.cached_input_tokens,
tokenOutput: turn.usage.output_tokens,
tokenTotal: turn.usage.input_tokens + turn.usage.cached_input_tokens + turn.usage.output_tokens,
}
: {}),
durationMs: Date.now() - startedAt,
};
@@ -457,22 +486,24 @@ function buildClaudeOptions(input: {
actorInput: ActorExecutionInput;
}): Options {
const { runtime, actorInput } = input;
const workingDirectory = resolveProviderWorkingDirectory(actorInput);
const authOptionOverrides = runtime.config.provider.anthropicOauthToken
? { authToken: runtime.config.provider.anthropicOauthToken }
: (() => {
const token = resolveAnthropicToken(runtime.config.provider);
return token ? { apiKey: token } : {};
})();
const token = resolveAnthropicToken(runtime.config.provider);
return token ? { apiKey: token } : {};
})();
const runtimeEnv = {
...runtime.sessionContext.runtimeInjection.env,
...buildClaudeAuthEnv(runtime.config.provider),
};
const runtimeEnv = buildProviderRuntimeEnv({
runtime,
actorInput,
includeClaudeAuth: true,
});
const traceContext = toClaudeTraceContext(actorInput);
return {
maxTurns: CLAUDE_PROVIDER_MAX_TURNS,
maxTurns: runtime.config.provider.claudeMaxTurns,
...(runtime.config.provider.claudeModel
? { model: runtime.config.provider.claudeModel }
: {}),
@@ -484,7 +515,7 @@ function buildClaudeOptions(input: {
? { mcpServers: actorInput.mcp.resolvedConfig.claudeMcpServers as Options["mcpServers"] }
: {}),
canUseTool: actorInput.mcp.createClaudeCanUseTool(),
cwd: runtime.sessionContext.runtimeInjection.workingDirectory,
cwd: workingDirectory,
env: runtimeEnv,
...runtime.claudeObservability.toOptionOverrides({
context: traceContext,
@@ -507,8 +538,8 @@ async function runClaudeTurn(input: {
context: traceContext,
data: {
...(options.model ? { model: options.model } : {}),
maxTurns: options.maxTurns ?? CLAUDE_PROVIDER_MAX_TURNS,
cwd: input.runtime.sessionContext.runtimeInjection.workingDirectory,
maxTurns: options.maxTurns ?? input.runtime.config.provider.claudeMaxTurns,
...(typeof options.cwd === "string" ? { cwd: options.cwd } : {}),
},
});
@@ -605,13 +636,11 @@ async function runClaudeActor(input: {
actorInput: ActorExecutionInput;
}): Promise<ActorExecutionResult> {
const prompt = buildActorPrompt(input.actorInput);
const turn = await input.runtime.sessionContext.runInSession(() =>
runClaudeTurn({
runtime: input.runtime,
actorInput: input.actorInput,
prompt,
}),
);
const turn = await runClaudeTurn({
runtime: input.runtime,
actorInput: input.actorInput,
prompt,
});
const parsed = parseActorExecutionResultFromModelOutput({
rawText: turn.text,
@@ -626,33 +655,21 @@ async function runClaudeActor(input: {
export async function createProviderRunRuntime(input: {
provider: RunProvider;
initialPrompt: string;
config: Readonly<AppConfig>;
projectPath: string;
observabilityRootPath?: string;
baseEnv?: Record<string, string | undefined>;
}): Promise<ProviderRunRuntime> {
const sessionContext = await createSessionContext(input.provider, {
prompt: input.initialPrompt,
config: input.config,
workspaceRoot: input.projectPath,
});
const claudeObservability = new ClaudeObservabilityLogger({
workspaceRoot: input.observabilityRootPath ?? input.projectPath,
workspaceRoot: input.observabilityRootPath ?? process.cwd(),
config: input.config.provider.claudeObservability,
});
return {
provider: input.provider,
config: input.config,
sessionContext,
sharedEnv: sanitizeEnv(input.baseEnv ?? process.env),
claudeObservability,
close: async () => {
try {
await sessionContext.close();
} finally {
await claudeObservability.close();
}
},
close: async () => claudeObservability.close(),
};
}

View File

@@ -95,6 +95,11 @@ function assertAbsolutePath(path: string, label: string): string {
return resolve(path);
}
/**
 * Resolves a worktree path and strips the leading "/private" from macOS
 * "/private/var/..." paths so they compare equal to their "/var/..." symlink
 * form when matching git worktree records.
 */
function normalizeWorktreePath(path: string): string {
  const resolved = resolve(path);
  if (resolved.startsWith("/private/var/")) {
    return resolved.slice("/private".length);
  }
  return resolved;
}
function assertNonEmptyString(value: unknown, label: string): string {
if (typeof value !== "string" || value.trim().length === 0) {
throw new Error(`${label} must be a non-empty string.`);
@@ -358,13 +363,16 @@ export class FileSystemSessionMetadataStore {
export class SessionWorktreeManager {
private readonly worktreeRoot: string;
private readonly baseRef: string;
private readonly targetPath?: string;
constructor(input: {
worktreeRoot: string;
baseRef: string;
targetPath?: string;
}) {
this.worktreeRoot = assertAbsolutePath(input.worktreeRoot, "worktreeRoot");
this.baseRef = assertNonEmptyString(input.baseRef, "baseRef");
this.targetPath = normalizeWorktreeTargetPath(input.targetPath, "targetPath");
}
resolveBaseWorkspacePath(sessionId: string): string {
@@ -378,6 +386,11 @@ export class SessionWorktreeManager {
return resolve(this.worktreeRoot, scopedSession, "tasks", scopedTask);
}
/**
 * Maps a worktree root to the directory agents should run in: the configured
 * targetPath sub-directory when one is set, otherwise the worktree root
 * itself. The input must be an absolute path.
 */
resolveWorkingDirectoryForWorktree(worktreePath: string): string {
  const absoluteRoot = assertAbsolutePath(worktreePath, "worktreePath");
  if (!this.targetPath) {
    return absoluteRoot;
  }
  return resolve(absoluteRoot, this.targetPath);
}
private resolveBaseBranchName(sessionId: string): string {
const scoped = sanitizeSegment(sessionId, "session");
return `ai-ops/${scoped}/base`;
@@ -399,14 +412,13 @@ export class SessionWorktreeManager {
await mkdir(dirname(baseWorkspacePath), { recursive: true });
const alreadyExists = await pathExists(baseWorkspacePath);
if (alreadyExists) {
return;
if (!(await pathExists(baseWorkspacePath))) {
const repoRoot = await runGit(["-C", projectPath, "rev-parse", "--show-toplevel"]);
const branchName = this.resolveBaseBranchName(input.sessionId);
await runGit(["-C", repoRoot, "worktree", "add", "-B", branchName, baseWorkspacePath, this.baseRef]);
}
const repoRoot = await runGit(["-C", projectPath, "rev-parse", "--show-toplevel"]);
const branchName = this.resolveBaseBranchName(input.sessionId);
await runGit(["-C", repoRoot, "worktree", "add", "-B", branchName, baseWorkspacePath, this.baseRef]);
await this.ensureWorktreeTargetPath(baseWorkspacePath);
}
async ensureTaskWorktree(input: {
@@ -416,6 +428,7 @@ export class SessionWorktreeManager {
existingWorktreePath?: string;
}): Promise<{
taskWorktreePath: string;
taskWorkingDirectory: string;
}> {
const baseWorkspacePath = assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath");
const maybeExisting = input.existingWorktreePath?.trim();
@@ -425,7 +438,10 @@ export class SessionWorktreeManager {
const branchName = this.resolveTaskBranchName(input.sessionId, input.taskId);
const attachedWorktree = await this.findWorktreePathForBranch(baseWorkspacePath, branchName);
if (attachedWorktree && attachedWorktree !== worktreePath) {
const normalizedWorktreePath = normalizeWorktreePath(worktreePath);
const normalizedAttachedWorktree = attachedWorktree ? normalizeWorktreePath(attachedWorktree) : undefined;
if (normalizedAttachedWorktree && normalizedAttachedWorktree !== normalizedWorktreePath) {
throw new Error(
`Task branch "${branchName}" is already attached to worktree "${attachedWorktree}", ` +
`expected "${worktreePath}".`,
@@ -450,9 +466,15 @@ export class SessionWorktreeManager {
]);
if (addResult.exitCode !== 0) {
const attachedAfterFailure = await this.findWorktreePathForBranch(baseWorkspacePath, branchName);
if (attachedAfterFailure === worktreePath && (await pathExists(worktreePath))) {
if (
attachedAfterFailure &&
normalizeWorktreePath(attachedAfterFailure) === normalizedWorktreePath &&
(await pathExists(worktreePath))
) {
const taskWorkingDirectory = await this.ensureWorktreeTargetPath(worktreePath);
return {
taskWorktreePath: worktreePath,
taskWorkingDirectory,
};
}
throw new Error(
@@ -462,8 +484,10 @@ export class SessionWorktreeManager {
}
}
const taskWorkingDirectory = await this.ensureWorktreeTargetPath(worktreePath);
return {
taskWorktreePath: worktreePath,
taskWorkingDirectory,
};
}
@@ -780,4 +804,69 @@ export class SessionWorktreeManager {
}
return parseGitWorktreeRecords(result.stdout);
}
/**
 * Ensures the effective working directory inside a worktree exists and is a
 * directory, returning its absolute path.
 *
 * When a targetPath is configured, first narrows the worktree to that
 * sub-tree via cone-mode sparse-checkout (init, then set), so only the
 * target directory is materialized. Then stats the resolved working
 * directory and converts failures into descriptive errors.
 *
 * @throws Error when the configured targetPath does not exist or is not a
 *   directory in the base ref, or (without a targetPath) when the worktree
 *   path itself is missing or not a directory; other stat errors propagate.
 */
private async ensureWorktreeTargetPath(worktreePath: string): Promise<string> {
  if (this.targetPath) {
    // Order matters: sparse-checkout must be initialized before the
    // target set is applied.
    await runGit(["-C", worktreePath, "sparse-checkout", "init", "--cone"]);
    await runGit(["-C", worktreePath, "sparse-checkout", "set", this.targetPath]);
  }
  const workingDirectory = this.resolveWorkingDirectoryForWorktree(worktreePath);
  let workingDirectoryStats;
  try {
    workingDirectoryStats = await stat(workingDirectory);
  } catch (error) {
    // ENOENT gets a targeted message; anything else is rethrown as-is.
    if ((error as NodeJS.ErrnoException).code === "ENOENT") {
      if (this.targetPath) {
        throw new Error(
          `Configured worktree target path "${this.targetPath}" is not a directory in ref "${this.baseRef}".`,
        );
      }
      throw new Error(`Worktree path "${workingDirectory}" does not exist.`);
    }
    throw error;
  }
  if (!workingDirectoryStats.isDirectory()) {
    if (this.targetPath) {
      throw new Error(
        `Configured worktree target path "${this.targetPath}" is not a directory in ref "${this.baseRef}".`,
      );
    }
    throw new Error(`Worktree path "${workingDirectory}" is not a directory.`);
  }
  return workingDirectory;
}
}
/**
 * Normalizes an optional repo-relative target path to canonical "a/b/c" form.
 *
 * Backslashes become forward slashes; segments are trimmed, and empty or "."
 * segments are dropped. Returns undefined for undefined, blank, or
 * effectively-empty input ("./").
 *
 * @throws Error for absolute paths (POSIX or drive-letter form) and for any
 *   ".." segment, keyed by `key` in the message.
 */
function normalizeWorktreeTargetPath(value: string | undefined, key: string): string | undefined {
  if (value === undefined) {
    return undefined;
  }
  const trimmed = value.trim();
  if (!trimmed) {
    return undefined;
  }
  const forwardSlashed = trimmed.replace(/\\/g, "/");
  const looksAbsolute = isAbsolute(forwardSlashed) || /^[a-zA-Z]:\//.test(forwardSlashed);
  if (looksAbsolute) {
    throw new Error(`${key} must be a relative path within the repository worktree.`);
  }
  const segments: string[] = [];
  for (const rawSegment of forwardSlashed.split("/")) {
    const segment = rawSegment.trim();
    if (segment === "..") {
      throw new Error(`${key} must not contain ".." path segments.`);
    }
    if (segment.length === 0 || segment === ".") {
      continue;
    }
    segments.push(segment);
  }
  if (segments.length === 0) {
    return undefined;
  }
  return segments.join("/");
}

View File

@@ -16,6 +16,7 @@ export type ProviderRuntimeConfig = {
anthropicApiKey?: string;
claudeModel?: string;
claudeCodePath?: string;
claudeMaxTurns: number;
claudeObservability: ClaudeObservabilityRuntimeConfig;
};
@@ -136,6 +137,8 @@ const DEFAULT_CLAUDE_OBSERVABILITY: ClaudeObservabilityRuntimeConfig = {
debugLogPath: undefined,
};
const DEFAULT_CLAUDE_MAX_TURNS = 2;
function readOptionalString(
env: NodeJS.ProcessEnv,
key: string,
@@ -401,6 +404,12 @@ export function loadConfig(env: NodeJS.ProcessEnv = process.env): Readonly<AppCo
anthropicApiKey,
claudeModel: normalizeClaudeModel(readOptionalString(env, "CLAUDE_MODEL")),
claudeCodePath: readOptionalString(env, "CLAUDE_CODE_PATH"),
claudeMaxTurns: readIntegerWithBounds(
env,
"CLAUDE_MAX_TURNS",
DEFAULT_CLAUDE_MAX_TURNS,
{ min: 1 },
),
claudeObservability: {
mode: parseClaudeObservabilityMode(
readStringWithFallback(

View File

@@ -19,7 +19,7 @@ function requiredPrompt(argv: string[]): string {
function buildOptions(config = getConfig()): Options {
return {
maxTurns: 1,
maxTurns: config.provider.claudeMaxTurns,
...(config.provider.claudeModel ? { model: config.provider.claudeModel } : {}),
...(config.provider.claudeCodePath
? { pathToClaudeCodeExecutable: config.provider.claudeCodePath }

View File

@@ -1,6 +1,7 @@
import { randomUUID } from "node:crypto";
import { mkdir, readFile, writeFile } from "node:fs/promises";
import { resolve } from "node:path";
import { JSONFilePreset } from "lowdb/node";
import { SchemaDrivenExecutionEngine } from "../agents/orchestration.js";
import { parseAgentManifest, type AgentManifest } from "../agents/manifest.js";
import { FileSystemProjectContextStore } from "../agents/project-context.js";
@@ -15,12 +16,12 @@ import {
type SessionMetadata,
} from "../agents/session-lifecycle.js";
import { loadConfig, type AppConfig } from "../config.js";
import { parseEnvFile } from "./env-store.js";
import { parseEnvFile } from "../store/env-store.js";
import {
createProviderActorExecutor,
createProviderRunRuntime,
type RunProvider,
} from "./provider-executor.js";
} from "../agents/provider-executor.js";
const RUN_META_FILE_NAME = "ui-run-meta.json";
@@ -267,7 +268,9 @@ async function writeRunMeta(input: {
const sessionDirectory = resolve(input.stateRoot, input.sessionId);
await mkdir(sessionDirectory, { recursive: true });
const path = resolve(sessionDirectory, RUN_META_FILE_NAME);
await writeFile(path, `${JSON.stringify(input.run, null, 2)}\n`, "utf8");
const db = await JSONFilePreset<RunRecord>(path, input.run);
db.data = input.run;
await db.write();
}
export async function readRunMetaBySession(input: {
@@ -298,9 +301,9 @@ export async function readRunMetaBySession(input: {
sessionId: record.sessionId,
status:
record.status === "running" ||
record.status === "success" ||
record.status === "failure" ||
record.status === "cancelled"
record.status === "success" ||
record.status === "failure" ||
record.status === "cancelled"
? record.status
: "failure",
startedAt: record.startedAt,
@@ -359,6 +362,7 @@ export class UiRunService {
worktreeManager: new SessionWorktreeManager({
worktreeRoot: paths.worktreeRoot,
baseRef: config.provisioning.gitWorktree.baseRef,
targetPath: config.provisioning.gitWorktree.targetPath,
}),
};
}
@@ -485,10 +489,9 @@ export class UiRunService {
if (executionMode === "provider") {
providerRuntime = await createProviderRunRuntime({
provider,
initialPrompt: input.prompt,
config,
projectPath: session?.baseWorkspacePath ?? this.workspaceRoot,
observabilityRootPath: this.workspaceRoot,
baseEnv: process.env,
});
}
@@ -496,10 +499,10 @@ export class UiRunService {
executionMode === "provider" && providerRuntime
? createSingleExecutorMap(manifest, createProviderActorExecutor(providerRuntime))
: createMockActorExecutors(manifest, {
prompt: input.prompt,
topologyHint: input.topologyHint,
simulateValidationNodeIds: new Set(input.simulateValidationNodeIds ?? []),
});
prompt: input.prompt,
topologyHint: input.topologyHint,
simulateValidationNodeIds: new Set(input.simulateValidationNodeIds ?? []),
});
const engine = new SchemaDrivenExecutionEngine({
manifest,

View File

@@ -8,6 +8,7 @@ import {
import {
parseShellValidationPolicy,
parseToolClearancePolicy,
type SecurityViolationHandling,
type ShellValidationPolicy,
type ToolClearancePolicy,
} from "./schemas.js";
@@ -62,6 +63,10 @@ function normalizeToken(value: string): string {
return value.trim();
}
// Canonical lowercase form of a token, used as the key for
// case-insensitive allowlist/banlist lookups.
function normalizeLookupToken(value: string): string {
  const token = normalizeToken(value);
  return token.toLowerCase();
}
function hasPathTraversalSegment(token: string): boolean {
const normalized = token.replaceAll("\\", "/");
if (normalized === ".." || normalized.startsWith("../") || normalized.endsWith("/..")) {
@@ -100,6 +105,18 @@ function toToolSet(values: readonly string[]): Set<string> {
return out;
}
/**
 * Builds a case-insensitive lookup: normalized lowercase key -> the first
 * original spelling seen. Blank tokens are skipped and later duplicates do
 * not overwrite earlier entries.
 */
function toCaseInsensitiveLookup(values: readonly string[]): Map<string, string> {
  const lookup = new Map<string, string>();
  for (const original of values) {
    const key = normalizeLookupToken(original);
    if (key && !lookup.has(key)) {
      lookup.set(key, original);
    }
  }
  return lookup;
}
// Current wall-clock time as an ISO-8601 UTC string (audit timestamps).
function toNow(): string {
  const now = new Date();
  return now.toISOString();
}
@@ -133,10 +150,14 @@ export class SecurityRulesEngine {
private readonly blockedEnvAssignments: Set<string>;
private readonly worktreeRoot: string;
private readonly protectedPaths: string[];
private readonly violationHandling: SecurityViolationHandling;
constructor(
policy: ShellValidationPolicy,
private readonly auditSink?: SecurityAuditSink,
options?: {
violationHandling?: SecurityViolationHandling;
},
) {
this.policy = parseShellValidationPolicy(policy);
this.allowedBinaries = toToolSet(this.policy.allowedBinaries);
@@ -144,6 +165,7 @@ export class SecurityRulesEngine {
this.blockedEnvAssignments = toToolSet(this.policy.blockedEnvAssignments);
this.worktreeRoot = resolve(this.policy.worktreeRoot);
this.protectedPaths = this.policy.protectedPaths.map((path) => resolve(path));
this.violationHandling = options?.violationHandling ?? "hard_abort";
}
getPolicy(): ShellValidationPolicy {
@@ -212,6 +234,15 @@ export class SecurityRulesEngine {
code: error.code,
details: error.details,
});
if (this.violationHandling === "dangerous_warn_only") {
return {
cwd: resolvedCwd,
parsed: {
commandCount: 0,
commands: [],
},
};
}
throw error;
}
@@ -232,8 +263,11 @@ export class SecurityRulesEngine {
};
}): void {
const policy = parseToolClearancePolicy(input.toolClearance);
const normalizedTool = normalizeLookupToken(input.tool);
const banlistLookup = toCaseInsensitiveLookup(policy.banlist);
const allowlistLookup = toCaseInsensitiveLookup(policy.allowlist);
if (policy.banlist.includes(input.tool)) {
if (banlistLookup.has(normalizedTool)) {
this.emit({
...toAuditContext(input.context),
type: "tool.invocation_blocked",
@@ -252,7 +286,7 @@ export class SecurityRulesEngine {
);
}
if (policy.allowlist.length > 0 && !policy.allowlist.includes(input.tool)) {
if (policy.allowlist.length > 0 && !allowlistLookup.has(normalizedTool)) {
this.emit({
...toAuditContext(input.context),
type: "tool.invocation_blocked",
@@ -280,13 +314,15 @@ export class SecurityRulesEngine {
filterAllowedTools(tools: string[], toolClearance: ToolClearancePolicy): string[] {
const policy = parseToolClearancePolicy(toolClearance);
const allowlistLookup = toCaseInsensitiveLookup(policy.allowlist);
const banlistLookup = toCaseInsensitiveLookup(policy.banlist);
const allowedByAllowlist =
policy.allowlist.length === 0
? tools
: tools.filter((tool) => policy.allowlist.includes(tool));
: tools.filter((tool) => allowlistLookup.has(normalizeLookupToken(tool)));
return allowedByAllowlist.filter((tool) => !policy.banlist.includes(tool));
return allowedByAllowlist.filter((tool) => !banlistLookup.has(normalizeLookupToken(tool)));
}
private assertCwdBoundary(cwd: string): void {

View File

@@ -157,11 +157,15 @@ export function parseParsedShellScript(input: unknown): ParsedShellScript {
};
}
export type SecurityViolationHandling = "hard_abort" | "validation_fail";
export type SecurityViolationHandling =
| "hard_abort"
| "validation_fail"
| "dangerous_warn_only";
export const securityViolationHandlingSchema = z.union([
z.literal("hard_abort"),
z.literal("validation_fail"),
z.literal("dangerous_warn_only"),
]);
export function parseSecurityViolationHandling(input: unknown): SecurityViolationHandling {

View File

@@ -1,5 +1,6 @@
import { resolve } from "node:path";
import { loadConfig, type AppConfig } from "../config.js";
import type { SecurityViolationHandling } from "../security/index.js";
import { parseEnvFile, writeEnvFileUpdates } from "./env-store.js";
export type RuntimeNotificationSettings = {
@@ -9,7 +10,7 @@ export type RuntimeNotificationSettings = {
};
export type SecurityPolicySettings = {
violationMode: "hard_abort" | "validation_fail";
violationMode: SecurityViolationHandling;
allowedBinaries: string[];
commandTimeoutMs: number;
inheritedEnv: string[];

View File

@@ -1,90 +0,0 @@
import { createReadStream } from "node:fs";
import { stat } from "node:fs/promises";
import { extname, resolve } from "node:path";
import type { IncomingMessage, ServerResponse } from "node:http";
// File-extension (lower-case, dot-prefixed) to HTTP Content-Type header map.
// Extensions not listed here fall back to application/octet-stream in
// serveStaticFile.
const CONTENT_TYPES: Record<string, string> = {
  ".html": "text/html; charset=utf-8",
  ".js": "text/javascript; charset=utf-8",
  ".css": "text/css; charset=utf-8",
  ".json": "application/json; charset=utf-8",
  ".svg": "image/svg+xml",
};
/**
 * Serializes `body` as JSON and writes it to `response` with the given HTTP
 * status and a UTF-8 JSON content type. Serialization happens before any
 * header/status mutation so a JSON.stringify failure leaves the response
 * untouched.
 */
export function sendJson(response: ServerResponse, statusCode: number, body: unknown): void {
  const serialized = JSON.stringify(body);
  response.statusCode = statusCode;
  response.setHeader("Content-Type", "application/json; charset=utf-8");
  response.end(serialized);
}
/**
 * Writes a plain-text response body with the given HTTP status and a UTF-8
 * text/plain content type.
 */
export function sendText(response: ServerResponse, statusCode: number, body: string): void {
  response.setHeader("Content-Type", "text/plain; charset=utf-8");
  response.statusCode = statusCode;
  response.end(body);
}
/**
 * Reads the entire request body and parses it as JSON.
 *
 * Rejects with the underlying stream error, throws
 * "Request body is required." when the (trimmed) body is empty, and lets
 * JSON.parse's SyntaxError propagate for malformed JSON.
 */
export async function parseJsonBody<T>(request: IncomingMessage): Promise<T> {
  const collected: Buffer[] = [];
  // Async iteration over the readable is equivalent to data/end/error
  // listeners and surfaces stream errors as rejections.
  for await (const chunk of request) {
    collected.push(chunk as Buffer);
  }
  const text = Buffer.concat(collected).toString("utf8").trim();
  if (!text) {
    throw new Error("Request body is required.");
  }
  return JSON.parse(text) as T;
}
/** Responds with a 405 JSON error envelope. */
export function methodNotAllowed(response: ServerResponse): void {
  const body = { ok: false, error: "Method not allowed." };
  sendJson(response, 405, body);
}
/** Responds with a 404 JSON error envelope. */
export function notFound(response: ServerResponse): void {
  const body = { ok: false, error: "Not found." };
  sendJson(response, 404, body);
}
export async function serveStaticFile(input: {
response: ServerResponse;
filePath: string;
}): Promise<boolean> {
try {
const absolutePath = resolve(input.filePath);
const fileStats = await stat(absolutePath);
if (!fileStats.isFile()) {
return false;
}
const extension = extname(absolutePath).toLowerCase();
const contentType = CONTENT_TYPES[extension] ?? "application/octet-stream";
input.response.statusCode = 200;
input.response.setHeader("Content-Type", contentType);
await new Promise<void>((resolveStream, rejectStream) => {
const stream = createReadStream(absolutePath);
stream.on("error", rejectStream);
stream.on("end", () => resolveStream());
stream.pipe(input.response);
});
return true;
} catch (error) {
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
return false;
}
throw error;
}
}

View File

@@ -202,6 +202,7 @@
<select id="cfg-security-mode">
<option value="hard_abort">hard_abort</option>
<option value="validation_fail">validation_fail</option>
<option value="dangerous_warn_only">dangerous_warn_only</option>
</select>
</label>
<label>

File diff suppressed because it is too large Load Diff

View File

@@ -25,6 +25,7 @@ test("loads defaults and freezes config", () => {
"session.failed",
]);
assert.equal(config.provider.openAiAuthMode, "auto");
assert.equal(config.provider.claudeMaxTurns, 2);
assert.equal(config.provider.claudeObservability.mode, "off");
assert.equal(config.provider.claudeObservability.verbosity, "summary");
assert.equal(config.provider.claudeObservability.logPath, ".ai_ops/events/claude-trace.ndjson");
@@ -55,6 +56,11 @@ test("validates security violation mode", () => {
);
});
// dangerous_warn_only is an accepted AGENT_SECURITY_VIOLATION_MODE value and
// round-trips into config.security.violationHandling.
test("loads dangerous_warn_only security violation mode", () => {
  const config = loadConfig({ AGENT_SECURITY_VIOLATION_MODE: "dangerous_warn_only" });
  assert.equal(config.security.violationHandling, "dangerous_warn_only");
});
test("validates runtime discord severity mode", () => {
assert.throws(
() => loadConfig({ AGENT_RUNTIME_DISCORD_MIN_SEVERITY: "verbose" }),
@@ -69,6 +75,13 @@ test("validates claude observability mode", () => {
);
});
// CLAUDE_MAX_TURNS below 1 must be rejected at config load time with a
// descriptive error.
test("validates CLAUDE_MAX_TURNS bounds", () => {
  assert.throws(
    () => loadConfig({ CLAUDE_MAX_TURNS: "0" }),
    /CLAUDE_MAX_TURNS must be an integer >= 1/,
  );
});
test("validates claude observability verbosity", () => {
assert.throws(
() => loadConfig({ CLAUDE_OBSERVABILITY_VERBOSITY: "verbose" }),

View File

@@ -3,7 +3,7 @@ import assert from "node:assert/strict";
import { mkdtemp, readFile, writeFile } from "node:fs/promises";
import { tmpdir } from "node:os";
import { resolve } from "node:path";
import { parseEnvFile, writeEnvFileUpdates } from "../src/ui/env-store.js";
import { parseEnvFile, writeEnvFileUpdates } from "../src/store/env-store.js";
test("parseEnvFile handles missing files", async () => {
const root = await mkdtemp(resolve(tmpdir(), "ai-ops-env-store-"));

View File

@@ -148,7 +148,7 @@ test("rejects legacy edge trigger aliases", () => {
assert.throws(
() => parseAgentManifest(manifest),
/unsupported event "onValidationFail"/,
/Invalid option/,
);
});

View File

@@ -380,6 +380,7 @@ test("injects resolved mcp/helpers and enforces Claude tool gate in actor execut
);
assert.deepEqual(allow, {
behavior: "allow",
updatedInput: {},
toolUseID: "allow-1",
});
@@ -997,6 +998,7 @@ test("createClaudeCanUseTool accepts tool casing differences from providers", as
});
assert.deepEqual(allow, {
behavior: "allow",
updatedInput: {},
toolUseID: "allow-bash",
});
@@ -1020,6 +1022,88 @@ test("createClaudeCanUseTool accepts tool casing differences from providers", as
assert.equal(result.status, "success");
});
// End-to-end check that settings.securityViolationHandling =
// "dangerous_warn_only" bypasses the persona tool-clearance gate: the persona
// only allowlists "read_file", yet the Claude canUseTool callback still
// returns an allow decision for "Bash" and the session completes.
test("dangerous_warn_only allows tool use outside persona allowlist", async () => {
  const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
  const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));
  const projectContextPath = resolve(stateRoot, "project-context.json");
  // Minimal single-node pipeline whose persona clearance would normally block
  // anything other than read_file.
  const manifest = {
    schemaVersion: "1",
    topologies: ["sequential"],
    personas: [
      {
        id: "reader",
        displayName: "Reader",
        systemPromptTemplate: "Reader",
        toolClearance: {
          allowlist: ["read_file"],
          banlist: [],
        },
      },
    ],
    relationships: [],
    topologyConstraints: {
      maxDepth: 2,
      maxRetries: 0,
    },
    pipeline: {
      entryNodeId: "warn-node",
      nodes: [
        {
          id: "warn-node",
          actorId: "warn_actor",
          personaId: "reader",
        },
      ],
      edges: [],
    },
  } as const;
  const engine = new SchemaDrivenExecutionEngine({
    manifest,
    settings: {
      workspaceRoot,
      stateRoot,
      projectContextPath,
      maxChildren: 1,
      maxDepth: 2,
      maxRetries: 0,
      securityViolationHandling: "dangerous_warn_only",
      runtimeContext: {},
    },
    actorExecutors: {
      warn_actor: async (input) => {
        const canUseTool = input.mcp.createClaudeCanUseTool();
        // "Bash" is outside the persona allowlist; warn-only mode must still
        // allow the invocation.
        const allow = await canUseTool("Bash", {}, {
          signal: new AbortController().signal,
          toolUseID: "allow-bash-warn",
        });
        assert.deepEqual(allow, {
          behavior: "allow",
          updatedInput: {},
          toolUseID: "allow-bash-warn",
        });
        return {
          status: "success",
          payload: {
            ok: true,
          },
        };
      },
    },
  });
  const result = await engine.runSession({
    sessionId: "session-dangerous-warn-only",
    initialPayload: {
      task: "verify warn-only bypass",
    },
  });
  assert.equal(result.status, "success");
});
test("hard-aborts pipeline on security violations by default", async () => {
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));

View File

@@ -160,6 +160,7 @@ test("runClaudePrompt wires auth env, stream parsing, and output", async () => {
ANTHROPIC_API_KEY: "legacy-api-key",
CLAUDE_MODEL: "claude-sonnet-4-6",
CLAUDE_CODE_PATH: "/usr/local/bin/claude",
CLAUDE_MAX_TURNS: "5",
});
let closed = false;
@@ -229,6 +230,7 @@ test("runClaudePrompt wires auth env, stream parsing, and output", async () => {
assert.equal(queryInput?.prompt, "augmented prompt");
assert.equal(queryInput?.options?.model, "claude-sonnet-4-6");
assert.equal(queryInput?.options?.pathToClaudeCodeExecutable, "/usr/local/bin/claude");
assert.equal(queryInput?.options?.maxTurns, 5);
assert.equal(queryInput?.options?.cwd, "/tmp/claude-worktree");
assert.equal(queryInput?.options?.authToken, "oauth-token");
assert.deepEqual(queryInput?.options?.mcpServers, sessionContext.mcp.claudeMcpServers);

View File

@@ -1,6 +1,17 @@
import test from "node:test";
import assert from "node:assert/strict";
import { parseActorExecutionResultFromModelOutput } from "../src/ui/provider-executor.js";
import { mkdtemp } from "node:fs/promises";
import { tmpdir } from "node:os";
import { resolve } from "node:path";
import { loadConfig } from "../src/config.js";
import type { ActorExecutionInput } from "../src/agents/pipeline.js";
import {
buildProviderRuntimeEnv,
createProviderRunRuntime,
parseActorExecutionResultFromModelOutput,
resolveProviderWorkingDirectory,
type ProviderRunRuntime,
} from "../src/agents/provider-executor.js";
test("parseActorExecutionResultFromModelOutput parses strict JSON payload", () => {
const parsed = parseActorExecutionResultFromModelOutput({
@@ -64,3 +75,110 @@ test("parseActorExecutionResultFromModelOutput falls back when response is not J
assert.equal(parsed.status, "success");
assert.equal(parsed.payload?.assistantResponse, "Implemented update successfully.");
});
// Malformed optional fields in the model's JSON output must be dropped while
// preserving the core status/payload: non-boolean stateFlags entries are
// filtered, a non-object stateMetadata becomes undefined, events with a
// non-string type are removed, and unrecognized failureKind/failureCode
// values are cleared.
test("parseActorExecutionResultFromModelOutput preserves status when optional fields are malformed", () => {
  const parsed = parseActorExecutionResultFromModelOutput({
    rawText: JSON.stringify({
      status: "failure",
      payload: {
        reason: "hard failure",
      },
      stateFlags: {
        retryable: false,
        invalid_flag: "nope",
      },
      stateMetadata: "not-an-object",
      events: [
        {
          type: "validation_failed",
          payload: {
            summary: "failed",
          },
        },
        {
          type: 123,
        },
      ],
      failureKind: "not-valid",
      failureCode: 403,
    }),
  });
  assert.equal(parsed.status, "failure");
  assert.equal(parsed.payload?.reason, "hard failure");
  assert.equal(parsed.stateFlags?.retryable, false);
  assert.equal(parsed.stateFlags && "invalid_flag" in parsed.stateFlags, false);
  assert.equal(parsed.stateMetadata, undefined);
  assert.equal(parsed.events?.length, 1);
  assert.equal(parsed.events?.[0]?.type, "validation_failed");
  assert.equal(parsed.failureKind, undefined);
  assert.equal(parsed.failureCode, undefined);
});
// The provider's working directory is taken from the actor execution
// context's security.worktreePath.
test("resolveProviderWorkingDirectory reads cwd from actor execution context", () => {
  const actorInput = {
    executionContext: {
      security: {
        worktreePath: "/tmp/session/tasks/product-intake",
      },
    },
  } as unknown as ActorExecutionInput;
  assert.equal(
    resolveProviderWorkingDirectory(actorInput),
    "/tmp/session/tasks/product-intake",
  );
});
// buildProviderRuntimeEnv must scope AGENT_WORKTREE_PATH to the actor's
// worktree, pass the shared env through, include Claude auth variables that
// are configured, and omit auth variables that were never set (no undefined
// entries leaking into the child env).
test("buildProviderRuntimeEnv scopes AGENT_WORKTREE_PATH to actor worktree and filters undefined auth", () => {
  const config = loadConfig({
    CLAUDE_CODE_OAUTH_TOKEN: "oauth-token",
  });
  const runtime = {
    provider: "claude",
    config,
    sharedEnv: {
      PATH: "/usr/bin",
      KEEP_ME: "1",
    },
    claudeObservability: {} as ProviderRunRuntime["claudeObservability"],
    close: async () => {},
  } satisfies ProviderRunRuntime;
  const actorInput = {
    executionContext: {
      security: {
        worktreePath: "/tmp/session/tasks/product-intake",
      },
    },
  } as unknown as ActorExecutionInput;
  const env = buildProviderRuntimeEnv({
    runtime,
    actorInput,
    includeClaudeAuth: true,
  });
  assert.equal(env.AGENT_WORKTREE_PATH, "/tmp/session/tasks/product-intake");
  assert.equal(env.CLAUDE_CODE_OAUTH_TOKEN, "oauth-token");
  // ANTHROPIC_API_KEY was not configured, so it must not appear at all.
  assert.equal("ANTHROPIC_API_KEY" in env, false);
  assert.equal(env.KEEP_ME, "1");
});
// A provider runtime can be created from config + base env alone — no session
// context provisioning required — and close() releases it cleanly.
test("createProviderRunRuntime does not require session context provisioning", async () => {
  const observabilityRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-provider-runtime-"));
  const runtime = await createProviderRunRuntime({
    provider: "claude",
    config: loadConfig({}),
    observabilityRootPath: observabilityRoot,
    baseEnv: {
      PATH: "/usr/bin",
    },
  });
  try {
    assert.equal(runtime.provider, "claude");
    assert.equal(runtime.sharedEnv.PATH, "/usr/bin");
  } finally {
    // Always release the runtime even if the assertions fail.
    await runtime.close();
  }
});

View File

@@ -4,8 +4,8 @@ import { execFile } from "node:child_process";
import { mkdtemp, mkdir, stat, writeFile } from "node:fs/promises";
import { tmpdir } from "node:os";
import { resolve } from "node:path";
import { UiRunService, readRunMetaBySession } from "../src/runs/run-service.js";
import { promisify } from "node:util";
import { UiRunService, readRunMetaBySession } from "../src/ui/run-service.js";
const execFileAsync = promisify(execFile);

View File

@@ -111,6 +111,42 @@ test("rules engine enforces binary allowlist, tool policy, and path boundaries",
);
});
// In dangerous_warn_only mode a disallowed binary does not throw: the engine
// returns an empty parse result (commandCount 0, no commands) so execution
// can continue while the violation is only audited.
test("rules engine dangerous_warn_only logs but does not block violating shell commands", async () => {
  const worktreeRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-security-warn-worktree-"));
  const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-security-warn-state-"));
  const projectContextPath = resolve(stateRoot, "project-context.json");
  const rules = new SecurityRulesEngine(
    {
      allowedBinaries: ["git"],
      worktreeRoot,
      protectedPaths: [stateRoot, projectContextPath],
      requireCwdWithinWorktree: true,
      rejectRelativePathTraversal: true,
      enforcePathBoundaryOnArguments: true,
      allowedEnvAssignments: [],
      blockedEnvAssignments: [],
    },
    undefined,
    {
      violationHandling: "dangerous_warn_only",
    },
  );
  // unauthorized_bin is not in allowedBinaries; the default hard_abort mode
  // would throw here.
  const validated = await rules.validateShellCommand({
    command: "unauthorized_bin --version",
    cwd: worktreeRoot,
    toolClearance: {
      allowlist: ["git"],
      banlist: [],
    },
  });
  assert.equal(validated.cwd, worktreeRoot);
  assert.equal(validated.parsed.commandCount, 0);
  assert.deepEqual(validated.parsed.commands, []);
});
test("secure executor runs with explicit env policy", async () => {
const worktreeRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-security-exec-"));
@@ -193,3 +229,47 @@ test("rules engine carries session context in tool audit events", () => {
assert.equal(allowedEvent.nodeId, "node-ctx");
assert.equal(allowedEvent.attempt, 2);
});
// Tool clearance comparisons are case-insensitive in all three paths:
// allowlist acceptance, banlist rejection, and filterAllowedTools.
test("rules engine applies tool clearance matching case-insensitively", () => {
  const rules = new SecurityRulesEngine({
    allowedBinaries: ["git"],
    worktreeRoot: "/tmp",
    protectedPaths: [],
    requireCwdWithinWorktree: true,
    rejectRelativePathTraversal: true,
    enforcePathBoundaryOnArguments: true,
    allowedEnvAssignments: [],
    blockedEnvAssignments: [],
  });
  // "Bash" matches the allowlisted "bash" despite the casing difference.
  assert.doesNotThrow(() =>
    rules.assertToolInvocationAllowed({
      tool: "Bash",
      toolClearance: {
        allowlist: ["bash", "glob"],
        banlist: [],
      },
    }),
  );
  // A banlist entry ("GLOB") wins over an allowlist match for "Glob".
  assert.throws(
    () =>
      rules.assertToolInvocationAllowed({
        tool: "Glob",
        toolClearance: {
          allowlist: ["bash", "glob"],
          banlist: ["GLOB"],
        },
      }),
    (error: unknown) =>
      error instanceof SecurityViolationError && error.code === "TOOL_BANNED",
  );
  // Filtering keeps allowlisted tools and then removes banlisted ones,
  // all matched case-insensitively.
  assert.deepEqual(
    rules.filterAllowedTools(["Bash", "Glob", "Read"], {
      allowlist: ["bash", "glob"],
      banlist: ["gLoB"],
    }),
    ["Bash"],
  );
});

View File

@@ -4,7 +4,7 @@ import { mkdir, writeFile } from "node:fs/promises";
import { tmpdir } from "node:os";
import { resolve } from "node:path";
import { mkdtemp } from "node:fs/promises";
import { buildSessionGraphInsight, buildSessionSummaries } from "../src/ui/session-insights.js";
import { buildSessionGraphInsight, buildSessionSummaries } from "../src/telemetry/session-insights.js";
import { parseAgentManifest } from "../src/agents/manifest.js";
function createManifest() {
@@ -155,13 +155,13 @@ test("buildSessionGraphInsight maps attempts, edge visits, and sandbox payload",
assert.equal(graph.status, "success");
assert.equal(graph.nodes.length, 2);
const node2 = graph.nodes.find((node) => node.nodeId === "n2");
const node2 = graph.nodes.find((node: any) => node.nodeId === "n2");
assert.ok(node2);
assert.equal(node2.attemptCount, 2);
assert.equal(node2.subtaskCount, 1);
assert.equal(node2.sandboxPayload?.phase, "n2");
const edge = graph.edges.find((entry) => entry.from === "n1" && entry.to === "n2");
const edge = graph.edges.find((entry: any) => entry.from === "n1" && entry.to === "n2");
assert.ok(edge);
assert.equal(edge.visited, true);
assert.equal(edge.trigger, "event:validation_failed");

View File

@@ -228,3 +228,60 @@ test("session worktree manager recreates a task worktree after stale metadata pr
const stats = await stat(recreatedTaskWorktreePath);
assert.equal(stats.isDirectory(), true);
});
// With targetPath "app", the base workspace is sparse-checked-out to the app/
// subtree only (infra/ absent), working directories for both base and task
// worktrees resolve to <worktree>/app, and a change made in a task worktree
// merges back into the base workspace.
test("session worktree manager applies target path sparse checkout and task working directory", async () => {
  const root = await mkdtemp(resolve(tmpdir(), "ai-ops-session-worktree-target-"));
  const projectPath = resolve(root, "project");
  const worktreeRoot = resolve(root, "worktrees");
  // Seed a git repo with two top-level directories so sparse checkout has
  // something to exclude.
  await mkdir(resolve(projectPath, "app", "src"), { recursive: true });
  await mkdir(resolve(projectPath, "infra"), { recursive: true });
  await git(["init", projectPath]);
  await git(["-C", projectPath, "config", "user.name", "AI Ops"]);
  await git(["-C", projectPath, "config", "user.email", "ai-ops@example.local"]);
  await writeFile(resolve(projectPath, "app", "src", "index.ts"), "export const app = true;\n", "utf8");
  await writeFile(resolve(projectPath, "infra", "notes.txt"), "infra\n", "utf8");
  await git(["-C", projectPath, "add", "."]);
  await git(["-C", projectPath, "commit", "-m", "initial commit"]);
  const manager = new SessionWorktreeManager({
    worktreeRoot,
    baseRef: "HEAD",
    targetPath: "app",
  });
  const sessionId = "session-target-1";
  const baseWorkspacePath = manager.resolveBaseWorkspacePath(sessionId);
  await manager.initializeSessionBaseWorkspace({
    sessionId,
    projectPath,
    baseWorkspacePath,
  });
  const baseWorkingDirectory = manager.resolveWorkingDirectoryForWorktree(baseWorkspacePath);
  assert.equal(baseWorkingDirectory, resolve(baseWorkspacePath, "app"));
  const baseWorkingStats = await stat(baseWorkingDirectory);
  assert.equal(baseWorkingStats.isDirectory(), true);
  // infra/ must be excluded by the sparse checkout.
  await assert.rejects(() => stat(resolve(baseWorkspacePath, "infra")), {
    code: "ENOENT",
  });
  const ensured = await manager.ensureTaskWorktree({
    sessionId,
    taskId: "task-target-1",
    baseWorkspacePath,
  });
  assert.equal(ensured.taskWorkingDirectory, resolve(ensured.taskWorktreePath, "app"));
  // Write a change in the task worktree and merge it back into the base.
  await writeFile(resolve(ensured.taskWorkingDirectory, "src", "feature.ts"), "export const feature = true;\n", "utf8");
  const mergeOutcome = await manager.mergeTaskIntoBase({
    taskId: "task-target-1",
    baseWorkspacePath,
    taskWorktreePath: ensured.taskWorktreePath,
  });
  assert.equal(mergeOutcome.kind, "success");
  const merged = await readFile(resolve(baseWorkingDirectory, "src", "feature.ts"), "utf8");
  assert.equal(merged, "export const feature = true;\n");
});

24
ui/.gitignore vendored Normal file
View File

@@ -0,0 +1,24 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
node_modules
dist
dist-ssr
*.local
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

73
ui/README.md Normal file
View File

@@ -0,0 +1,73 @@
# React + TypeScript + Vite
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
Currently, two official plugins are available:
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Babel](https://babeljs.io/) (or [oxc](https://oxc.rs) when used in [rolldown-vite](https://vite.dev/guide/rolldown)) for Fast Refresh
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
## React Compiler
The React Compiler is not enabled on this template because of its impact on dev & build performance. To add it, see [this documentation](https://react.dev/learn/react-compiler/installation).
## Expanding the ESLint configuration
If you are developing a production application, we recommend updating the configuration to enable type-aware lint rules:
```js
export default defineConfig([
globalIgnores(['dist']),
{
files: ['**/*.{ts,tsx}'],
extends: [
// Other configs...
// Remove tseslint.configs.recommended and replace with this
tseslint.configs.recommendedTypeChecked,
// Alternatively, use this for stricter rules
tseslint.configs.strictTypeChecked,
// Optionally, add this for stylistic rules
tseslint.configs.stylisticTypeChecked,
// Other configs...
],
languageOptions: {
parserOptions: {
project: ['./tsconfig.node.json', './tsconfig.app.json'],
tsconfigRootDir: import.meta.dirname,
},
// other options...
},
},
])
```
You can also install [eslint-plugin-react-x](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-x) and [eslint-plugin-react-dom](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-dom) for React-specific lint rules:
```js
// eslint.config.js
import reactX from 'eslint-plugin-react-x'
import reactDom from 'eslint-plugin-react-dom'
export default defineConfig([
globalIgnores(['dist']),
{
files: ['**/*.{ts,tsx}'],
extends: [
// Other configs...
// Enable lint rules for React
reactX.configs['recommended-typescript'],
// Enable lint rules for React DOM
reactDom.configs.recommended,
],
languageOptions: {
parserOptions: {
project: ['./tsconfig.node.json', './tsconfig.app.json'],
tsconfigRootDir: import.meta.dirname,
},
// other options...
},
},
])
```

23
ui/eslint.config.js Normal file
View File

@@ -0,0 +1,23 @@
import js from '@eslint/js'
import globals from 'globals'
import reactHooks from 'eslint-plugin-react-hooks'
import reactRefresh from 'eslint-plugin-react-refresh'
import tseslint from 'typescript-eslint'
import { defineConfig, globalIgnores } from 'eslint/config'
// Flat ESLint config for the Vite React UI: base JS + TypeScript recommended
// rules plus React Hooks and React Refresh (Vite) rules for all .ts/.tsx
// files, with the build output directory ignored.
export default defineConfig([
  globalIgnores(['dist']),
  {
    files: ['**/*.{ts,tsx}'],
    extends: [
      js.configs.recommended,
      tseslint.configs.recommended,
      reactHooks.configs.flat.recommended,
      reactRefresh.configs.vite,
    ],
    languageOptions: {
      ecmaVersion: 2020,
      globals: globals.browser,
    },
  },
])

13
ui/index.html Normal file
View File

@@ -0,0 +1,13 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>ui</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

3354
ui/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

32
ui/package.json Normal file
View File

@@ -0,0 +1,32 @@
{
"name": "ui",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc -b && vite build",
"lint": "eslint .",
"preview": "vite preview"
},
"dependencies": {
"lucide-react": "^0.575.0",
"react": "^19.2.0",
"react-dom": "^19.2.0",
"react-router-dom": "^7.13.1"
},
"devDependencies": {
"@eslint/js": "^9.39.1",
"@types/node": "^24.10.1",
"@types/react": "^19.2.7",
"@types/react-dom": "^19.2.3",
"@vitejs/plugin-react": "^5.1.1",
"eslint": "^9.39.1",
"eslint-plugin-react-hooks": "^7.0.1",
"eslint-plugin-react-refresh": "^0.4.24",
"globals": "^16.5.0",
"typescript": "~5.9.3",
"typescript-eslint": "^8.48.0",
"vite": "^7.3.1"
}
}

1
ui/public/vite.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>

After

Width:  |  Height:  |  Size: 1.5 KiB

42
ui/src/App.css Normal file
View File

@@ -0,0 +1,42 @@
/* Default Vite + React starter styles for the UI. */

/* Center the app container and cap its width. */
#root {
  max-width: 1280px;
  margin: 0 auto;
  padding: 2rem;
  text-align: center;
}

/* Framework logos with a hover glow. */
.logo {
  height: 6em;
  padding: 1.5em;
  will-change: filter;
  transition: filter 300ms;
}
.logo:hover {
  filter: drop-shadow(0 0 2em #646cffaa);
}
.logo.react:hover {
  filter: drop-shadow(0 0 2em #61dafbaa);
}

@keyframes logo-spin {
  from {
    transform: rotate(0deg);
  }
  to {
    transform: rotate(360deg);
  }
}

/* Spin the second logo only when the user allows motion. */
@media (prefers-reduced-motion: no-preference) {
  a:nth-of-type(2) .logo {
    animation: logo-spin infinite 20s linear;
  }
}

.card {
  padding: 2em;
}

.read-the-docs {
  color: #888;
}

22
ui/src/App.tsx Normal file
View File

@@ -0,0 +1,22 @@
import { BrowserRouter, Routes, Route } from 'react-router-dom';
import MainLayout from './layouts/MainLayout';
import Dashboard from './pages/Dashboard';
import Settings from './pages/Settings';
import History from './pages/History';
function App() {
return (
<BrowserRouter>
<Routes>
<Route path="/" element={<MainLayout />}>
<Route index element={<Dashboard />} />
<Route path="history" element={<History />} />
<Route path="settings" element={<Settings />} />
</Route>
</Routes>
</BrowserRouter>
);
}
export default App;

1
ui/src/assets/react.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 
13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 
44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>

After

Width:  |  Height:  |  Size: 4.0 KiB

602
ui/src/index.css Normal file
View File

@@ -0,0 +1,602 @@
/* Global stylesheet for the AI Ops UI: design tokens, reset, app-shell
   layout (sidebar + top bar + scrollable content), panels, forms, badges,
   and a small utility-class vocabulary used by the React pages. */
/* Design tokens: colors, borders, shadows, radii, motion timings */
:root {
--bg-primary: #0a0a0c;
--bg-secondary: #121216;
--bg-tertiary: #1a1a20;
--bg-highlight: #23232b;
--text-primary: #ffffff;
--text-secondary: #94949e;
--text-tertiary: #6b6b76;
--accent-primary: #818cf8;
--accent-primary-hover: #6366f1;
--accent-secondary: #c084fc;
--success: #34d399;
--warning: #fbbf24;
--danger: #ef4444;
--info: #38bdf8;
--border-color: rgba(255, 255, 255, 0.08);
--border-focus: rgba(129, 140, 248, 0.5);
--shadow-sm: 0 1px 2px 0 rgba(0, 0, 0, 0.05);
--shadow-md: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06);
--shadow-lg: 0 10px 15px -3px rgba(0, 0, 0, 0.1), 0 4px 6px -2px rgba(0, 0, 0, 0.05);
--shadow-glow: 0 0 15px rgba(129, 140, 248, 0.3);
--radius-sm: 4px;
--radius-md: 8px;
--radius-lg: 12px;
--radius-xl: 16px;
--radius-full: 9999px;
--transition-fast: 150ms cubic-bezier(0.4, 0, 0.2, 1);
--transition-normal: 250ms cubic-bezier(0.4, 0, 0.2, 1);
--transition-slow: 350ms cubic-bezier(0.4, 0, 0.2, 1);
}
/* Global reset */
* {
box-sizing: border-box;
margin: 0;
padding: 0;
}
body {
font-family: 'Inter', system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
background-color: var(--bg-primary);
color: var(--text-primary);
line-height: 1.5;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
overflow: hidden;
/* App takes full height */
}
/* Typography elements */
h1,
h2,
h3,
h4,
h5,
h6 {
font-weight: 600;
line-height: 1.2;
color: var(--text-primary);
margin-bottom: 0.5rem;
}
h1 {
font-size: 1.5rem;
letter-spacing: -0.025em;
}
h2 {
font-size: 1.25rem;
letter-spacing: -0.025em;
}
h3 {
font-size: 1.125rem;
}
p {
color: var(--text-secondary);
}
/* Form Elements */
input,
select,
textarea,
button {
font-family: inherit;
font-size: 0.875rem;
background: var(--bg-tertiary);
color: var(--text-primary);
border: 1px solid var(--border-color);
border-radius: var(--radius-md);
padding: 0.5rem 0.75rem;
transition: all var(--transition-fast);
outline: none;
}
input:focus,
select:focus,
textarea:focus {
border-color: var(--accent-primary);
box-shadow: 0 0 0 2px var(--border-focus);
}
input:disabled,
select:disabled,
textarea:disabled,
button:disabled {
opacity: 0.6;
cursor: not-allowed;
}
button {
display: inline-flex;
align-items: center;
justify-content: center;
gap: 0.5rem;
font-weight: 500;
cursor: pointer;
background: var(--bg-highlight);
border: 1px solid var(--border-color);
}
button:hover:not(:disabled) {
background: var(--bg-tertiary);
border-color: rgba(255, 255, 255, 0.2);
}
button:active:not(:disabled) {
transform: translateY(1px);
}
/* Button variants: .primary (accent call-to-action) and .danger */
button.primary {
background: var(--accent-primary);
color: white;
border-color: transparent;
box-shadow: 0 2px 4px rgba(129, 140, 248, 0.2);
}
button.primary:hover:not(:disabled) {
background: var(--accent-primary-hover);
box-shadow: 0 4px 12px rgba(129, 140, 248, 0.3);
}
button.danger {
color: var(--danger);
border-color: rgba(239, 68, 68, 0.2);
background: rgba(239, 68, 68, 0.05);
}
button.danger:hover:not(:disabled) {
background: rgba(239, 68, 68, 0.1);
border-color: rgba(239, 68, 68, 0.3);
}
/* App Shell Layout */
#root {
display: flex;
height: 100vh;
width: 100vw;
overflow: hidden;
}
.sidebar {
width: 260px;
background-color: var(--bg-secondary);
border-right: 1px solid var(--border-color);
display: flex;
flex-direction: column;
z-index: 10;
}
.sidebar-header {
padding: 1.5rem;
border-bottom: 1px solid var(--border-color);
}
.sidebar-header h1 {
font-size: 1.125rem;
font-weight: 700;
background: linear-gradient(135deg, var(--accent-primary), var(--accent-secondary));
-webkit-background-clip: text;
-webkit-text-fill-color: transparent;
margin: 0;
}
.sidebar-header p {
font-size: 0.75rem;
margin-top: 0.25rem;
opacity: 0.7;
}
.sidebar-nav {
flex: 1;
padding: 1rem 0;
overflow-y: auto;
}
.nav-item {
display: flex;
align-items: center;
gap: 0.75rem;
padding: 0.75rem 1.5rem;
color: var(--text-secondary);
text-decoration: none;
font-size: 0.875rem;
font-weight: 500;
transition: all var(--transition-fast);
border-left: 2px solid transparent;
}
.nav-item:hover {
background-color: var(--bg-highlight);
color: var(--text-primary);
}
.nav-item.active {
background-color: rgba(129, 140, 248, 0.05);
color: var(--accent-primary);
border-left-color: var(--accent-primary);
}
.nav-item svg {
width: 18px;
height: 18px;
opacity: 0.8;
}
.nav-item.active svg {
opacity: 1;
}
.sidebar-footer {
padding: 1.25rem;
border-top: 1px solid var(--border-color);
display: flex;
align-items: center;
gap: 0.75rem;
font-size: 0.75rem;
color: var(--text-secondary);
}
/* Health indicator dot in the sidebar footer (.online / .offline) */
.status-dot {
width: 8px;
height: 8px;
border-radius: 50%;
background-color: var(--text-tertiary);
box-shadow: 0 0 0 0 rgba(0, 0, 0, 0);
transition: all var(--transition-normal);
}
.status-dot.online {
background-color: var(--success);
box-shadow: 0 0 8px var(--success);
animation: pulse 2s infinite;
}
.status-dot.offline {
background-color: var(--danger);
box-shadow: 0 0 8px var(--danger);
}
@keyframes pulse {
0% {
box-shadow: 0 0 0 0 rgba(52, 211, 153, 0.4);
}
70% {
box-shadow: 0 0 0 6px rgba(52, 211, 153, 0);
}
100% {
box-shadow: 0 0 0 0 rgba(52, 211, 153, 0);
}
}
.main-content {
flex: 1;
display: flex;
flex-direction: column;
background-color: var(--bg-primary);
overflow: hidden;
position: relative;
}
.top-bar {
height: 60px;
border-bottom: 1px solid var(--border-color);
background-color: rgba(10, 10, 12, 0.8);
backdrop-filter: blur(12px);
display: flex;
align-items: center;
padding: 0 2rem;
justify-content: space-between;
z-index: 5;
}
.page-title {
font-size: 1.125rem;
font-weight: 600;
margin: 0;
}
.content-scroll {
flex: 1;
overflow-y: auto;
padding: 2rem;
scroll-behavior: smooth;
}
/* Animations */
@keyframes fadeIn {
from {
opacity: 0;
transform: translateY(10px);
}
to {
opacity: 1;
transform: translateY(0);
}
}
.animate-fade-in {
animation: fadeIn 0.4s ease-out forwards;
}
/* Panel Layout */
.panel {
background: var(--bg-secondary);
border: 1px solid var(--border-color);
border-radius: var(--radius-lg);
padding: 1.5rem;
margin-bottom: 1.5rem;
box-shadow: var(--shadow-md);
}
.panel-header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 1.25rem;
padding-bottom: 0.75rem;
border-bottom: 1px solid var(--border-color);
}
.panel-title {
font-size: 1rem;
font-weight: 600;
margin: 0;
display: flex;
align-items: center;
gap: 0.5rem;
}
/* Forms & Inputs specific to layout */
.form-group {
margin-bottom: 1rem;
}
.form-group label {
display: block;
font-size: 0.8rem;
font-weight: 500;
margin-bottom: 0.4rem;
color: var(--text-secondary);
}
.form-row {
display: flex;
gap: 1rem;
margin-bottom: 1rem;
}
.form-row>* {
flex: 1;
}
.divider {
height: 1px;
background-color: var(--border-color);
margin: 1.5rem 0;
}
/* Tag / Badge */
.badge {
display: inline-flex;
align-items: center;
padding: 0.125rem 0.5rem;
border-radius: var(--radius-full);
font-size: 0.7rem;
font-weight: 600;
text-transform: uppercase;
letter-spacing: 0.05em;
}
.badge.success {
background: rgba(52, 211, 153, 0.15);
color: var(--success);
}
.badge.warning {
background: rgba(251, 191, 36, 0.15);
color: var(--warning);
border: 1px solid rgba(251, 191, 36, 0.3);
}
.badge.danger {
background: rgba(239, 68, 68, 0.15);
color: var(--danger);
}
.badge.info {
background: rgba(56, 189, 248, 0.15);
color: var(--info);
}
.badge.neutral {
background: var(--bg-highlight);
color: var(--text-secondary);
border: 1px solid var(--border-color);
}
/* Utility */
.text-subtle {
color: var(--text-tertiary);
font-size: 0.8rem;
}
.flex {
display: flex;
}
.justify-between {
justify-content: space-between;
}
.items-center {
align-items: center;
}
.gap-2 {
gap: 0.5rem;
}
.gap-4 {
gap: 1rem;
}
.mt-4 {
margin-top: 1rem;
}
.mb-4 {
margin-bottom: 1rem;
}
/* Grid Layout */
.dashboard-grid {
display: grid;
grid-template-columns: 2fr 1fr;
gap: 1.5rem;
}
.config-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
gap: 1.5rem;
}
/* Utilities added for React flow */
.flex-col {
flex-direction: column;
}
.flex-1 {
flex: 1;
}
.w-full {
width: 100%;
}
.h-full {
height: 100%;
}
.h-64 {
height: 16rem;
}
.h-48 {
height: 12rem;
}
.justify-center {
justify-content: center;
}
.text-center {
text-align: center;
}
.p-2 {
padding: 0.5rem;
}
.p-3 {
padding: 0.75rem;
}
.p-4 {
padding: 1rem;
}
.py-8 {
padding-top: 2rem;
padding-bottom: 2rem;
}
.ml-2 {
margin-left: 0.5rem;
}
.mb-0 {
margin-bottom: 0;
}
.mb-2 {
margin-bottom: 0.5rem;
}
.border {
border-width: 1px;
border-style: solid;
}
.border-b {
border-bottom-width: 1px;
border-style: solid;
}
.border-dashed {
border-style: dashed;
}
.border-color {
border-color: var(--border-color);
}
.rounded-lg {
border-radius: var(--radius-lg);
}
.bg-tertiary {
background-color: var(--bg-tertiary);
}
.text-sm {
font-size: 0.875rem;
}
.text-accent {
color: var(--accent-primary);
}
.text-success {
color: var(--success);
}
.text-info {
color: var(--info);
}
.text-warning {
color: var(--warning);
}
.font-medium {
font-weight: 500;
}
.font-mono {
font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
}
.opacity-75 {
opacity: 0.75;
}
.overflow-y-auto {
overflow-y: auto;
}
.overflow-x-auto {
overflow-x: auto;
}

78
ui/src/layouts/MainLayout.tsx Normal file
View File

@@ -0,0 +1,78 @@
import React, { useEffect, useState } from 'react';
import { NavLink, Outlet, useLocation } from 'react-router-dom';
import { LayoutDashboard, Settings, Activity, History } from 'lucide-react';

/**
 * Application shell: sidebar navigation, a top bar showing the current page
 * title, and an <Outlet /> where the routed page renders.
 *
 * Polls GET /api/health (once on mount, then every 10s) to drive the
 * online/offline status dot in the sidebar footer.
 */
const MainLayout: React.FC = () => {
  const [isServerOnline, setIsServerOnline] = useState(false);
  const location = useLocation();

  useEffect(() => {
    // Single probe shared by the mount-time check and the poller so both
    // paths treat non-2xx responses and network failures identically
    // (previously the initial check ignored non-ok responses, while the
    // interval check set the flag from res.ok).
    const checkHealth = () => {
      fetch('/api/health')
        .then((res) => setIsServerOnline(res.ok))
        .catch(() => setIsServerOnline(false));
    };
    checkHealth();
    const interval = setInterval(checkHealth, 10000);
    return () => clearInterval(interval);
  }, []);

  // Route pathname -> human-readable heading for the top bar.
  const getPageTitle = (pathname: string) => {
    switch (pathname) {
      case '/': return 'Dashboard Overview';
      case '/settings': return 'Definitions & Policies';
      case '/history': return 'Run History';
      default: return 'AI Ops';
    }
  };

  return (
    // NOTE(review): this div reuses id="root", duplicating the id of the
    // mount node (duplicate ids are invalid HTML). Kept as-is because the
    // #root selector in index.css provides the flex shell styling — confirm
    // and migrate to a dedicated class in a follow-up.
    <div id="root">
      <aside className="sidebar">
        <div className="sidebar-header">
          <h1>AI Ops Control</h1>
          <p>Topology &amp; Telemetry</p>
        </div>
        <nav className="sidebar-nav">
          <NavLink to="/" className={({ isActive }) => `nav-item ${isActive ? 'active' : ''}`}>
            <LayoutDashboard /> Dashboard
          </NavLink>
          <NavLink to="/history" className={({ isActive }) => `nav-item ${isActive ? 'active' : ''}`}>
            <History /> History
          </NavLink>
          <NavLink to="/settings" className={({ isActive }) => `nav-item ${isActive ? 'active' : ''}`}>
            <Settings /> Policies &amp; Limits
          </NavLink>
        </nav>
        <div className="sidebar-footer">
          <div className={`status-dot ${isServerOnline ? 'online' : 'offline'}`} />
          {isServerOnline ? 'Server Online' : 'Connecting...'}
        </div>
      </aside>
      <main className="main-content">
        <header className="top-bar">
          <h2 className="page-title">{getPageTitle(location.pathname)}</h2>
          <div>
            <div className="badge neutral flex items-center gap-2">
              <Activity size={14} /> Agent Ready
            </div>
          </div>
        </header>
        {/* key on pathname re-mounts the container so the fade-in animation
            replays on navigation */}
        <div className="content-scroll animate-fade-in" key={location.pathname}>
          <Outlet />
        </div>
      </main>
    </div>
  );
};

export default MainLayout;

10
ui/src/main.tsx Normal file
View File

@@ -0,0 +1,10 @@
import React from 'react'
import ReactDOM from 'react-dom/client'
import App from './App.tsx'
import './index.css'

// Entry point: mount the router-driven App under StrictMode.
// The #root container is provided by index.html; the non-null assertion
// mirrors the standard Vite template bootstrap.
const container = document.getElementById('root')!
const root = ReactDOM.createRoot(container)
root.render(
  <React.StrictMode>
    <App />
  </React.StrictMode>,
)

214
ui/src/pages/Dashboard.tsx Normal file
View File

@@ -0,0 +1,214 @@
import React, { useState, useEffect } from 'react';
import { Play, Activity, Box } from 'lucide-react';
const Dashboard: React.FC = () => {
const [sessions, setSessions] = useState<any[]>([]);
const [manifests, setManifests] = useState<string[]>([]);
const [selectedSession, setSelectedSession] = useState('');
// Job Form State
const [prompt, setPrompt] = useState('');
const [runManifest, setRunManifest] = useState('');
const [provider, setProvider] = useState('codex');
const [mode, setMode] = useState('provider');
const [topologyHint, setTopologyHint] = useState('');
const [runStatus, setRunStatus] = useState({ text: '', isError: false });
// Events State
const [events, setEvents] = useState<any[]>([]);
// Graph State
const [graphData, setGraphData] = useState<any>(null);
const [graphLoading, setGraphLoading] = useState(false);
useEffect(() => {
fetch('/api/sessions').then(r => r.json()).then(d => {
if (d.ok) setSessions(d.sessions || []);
});
fetch('/api/manifests').then(r => r.json()).then(d => {
if (d.ok) {
setManifests(d.manifests || []);
if (d.manifests?.length) setRunManifest(d.manifests[0]);
}
});
fetchEvents();
}, []);
const fetchEvents = () => {
fetch('/api/runtime-events?limit=50').then(r => r.json()).then(d => {
if (d.ok) {
setEvents(d.events || []);
}
});
};
const handleGraphRefresh = async () => {
if (!selectedSession || !runManifest) return;
setGraphLoading(true);
try {
const res = await fetch(`/api/sessions/graph?sessionId=${encodeURIComponent(selectedSession)}&manifestPath=${encodeURIComponent(runManifest)}`);
const data = await res.json();
if (data.ok) {
setGraphData(data.graph);
}
} catch (e) {
console.error(e);
} finally {
setGraphLoading(false);
}
};
const handleStartRun = async (e: React.FormEvent) => {
e.preventDefault();
setRunStatus({ text: 'Starting...', isError: false });
try {
const res = await fetch('/api/runs', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
prompt,
manifestPath: runManifest,
provider,
executionMode: mode,
topologyHint: topologyHint || undefined
})
});
const data = await res.json();
if (res.ok && data.ok) {
setRunStatus({ text: 'Run started successfully.', isError: false });
setPrompt('');
fetchEvents();
} else {
setRunStatus({ text: data.error || 'Failed to start run.', isError: true });
}
} catch (err: any) {
setRunStatus({ text: err.message, isError: true });
}
};
return (
<div className="dashboard-grid fade-in">
{/* Left Column */}
<div className="flex flex-col gap-4">
{/* Graph Visualizer Panel */}
<section className="panel">
<div className="panel-header">
<h2 className="panel-title">
<Network size={18} className="text-accent" /> Graph Visualizer
</h2>
<div className="flex gap-4">
<label className="text-subtle">
Session
<select className="ml-2 bg-tertiary" value={selectedSession} onChange={e => setSelectedSession(e.target.value)}>
<option value="">Select Session</option>
{sessions.map(s => (
<option key={s.id} value={s.id}>{s.id}</option>
))}
</select>
</label>
<button className="primary" type="button" onClick={handleGraphRefresh} disabled={graphLoading}>
{graphLoading ? 'Loading...' : 'Refresh'}
</button>
</div>
</div>
<div className="h-64 flex items-center justify-center border border-dashed border-color rounded-lg text-subtle overflow-hidden relative">
{graphData ? (
<pre className="text-xs overflow-auto w-full h-full p-4 text-left">
{JSON.stringify(graphData, null, 2)}
</pre>
) : (
<span>Select a Session and Manifest to view execution graph structure</span>
)}
</div>
</section>
{/* Live Event Feed Panel */}
<section className="panel">
<div className="panel-header">
<h2 className="panel-title">
<Activity size={18} className="text-info" /> Live Event Feed
</h2>
<button className="primary text-sm p-1" type="button" onClick={fetchEvents}>Refresh</button>
</div>
<div className="h-48 overflow-y-auto bg-tertiary rounded-lg p-2 text-sm font-mono text-subtle opacity-75 whitespace-pre-wrap">
{events.length === 0 ? '[Live Feed] Waiting for events...' : events.map((ev, i) => (
<div key={i}>[{new Date(ev.timestamp).toLocaleTimeString()}] {ev.type} - {ev.message}</div>
))}
</div>
</section>
</div>
{/* Right Column */}
<div className="flex flex-col gap-4">
{/* Job Trigger Panel */}
<aside className="panel mb-0">
<div className="panel-header">
<h2 className="panel-title">
<Play size={18} className="text-success" /> Job Trigger
</h2>
</div>
<form className="flex flex-col gap-4" onSubmit={handleStartRun}>
<div className="form-group mb-0">
<label>Prompt / Task</label>
<textarea rows={3} placeholder="Describe the run objective..." className="w-full" value={prompt} onChange={e => setPrompt(e.target.value)} required></textarea>
</div>
<div className="form-group mb-0">
<label>Manifest</label>
<select className="w-full" value={runManifest} onChange={e => setRunManifest(e.target.value)} required>
{manifests.map(m => (
<option key={m} value={m}>{m}</option>
))}
</select>
</div>
<div className="form-row mb-0">
<div className="form-group mb-0">
<label>Provider</label>
<select className="w-full" value={provider} onChange={e => setProvider(e.target.value)}>
<option value="codex">codex</option>
<option value="claude">claude</option>
</select>
</div>
<div className="form-group mb-0">
<label>Mode</label>
<select className="w-full" value={mode} onChange={e => setMode(e.target.value)}>
<option value="provider">provider</option>
<option value="mock">mock</option>
</select>
</div>
</div>
<div className="form-group mb-0">
<label>Topology Hint</label>
<input type="text" placeholder="sequential | parallel..." className="w-full" value={topologyHint} onChange={e => setTopologyHint(e.target.value)} />
</div>
<div className="flex justify-between items-center mt-2">
<button type="submit" className="primary w-full">Start Run</button>
</div>
{runStatus.text && (
<div className={`text-sm ${runStatus.isError ? 'text-danger' : 'text-success'}`}>{runStatus.text}</div>
)}
</form>
</aside>
{/* Node Inspector */}
<aside className="panel">
<div className="panel-header">
<h2 className="panel-title">
<Box size={18} className="text-warning" /> Node Inspector
</h2>
</div>
<div className="p-4 bg-tertiary rounded-lg text-center text-subtle text-sm">
Select a graph node to inspect details.
</div>
</aside>
</div>
</div>
);
};
// Add missing lucide import that was used from layout
import { Network } from 'lucide-react';
export default Dashboard;

88
ui/src/pages/History.tsx Normal file
View File

@@ -0,0 +1,88 @@
import React, { useEffect, useState } from 'react';
import { History as HistoryIcon, RefreshCw } from 'lucide-react';

/**
 * Run History page: lists past sessions/runs from GET /api/sessions in a
 * table, most recently updated first, with a manual refresh button.
 */
const History: React.FC = () => {
  const [runs, setRuns] = useState<any[]>([]);
  const [loading, setLoading] = useState(false);

  const fetchHistory = async () => {
    setLoading(true);
    try {
      const res = await fetch('/api/sessions');
      const data = await res.json();
      if (data.ok) {
        // Fix: /api/sessions responds with { ok, sessions } (Dashboard reads
        // d.sessions from the same endpoint); the original read `data.runs`,
        // which is never present, so the table was always empty. Prefer
        // `sessions` and keep `runs` as a defensive fallback. Also copy
        // before sorting so the fetched array is not mutated in place.
        const rows = data.sessions ?? data.runs ?? [];
        const sortedRuns = [...rows].sort((a: any, b: any) => {
          return new Date(b.updatedAt || 0).getTime() - new Date(a.updatedAt || 0).getTime();
        });
        setRuns(sortedRuns);
      }
    } catch (e) {
      console.error('Failed to fetch history', e);
    } finally {
      setLoading(false);
    }
  };

  useEffect(() => {
    fetchHistory();
  }, []);

  return (
    <div className="fade-in">
      <section className="panel">
        <div className="panel-header">
          <h2 className="panel-title">
            <HistoryIcon size={18} className="text-secondary" /> Run History
          </h2>
          <button className="primary" onClick={fetchHistory} disabled={loading}>
            <RefreshCw size={14} className={loading ? 'animate-spin' : ''} /> Refresh
          </button>
        </div>
        <div className="overflow-x-auto">
          <table className="w-full text-left border-collapse">
            <thead>
              <tr className="border-b border-color text-subtle">
                <th className="p-3 font-medium">Session ID</th>
                <th className="p-3 font-medium">Status</th>
                <th className="p-3 font-medium">Attempts</th>
                <th className="p-3 font-medium">Updated</th>
              </tr>
            </thead>
            <tbody>
              {runs.length === 0 ? (
                <tr>
                  <td className="p-3" colSpan={4}>
                    <div className="text-center py-8 text-subtle">
                      {loading ? 'Loading...' : 'No run history available.'}
                    </div>
                  </td>
                </tr>
              ) : (
                runs.map((run: any) => {
                  const dateStr = run.updatedAt ? new Date(run.updatedAt).toLocaleString() : 'N/A';
                  // Map run status -> row accent color.
                  let statusColor = 'text-subtle';
                  if (run.status === 'success') statusColor = 'text-success';
                  if (run.status === 'failure' || run.status === 'cancelled') statusColor = 'text-danger';
                  if (run.status === 'validation_fail') statusColor = 'text-warning';
                  return (
                    <tr key={run.id} className="border-b border-color hover:bg-highlight transition-colors">
                      {/* Sessions expose `id` (see the Dashboard session picker);
                          fall back to it when `sessionId` is absent. */}
                      <td className="p-3 font-mono text-sm">{run.sessionId || run.id || 'N/A'}</td>
                      <td className={`p-3 font-medium ${statusColor}`}>{run.status || 'unknown'}</td>
                      <td className="p-3">{run.attempts ?? 0}</td>
                      <td className="p-3 text-sm text-subtle">{dateStr}</td>
                    </tr>
                  );
                })
              )}
            </tbody>
          </table>
        </div>
      </section>
    </div>
  );
};

export default History;

293
ui/src/pages/Settings.tsx Normal file
View File

@@ -0,0 +1,293 @@
import React, { useState, useEffect } from 'react';
import { Shield, Bell, HardDrive, FileJson } from 'lucide-react';
const Settings: React.FC = () => {
// Config Status
const [configStatus, setConfigStatus] = useState({ text: '', isError: false, section: '' });
// Notifications State
const [webhookUrl, setWebhookUrl] = useState('');
const [severity, setSeverity] = useState('info');
const [alwaysNotify, setAlwaysNotify] = useState('');
// Security State
const [violationMode, setViolationMode] = useState('hard_abort');
const [allowedBinaries, setAllowedBinaries] = useState('');
// Limits State
const [maxConcurrent, setMaxConcurrent] = useState('');
const [maxSession, setMaxSession] = useState('');
useEffect(() => {
fetch('/api/config')
.then(res => res.json())
.then(data => {
if (data.ok && data.config) {
const cfg = data.config;
setWebhookUrl(cfg.runtimeEvents?.discordWebhookUrl || '');
setSeverity(cfg.runtimeEvents?.minSeverity || 'info');
setAlwaysNotify(cfg.runtimeEvents?.alwaysNotifyTypes?.join(',') || '');
setViolationMode(cfg.securityPolicy?.violationMode || 'hard_abort');
setAllowedBinaries(cfg.securityPolicy?.allowedBinaries?.join(',') || '');
setMaxConcurrent(cfg.limits?.maxConcurrentSessionLimit?.toString() || '');
setMaxSession(cfg.limits?.maxSessionAgentLimit?.toString() || '');
}
})
.catch(e => console.error("Failed to load config", e));
}, []);
const handleSaveNotifications = async (e: React.FormEvent) => {
e.preventDefault();
setConfigStatus({ text: 'Saving notifications...', isError: false, section: 'notifications' });
try {
const res = await fetch('/api/config/runtime-events', {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
discordWebhookUrl: webhookUrl || undefined,
minSeverity: severity,
alwaysNotifyTypes: alwaysNotify ? alwaysNotify.split(',').map(s => s.trim()) : undefined
})
});
const data = await res.json();
if (data.ok) setConfigStatus({ text: 'Saved successfully.', isError: false, section: 'notifications' });
else setConfigStatus({ text: data.error || 'Failed to save.', isError: true, section: 'notifications' });
} catch (err: any) {
setConfigStatus({ text: err.message, isError: true, section: 'notifications' });
}
};
const handleSaveSecurity = async (e: React.FormEvent) => {
e.preventDefault();
setConfigStatus({ text: 'Saving security...', isError: false, section: 'security' });
try {
const res = await fetch('/api/config/security', {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
violationMode,
allowedBinaries: allowedBinaries ? allowedBinaries.split(',').map(s => s.trim()) : undefined
})
});
const data = await res.json();
if (data.ok) setConfigStatus({ text: 'Saved successfully.', isError: false, section: 'security' });
else setConfigStatus({ text: data.error || 'Failed to save.', isError: true, section: 'security' });
} catch (err: any) {
setConfigStatus({ text: err.message, isError: true, section: 'security' });
}
};
const handleSaveLimits = async (e: React.FormEvent) => {
e.preventDefault();
setConfigStatus({ text: 'Saving limits...', isError: false, section: 'limits' });
try {
const res = await fetch('/api/config/limits', {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
maxConcurrentSessionLimit: parseInt(maxConcurrent) || undefined,
maxSessionAgentLimit: parseInt(maxSession) || undefined
})
});
const data = await res.json();
if (data.ok) setConfigStatus({ text: 'Saved successfully.', isError: false, section: 'limits' });
else setConfigStatus({ text: data.error || 'Failed to save.', isError: true, section: 'limits' });
} catch (err: any) {
setConfigStatus({ text: err.message, isError: true, section: 'limits' });
}
};
const [manifestPath, setManifestPath] = useState('');
const [manifestJson, setManifestJson] = useState('');
const [manifestStatus, setManifestStatus] = useState({ text: '', isError: false });
const handleLoadManifest = async () => {
if (!manifestPath) {
setManifestStatus({ text: 'Please enter a manifest path.', isError: true });
return;
}
setManifestStatus({ text: 'Loading...', isError: false });
try {
const res = await fetch(`/api/manifests/read?path=${encodeURIComponent(manifestPath)}`);
const data = await res.json();
if (data.ok) {
setManifestJson(JSON.stringify(data.manifest.source || data.manifest.manifest, null, 2));
setManifestStatus({ text: 'Manifest loaded successfully.', isError: false });
} else {
setManifestStatus({ text: data.error || 'Failed to load manifest.', isError: true });
}
} catch (e: any) {
setManifestStatus({ text: e.message || 'Error loading manifest.', isError: true });
}
};
const handleValidateManifest = async () => {
try {
const parsed = JSON.parse(manifestJson);
setManifestStatus({ text: 'Validating...', isError: false });
const res = await fetch('/api/manifests/validate', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ manifest: parsed })
});
const data = await res.json();
if (data.ok) {
setManifestStatus({ text: 'Manifest is valid.', isError: false });
} else {
setManifestStatus({ text: data.error || 'Manifest validation failed.', isError: true });
}
} catch (e: any) {
setManifestStatus({ text: 'Invalid JSON format.', isError: true });
}
};
// Persists the editor's JSON to `manifestPath` via the server.
// As with validation, JSON parsing is isolated so network/server failures
// are reported with their own message rather than "Invalid JSON format.".
const handleSaveManifest = async () => {
  if (!manifestPath) {
    setManifestStatus({ text: 'Please enter a manifest path.', isError: true });
    return;
  }
  let parsed: unknown;
  try {
    parsed = JSON.parse(manifestJson);
  } catch {
    setManifestStatus({ text: 'Invalid JSON format.', isError: true });
    return;
  }
  setManifestStatus({ text: 'Saving...', isError: false });
  try {
    const res = await fetch('/api/manifests/save', {
      method: 'PUT',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ path: manifestPath, manifest: parsed })
    });
    const data = await res.json();
    if (data.ok) {
      setManifestStatus({ text: 'Manifest saved successfully.', isError: false });
    } else {
      setManifestStatus({ text: data.error || 'Failed to save manifest.', isError: true });
    }
  } catch (e: unknown) {
    const msg = e instanceof Error && e.message ? e.message : 'Failed to save manifest.';
    setManifestStatus({ text: msg, isError: true });
  }
};
// Settings page layout: notification, security, and limits panels plus the
// full-width manifest builder. Status messages render only under the panel
// whose `configStatus.section` matches.
return (
  <div className="fade-in">
    <div className="config-grid">
      {/* Notifications */}
      <section className="panel">
        <div className="panel-header">
          <h2 className="panel-title">
            <Bell size={18} className="text-accent" /> Notifications
          </h2>
        </div>
        <form className="flex flex-col gap-3" onSubmit={handleSaveNotifications}>
          <div className="form-group mb-0">
            <label>Discord Webhook URL</label>
            <input type="text" className="w-full" placeholder="https://discord.com/api/webhooks/..." value={webhookUrl} onChange={(e) => setWebhookUrl(e.target.value)} />
          </div>
          <div className="form-group mb-0">
            <label>Min Severity</label>
            <select className="w-full" value={severity} onChange={(e) => setSeverity(e.target.value)}>
              <option value="info">info</option>
              <option value="warning">warning</option>
              <option value="critical">critical</option>
            </select>
          </div>
          <div className="form-group mb-0">
            <label>Always Notify Types (CSV)</label>
            <input type="text" className="w-full" value={alwaysNotify} onChange={(e) => setAlwaysNotify(e.target.value)} />
          </div>
          <button type="submit" className="primary mt-2">Save Notifications</button>
          {configStatus.section === 'notifications' && (
            <div className={`text-sm mt-1 ${configStatus.isError ? 'text-danger' : 'text-success'}`}>{configStatus.text}</div>
          )}
        </form>
      </section>
      {/* Security */}
      <section className="panel">
        <div className="panel-header">
          <h2 className="panel-title">
            <Shield size={18} className="text-warning" /> Security Policy
          </h2>
        </div>
        <form className="flex flex-col gap-3" onSubmit={handleSaveSecurity}>
          <div className="form-group mb-0">
            <label>Violation Mode</label>
            {/* Options mirror AGENT_SECURITY_VIOLATION_MODE in .env.example:
                hard_abort | validation_fail | dangerous_warn_only */}
            <select className="w-full" value={violationMode} onChange={(e) => setViolationMode(e.target.value)}>
              <option value="hard_abort">hard_abort</option>
              <option value="validation_fail">validation_fail</option>
              <option value="dangerous_warn_only">dangerous_warn_only</option>
            </select>
          </div>
          <div className="form-group mb-0">
            <label>Allowed Binaries (CSV)</label>
            <input type="text" className="w-full" value={allowedBinaries} onChange={(e) => setAllowedBinaries(e.target.value)} />
          </div>
          <button type="submit" className="primary mt-2">Save Security</button>
          {configStatus.section === 'security' && (
            <div className={`text-sm mt-1 ${configStatus.isError ? 'text-danger' : 'text-success'}`}>{configStatus.text}</div>
          )}
        </form>
      </section>
      {/* Limits */}
      <section className="panel">
        <div className="panel-header">
          <h2 className="panel-title">
            <HardDrive size={18} className="text-info" /> Platform Limits
          </h2>
        </div>
        <form className="flex flex-col gap-3" onSubmit={handleSaveLimits}>
          <div className="form-group mb-0">
            <label>AGENT_MAX_CONCURRENT</label>
            <input type="number" className="w-full" value={maxConcurrent} onChange={(e) => setMaxConcurrent(e.target.value)} />
          </div>
          <div className="form-group mb-0">
            <label>AGENT_MAX_SESSION</label>
            <input type="number" className="w-full" value={maxSession} onChange={(e) => setMaxSession(e.target.value)} />
          </div>
          <button type="submit" className="primary mt-2">Save Limits</button>
          {configStatus.section === 'limits' && (
            <div className={`text-sm mt-1 ${configStatus.isError ? 'text-danger' : 'text-success'}`}>{configStatus.text}</div>
          )}
        </form>
      </section>
      {/* Manifest Builder (full width) */}
      <section className="panel md:col-span-2">
        <div className="panel-header">
          <h2 className="panel-title">
            <FileJson size={18} className="text-success" /> Manifest Builder
          </h2>
        </div>
        <div className="flex flex-col gap-4">
          <div className="flex gap-4 mb-2">
            <input
              type="text"
              className="flex-1"
              placeholder=".ai_ops/manifests/default.json"
              value={manifestPath}
              onChange={(e) => setManifestPath(e.target.value)}
            />
            <button type="button" onClick={handleLoadManifest}>Load</button>
            <button type="button" onClick={handleValidateManifest}>Validate</button>
            <button type="button" className="primary" onClick={handleSaveManifest}>Save</button>
          </div>
          <textarea
            className="w-full font-mono text-sm bg-tertiary"
            rows={16}
            placeholder="{...}"
            value={manifestJson}
            onChange={(e) => setManifestJson(e.target.value)}
            spellCheck={false}
          ></textarea>
          {manifestStatus.text && (
            <div className={`text-sm mt-2 ${manifestStatus.isError ? 'text-danger' : 'text-success'}`}>
              {manifestStatus.text}
            </div>
          )}
        </div>
      </section>
    </div>
  </div>
);
};
export default Settings;

28
ui/tsconfig.app.json Normal file
View File

@@ -0,0 +1,28 @@
// TypeScript project for the browser bundle (everything under src/).
// Type-checking only: Vite/esbuild does the actual transpilation (noEmit).
{
  "compilerOptions": {
    "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
    "target": "ES2022",
    "useDefineForClassFields": true,
    "lib": ["ES2022", "DOM", "DOM.Iterable"],
    "module": "ESNext",
    // Ambient types for import.meta.env and asset imports.
    "types": ["vite/client"],
    "skipLibCheck": true,
    /* Bundler mode */
    "moduleResolution": "bundler",
    "allowImportingTsExtensions": true,
    "verbatimModuleSyntax": true,
    "moduleDetection": "force",
    "noEmit": true,
    // Automatic JSX runtime — no `import React` needed in components.
    "jsx": "react-jsx",
    /* Linting */
    "strict": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "erasableSyntaxOnly": true,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedSideEffectImports": true
  },
  "include": ["src"]
}

7
ui/tsconfig.json Normal file
View File

@@ -0,0 +1,7 @@
// Solution-style root config: compiles nothing itself ("files": []), just
// delegates to the app (browser) and node (tooling) sub-projects.
{
  "files": [],
  "references": [
    { "path": "./tsconfig.app.json" },
    { "path": "./tsconfig.node.json" }
  ]
}

26
ui/tsconfig.node.json Normal file
View File

@@ -0,0 +1,26 @@
// TypeScript project for Node-side tooling (the Vite config only).
// Type-checking only; no JS is emitted (noEmit).
{
  "compilerOptions": {
    "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
    "target": "ES2023",
    "lib": ["ES2023"],
    "module": "ESNext",
    // Node globals/process types for the config file.
    "types": ["node"],
    "skipLibCheck": true,
    /* Bundler mode */
    "moduleResolution": "bundler",
    "allowImportingTsExtensions": true,
    "verbatimModuleSyntax": true,
    "moduleDetection": "force",
    "noEmit": true,
    /* Linting */
    "strict": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "erasableSyntaxOnly": true,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedSideEffectImports": true
  },
  "include": ["vite.config.ts"]
}

15
ui/vite.config.ts Normal file
View File

@@ -0,0 +1,15 @@
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'

// https://vite.dev/config/
export default defineConfig({
  plugins: [react()],
  server: {
    proxy: {
      // Forward /api calls from the Vite dev server to the local agent
      // server. Use the loopback IP instead of `localhost`: on Node 17+
      // `localhost` can resolve to ::1 (IPv6) while the backend binds
      // 127.0.0.1, making proxied requests fail with ECONNREFUSED.
      '/api': {
        target: 'http://127.0.0.1:4317',
        changeOrigin: true,
      }
    }
  }
})