Refactor pipeline policies, MCP registry, and unified config/runtime

This commit is contained in:
2026-02-23 13:56:45 -05:00
parent 889087daa1
commit 9b4216dda9
22 changed files with 1441 additions and 587 deletions

View File

@@ -14,10 +14,10 @@ TypeScript runtime for deterministic multi-agent execution with:
## Architecture Summary ## Architecture Summary
- `SchemaDrivenExecutionEngine.runSession(...)` is the single execution entrypoint. - `SchemaDrivenExecutionEngine.runSession(...)` is the single execution entrypoint.
- `PipelineExecutor` owns runtime control flow and topology dispatch. - `PipelineExecutor` owns runtime control flow and topology dispatch while delegating failure classification and persistence/event side-effects to dedicated policies.
- `AgentManager` is an internal utility used by the pipeline when fan-out/retry-unrolled behavior is required. - `AgentManager` is an internal utility used by the pipeline when fan-out/retry-unrolled behavior is required.
- Session state is persisted under `AGENT_STATE_ROOT`. - Session state is persisted under `AGENT_STATE_ROOT`.
- Project state is persisted under `AGENT_PROJECT_CONTEXT_PATH` with domains: - Project state is persisted under `AGENT_PROJECT_CONTEXT_PATH` with schema-versioned JSON (`schemaVersion`) and domains:
- `globalFlags` - `globalFlags`
- `artifactPointers` - `artifactPointers`
- `taskQueue` - `taskQueue`
@@ -26,7 +26,9 @@ TypeScript runtime for deterministic multi-agent execution with:
- `src/agents` - `src/agents`
- `orchestration.ts`: engine facade and runtime wiring - `orchestration.ts`: engine facade and runtime wiring
- `pipeline.ts`: DAG runner, retry matrix, abort propagation, domain-event routing - `pipeline.ts`: DAG runner, retry matrix, aggregate session status, abort propagation, domain-event routing
- `failure-policy.ts`: hard/soft failure classification policy
- `lifecycle-observer.ts`: persistence/event lifecycle hooks for node attempts
- `manifest.ts`: schema parsing/validation for personas/topologies/edges - `manifest.ts`: schema parsing/validation for personas/topologies/edges
- `manager.ts`: recursive fan-out utility used by pipeline - `manager.ts`: recursive fan-out utility used by pipeline
- `state-context.ts`: persisted node handoffs + session state - `state-context.ts`: persisted node handoffs + session state
@@ -36,6 +38,7 @@ TypeScript runtime for deterministic multi-agent execution with:
- `provisioning.ts`: resource provisioning and child suballocation helpers - `provisioning.ts`: resource provisioning and child suballocation helpers
- `src/mcp`: MCP config types/conversion/handlers - `src/mcp`: MCP config types/conversion/handlers
- `src/examples`: provider entrypoints (`codex.ts`, `claude.ts`) - `src/examples`: provider entrypoints (`codex.ts`, `claude.ts`)
- `src/config.ts`: centralized env parsing/validation/defaulting
- `tests`: manager, manifest, pipeline/orchestration, state, provisioning, MCP - `tests`: manager, manifest, pipeline/orchestration, state, provisioning, MCP
## Setup ## Setup
@@ -93,6 +96,7 @@ Actors can emit events in `ActorExecutionResult.events`. Pipeline status also em
- hard failures: timeout/network/403-like failures tracked sequentially; at 2 consecutive hard failures the pipeline aborts fast - hard failures: timeout/network/403-like failures tracked sequentially; at 2 consecutive hard failures the pipeline aborts fast
- `AbortSignal` is passed into every actor execution input - `AbortSignal` is passed into every actor execution input
- session closure aborts child recursive work - session closure aborts child recursive work
- run summaries expose aggregate `status`: success requires successful terminal executed DAG nodes and no critical-path failure
## Environment Variables ## Environment Variables

30
docs/pipeline-policies.md Normal file
View File

@@ -0,0 +1,30 @@
# Pipeline Policies and Lifecycle Hooks
## Why this exists
`PipelineExecutor` previously handled DAG traversal, failure heuristics, state persistence, and domain-event emission in one execution loop. This made behavior harder to isolate and test.
## Current structure
- `FailurePolicy` (`src/agents/failure-policy.ts`)
- Owns hard vs soft failure classification.
- Determines whether a sequence of hard failures should abort execution.
- `PersistenceLifecycleObserver` (`src/agents/lifecycle-observer.ts`)
- Handles state patching, project-context updates, and domain-event publishing for each node attempt.
- `PipelineExecutor` (`src/agents/pipeline.ts`)
- Coordinates DAG traversal and retry behavior.
- Computes aggregate run status from executed terminal nodes plus critical-path failures.
## Aggregate status semantics
Run status is `success` only when both of the following hold:
1. Every executed terminal node (a leaf of the executed subgraph) has final status `success`.
2. No executed node on the critical path has final status `failure`.
In all other cases the run status is `failure`.
## Persistence guarantees
State and project-context writes are now atomic: content is written to a temp file in the destination directory and then renamed over the target.
Project-context patch/write operations are serialized both in-process (promise queue) and cross-process (lock file).

View File

@@ -0,0 +1,66 @@
import type { ActorExecutionResult, ActorFailureKind } from "./pipeline.js";
/**
 * Derive a human-readable message from an arbitrary thrown value.
 * Error instances contribute their message; anything else is stringified.
 */
function toErrorMessage(error: unknown): string {
  return error instanceof Error ? error.message : String(error);
}
/**
 * Heuristic for infrastructure-level ("hard") failure text: timeouts,
 * common network errno codes, and 403/forbidden responses. Matching is
 * case-insensitive substring matching over the combined failure text.
 */
function containsHardFailureSignal(value: string): boolean {
  const hardSignals = /(timeout|timed out|network|econnreset|econnrefused|enotfound|403|forbidden)/i;
  return hardSignals.test(value);
}
/**
 * Extract an errno-style string code (e.g. "ECONNRESET") from a thrown
 * value. Returns undefined for non-Error values or when no string code
 * is present.
 */
function toFailureCodeFromError(error: unknown): string | undefined {
  if (error instanceof Error) {
    const { code } = error as NodeJS.ErrnoException;
    if (typeof code === "string") {
      return code;
    }
  }
  return undefined;
}
/**
 * Centralized hard/soft failure classification for pipeline node attempts.
 *
 * "Hard" failures are infrastructure-level problems (timeouts, network
 * errors, 403s) that justify fast-abort behavior; everything else is a
 * "soft" failure handled by ordinary retry logic.
 */
export class FailurePolicy {
  /**
   * Decide whether a finished attempt counts as a hard failure. An explicit
   * `failureKind === "hard"` wins; otherwise only `failure`-status results
   * are inspected, scanning the failure code plus payload error text for
   * hard-failure signals.
   */
  isHardFailure(result: ActorExecutionResult): boolean {
    if (result.failureKind === "hard") {
      return true;
    }
    if (result.status !== "failure") {
      return false;
    }
    const rawMessage = result.payload?.error;
    const payloadText = typeof rawMessage === "string" ? rawMessage : "";
    return containsHardFailureSignal(`${result.failureCode ?? ""} ${payloadText}`);
  }

  /**
   * Classify a thrown error into a payload message, an optional errno-style
   * code, and a hard/soft kind. `failureCode` is omitted (not set to
   * undefined) when no code could be extracted.
   */
  classifyFailureFromError(error: unknown): {
    payloadErrorMessage: string;
    failureCode?: string;
    failureKind: ActorFailureKind;
  } {
    const payloadErrorMessage = toErrorMessage(error);
    const failureCode = toFailureCodeFromError(error);
    const isHard = containsHardFailureSignal(`${failureCode ?? ""} ${payloadErrorMessage}`);
    return {
      payloadErrorMessage,
      ...(failureCode ? { failureCode } : {}),
      failureKind: isHard ? "hard" : "soft",
    };
  }

  /** Whether the run should abort after N back-to-back hard failures. */
  shouldAbortAfterSequentialHardFailures(
    sequentialHardFailureCount: number,
    threshold: number,
  ): boolean {
    return sequentialHardFailureCount >= threshold;
  }
}

View File

@@ -0,0 +1,105 @@
import { randomUUID } from "node:crypto";
import { mkdir, open, rename, stat, unlink, writeFile } from "node:fs/promises";
import { basename, dirname, resolve } from "node:path";
/** Promise-based delay helper used for lock-acquisition backoff. */
function sleep(ms: number): Promise<void> {
  return new Promise<void>((done) => setTimeout(done, ms));
}
/** Remove a file, treating "already gone" (ENOENT) as success. */
async function cleanupFile(path: string): Promise<void> {
  try {
    await unlink(path);
  } catch (error) {
    const code = (error as NodeJS.ErrnoException).code;
    if (code !== "ENOENT") {
      throw error;
    }
  }
}
/**
 * Atomically replace `path` with `content` (UTF-8).
 *
 * The content is written to a uniquely named temp file in the destination
 * directory, flushed to disk (fsync) so a crash cannot let the rename
 * publish an empty/partial file, and then renamed over the target. Rename
 * within a single directory is atomic on POSIX, so readers observe either
 * the old or the new content — never a mix.
 *
 * On any failure the temp file is removed best-effort and the original
 * error is rethrown (cleanup problems never mask the write error).
 */
export async function writeUtf8FileAtomic(path: string, content: string): Promise<void> {
  const directory = dirname(path);
  await mkdir(directory, { recursive: true });
  // pid + UUID keeps concurrent writers from colliding on the temp name.
  const tempFileName = `.${basename(path)}.${String(process.pid)}.${randomUUID()}.tmp`;
  const tempPath = resolve(directory, tempFileName);
  try {
    const handle = await open(tempPath, "w");
    try {
      await handle.writeFile(content, "utf8");
      // Durability: force bytes to disk before the rename makes them visible.
      await handle.sync();
    } finally {
      await handle.close();
    }
    await rename(tempPath, path);
  } catch (error) {
    try {
      await unlink(tempPath);
    } catch {
      // Best-effort cleanup; the original error below is the one that matters.
    }
    throw error;
  }
}
/**
 * Attempt to create the lock file exclusively ("wx").
 * Returns the open handle on success, undefined when the lock is already
 * held (EEXIST), and rethrows any other filesystem error. On success a
 * small diagnostic marker (pid + timestamp) is written into the lock file.
 */
async function tryAcquireFileLock(
  lockPath: string,
): Promise<Awaited<ReturnType<typeof open>> | undefined> {
  let handle: Awaited<ReturnType<typeof open>> | undefined;
  try {
    handle = await open(lockPath, "wx");
  } catch (error) {
    if ((error as NodeJS.ErrnoException).code === "EEXIST") {
      return undefined;
    }
    throw error;
  }
  const marker = { pid: process.pid, acquiredAt: new Date().toISOString() };
  await handle.writeFile(`${JSON.stringify(marker)}\n`);
  return handle;
}
/**
 * Remove a lock file whose mtime is older than `staleAfterMs`, guarding
 * against locks orphaned by crashed processes. A lock that disappears
 * while being inspected (ENOENT) is treated as already cleared.
 */
async function clearStaleLock(lockPath: string, staleAfterMs: number): Promise<void> {
  let modifiedAtMs: number;
  try {
    modifiedAtMs = (await stat(lockPath)).mtimeMs;
  } catch (error) {
    if ((error as NodeJS.ErrnoException).code === "ENOENT") {
      return;
    }
    throw error;
  }
  if (Date.now() - modifiedAtMs > staleAfterMs) {
    await cleanupFile(lockPath);
  }
}
/**
 * Run `operation` while holding an exclusive cross-process file lock.
 *
 * Acquisition uses create-exclusive ("wx") with bounded retry/backoff;
 * stale locks (older than `staleAfterMs`, e.g. left by a crashed process)
 * are cleared between attempts. Throws after `maxWaitMs` of waiting.
 * The lock file is always closed and removed once `operation` settles.
 */
export async function withFileLock<T>(
  lockPath: string,
  operation: () => Promise<T>,
  options?: {
    maxWaitMs?: number;
    retryDelayMs?: number;
    staleAfterMs?: number;
  },
): Promise<T> {
  const { maxWaitMs = 5000, retryDelayMs = 25, staleAfterMs = 30_000 } = options ?? {};
  await mkdir(dirname(lockPath), { recursive: true });
  const deadline = Date.now() + maxWaitMs;
  // Bounded busy-wait: exclusive create coordinates writers across processes.
  for (;;) {
    const handle = await tryAcquireFileLock(lockPath);
    if (handle !== undefined) {
      try {
        return await operation();
      } finally {
        await handle.close();
        await cleanupFile(lockPath);
      }
    }
    await clearStaleLock(lockPath, staleAfterMs);
    if (Date.now() >= deadline) {
      throw new Error(`Timed out waiting for file lock: ${lockPath}`);
    }
    await sleep(retryDelayMs);
  }
}

View File

@@ -0,0 +1,126 @@
import {
DomainEventBus,
type DomainEvent,
type DomainEventType,
} from "./domain-events.js";
import type { PipelineNode } from "./manifest.js";
import { type ProjectContextPatch, type FileSystemProjectContextStore } from "./project-context.js";
import { PersonaRegistry } from "./persona-registry.js";
import {
FileSystemStateContextManager,
type SessionHistoryEntry,
} from "./state-context.js";
import type { ActorExecutionResult, ActorResultStatus } from "./pipeline.js";
export type PipelineNodeAttemptObservedEvent = {
sessionId: string;
node: PipelineNode;
attempt: number;
result: ActorExecutionResult;
domainEvents: DomainEvent[];
};
/** Map a terminal attempt status to the persona behavior hook it triggers. */
function toBehaviorEvent(status: ActorResultStatus): "onTaskComplete" | "onValidationFail" | undefined {
  switch (status) {
    case "success":
      return "onTaskComplete";
    case "validation_fail":
      return "onValidationFail";
    default:
      return undefined;
  }
}
export interface PipelineLifecycleObserver {
onNodeAttempt(event: PipelineNodeAttemptObservedEvent): Promise<void>;
}
export class PersistenceLifecycleObserver implements PipelineLifecycleObserver {
constructor(
private readonly input: {
personaRegistry: PersonaRegistry;
stateManager: FileSystemStateContextManager;
projectContextStore: FileSystemProjectContextStore;
domainEventBus?: DomainEventBus;
},
) {}
async onNodeAttempt(event: PipelineNodeAttemptObservedEvent): Promise<void> {
const behaviorEvent = toBehaviorEvent(event.result.status);
const behaviorPatch = behaviorEvent
? await this.input.personaRegistry.emitBehaviorEvent({
personaId: event.node.personaId,
event: behaviorEvent,
sessionId: event.sessionId,
nodeId: event.node.id,
payload: event.result.payload ?? {},
})
: {};
const legacyHistoryEvent: SessionHistoryEntry = {
nodeId: event.node.id,
event: event.result.status,
timestamp: new Date().toISOString(),
...(event.result.payload ? { data: event.result.payload } : {}),
};
const domainHistoryEvents: SessionHistoryEntry[] = event.domainEvents.map((domainEvent) => ({
nodeId: event.node.id,
event: domainEvent.type,
timestamp: domainEvent.timestamp,
data: {
source: domainEvent.source,
attempt: domainEvent.attempt,
...(domainEvent.payload.summary ? { summary: domainEvent.payload.summary } : {}),
...(domainEvent.payload.errorCode ? { errorCode: domainEvent.payload.errorCode } : {}),
...(domainEvent.payload.artifactPointer
? { artifactPointer: domainEvent.payload.artifactPointer }
: {}),
...(domainEvent.payload.details ? { details: domainEvent.payload.details } : {}),
},
}));
await this.input.stateManager.patchState(event.sessionId, {
...(event.result.stateFlags ? { flags: event.result.stateFlags } : {}),
metadata: {
...(event.result.stateMetadata ?? {}),
...behaviorPatch,
},
historyEvent: legacyHistoryEvent,
historyEvents: domainHistoryEvents,
});
const domainEventBus = this.input.domainEventBus;
if (domainEventBus) {
for (const domainEvent of event.domainEvents) {
await domainEventBus.publish(domainEvent);
}
}
const patch: ProjectContextPatch = {
...(event.result.projectContextPatch ?? {}),
artifactPointers: {
[`sessions/${event.sessionId}/last_completed_node`]: event.node.id,
[`sessions/${event.sessionId}/last_attempt`]: String(event.attempt),
...(event.result.projectContextPatch?.artifactPointers ?? {}),
},
};
await this.input.projectContextStore.patchState(patch);
}
}
/** Accumulates domain events across node attempts for the run summary. */
export class DomainEventCollector {
  private readonly events: DomainEvent[] = [];

  /** Append a batch of events, preserving arrival order. */
  record(events: DomainEvent[]): void {
    for (const event of events) {
      this.events.push(event);
    }
  }

  /** Project a list of events down to their type tags. */
  toEventTypes(events: DomainEvent[]): DomainEventType[] {
    return events.map(({ type }) => type);
  }

  /** Snapshot copy of everything recorded so far (callers cannot mutate internal state). */
  getAll(): DomainEvent[] {
    return this.events.slice();
  }
}

View File

@@ -1,4 +1,6 @@
import { resolve } from "node:path"; import { resolve } from "node:path";
import { getConfig, loadConfig, type AppConfig } from "../config.js";
import { createDefaultMcpRegistry, McpRegistry } from "../mcp.js";
import { parseAgentManifest, type AgentManifest } from "./manifest.js"; import { parseAgentManifest, type AgentManifest } from "./manifest.js";
import { AgentManager } from "./manager.js"; import { AgentManager } from "./manager.js";
import { import {
@@ -8,7 +10,6 @@ import {
} from "./persona-registry.js"; } from "./persona-registry.js";
import { PipelineExecutor, type ActorExecutor, type PipelineRunSummary } from "./pipeline.js"; import { PipelineExecutor, type ActorExecutor, type PipelineRunSummary } from "./pipeline.js";
import { FileSystemProjectContextStore } from "./project-context.js"; import { FileSystemProjectContextStore } from "./project-context.js";
import { loadAgentManagerLimitsFromEnv } from "./runtime.js";
import { FileSystemStateContextManager, type StoredSessionState } from "./state-context.js"; import { FileSystemStateContextManager, type StoredSessionState } from "./state-context.js";
import type { JsonObject } from "./types.js"; import type { JsonObject } from "./types.js";
@@ -26,59 +27,16 @@ export type BehaviorHandlerRegistry = Partial<
Record<string, Partial<Record<PersonaBehaviorEvent, PersonaBehaviorHandler>>> Record<string, Partial<Record<PersonaBehaviorEvent, PersonaBehaviorHandler>>>
>; >;
function readOptionalIntegerEnv( export function loadOrchestrationSettingsFromEnv(
key: env: NodeJS.ProcessEnv = process.env,
| "AGENT_TOPOLOGY_MAX_DEPTH" ): Omit<OrchestrationSettings, "workspaceRoot" | "runtimeContext"> {
| "AGENT_TOPOLOGY_MAX_RETRIES" const config = loadConfig(env);
| "AGENT_RELATIONSHIP_MAX_CHILDREN",
fallback: number,
min: number,
): number {
const raw = process.env[key]?.trim();
if (!raw) {
return fallback;
}
const parsed = Number(raw);
if (!Number.isInteger(parsed) || parsed < min) {
throw new Error(`Environment variable ${key} must be an integer >= ${String(min)}.`);
}
return parsed;
}
function readOptionalStringEnv(key: "AGENT_STATE_ROOT", fallback: string): string {
const raw = process.env[key]?.trim();
if (!raw) {
return fallback;
}
return raw;
}
function readOptionalProjectContextPathEnv(
key: "AGENT_PROJECT_CONTEXT_PATH",
fallback: string,
): string {
const raw = process.env[key]?.trim();
if (!raw) {
return fallback;
}
return raw;
}
export function loadOrchestrationSettingsFromEnv(): Omit<
OrchestrationSettings,
"workspaceRoot" | "runtimeContext"
> {
return { return {
stateRoot: readOptionalStringEnv("AGENT_STATE_ROOT", ".ai_ops/state"), stateRoot: config.orchestration.stateRoot,
projectContextPath: readOptionalProjectContextPathEnv( projectContextPath: config.orchestration.projectContextPath,
"AGENT_PROJECT_CONTEXT_PATH", maxDepth: config.orchestration.maxDepth,
".ai_ops/project-context.json", maxRetries: config.orchestration.maxRetries,
), maxChildren: config.orchestration.maxChildren,
maxDepth: readOptionalIntegerEnv("AGENT_TOPOLOGY_MAX_DEPTH", 4, 1),
maxRetries: readOptionalIntegerEnv("AGENT_TOPOLOGY_MAX_RETRIES", 2, 0),
maxChildren: readOptionalIntegerEnv("AGENT_RELATIONSHIP_MAX_CHILDREN", 4, 1),
}; };
} }
@@ -115,6 +73,7 @@ export class SchemaDrivenExecutionEngine {
private readonly settings: OrchestrationSettings; private readonly settings: OrchestrationSettings;
private readonly childrenByParent: Map<string, AgentManifest["relationships"]>; private readonly childrenByParent: Map<string, AgentManifest["relationships"]>;
private readonly manager: AgentManager; private readonly manager: AgentManager;
private readonly mcpRegistry: McpRegistry;
constructor(input: { constructor(input: {
manifest: AgentManifest | unknown; manifest: AgentManifest | unknown;
@@ -125,17 +84,21 @@ export class SchemaDrivenExecutionEngine {
runtimeContext?: Record<string, string | number | boolean>; runtimeContext?: Record<string, string | number | boolean>;
}; };
manager?: AgentManager; manager?: AgentManager;
mcpRegistry?: McpRegistry;
config?: Readonly<AppConfig>;
}) { }) {
this.manifest = parseAgentManifest(input.manifest); this.manifest = parseAgentManifest(input.manifest);
const defaults = loadOrchestrationSettingsFromEnv(); const config = input.config ?? getConfig();
this.settings = { this.settings = {
workspaceRoot: resolve(input.settings?.workspaceRoot ?? process.cwd()), workspaceRoot: resolve(input.settings?.workspaceRoot ?? process.cwd()),
stateRoot: resolve(input.settings?.stateRoot ?? defaults.stateRoot), stateRoot: resolve(input.settings?.stateRoot ?? config.orchestration.stateRoot),
projectContextPath: resolve(input.settings?.projectContextPath ?? defaults.projectContextPath), projectContextPath: resolve(
maxDepth: input.settings?.maxDepth ?? defaults.maxDepth, input.settings?.projectContextPath ?? config.orchestration.projectContextPath,
maxRetries: input.settings?.maxRetries ?? defaults.maxRetries, ),
maxChildren: input.settings?.maxChildren ?? defaults.maxChildren, maxDepth: input.settings?.maxDepth ?? config.orchestration.maxDepth,
maxRetries: input.settings?.maxRetries ?? config.orchestration.maxRetries,
maxChildren: input.settings?.maxChildren ?? config.orchestration.maxChildren,
runtimeContext: { runtimeContext: {
...(input.settings?.runtimeContext ?? {}), ...(input.settings?.runtimeContext ?? {}),
}, },
@@ -149,7 +112,14 @@ export class SchemaDrivenExecutionEngine {
}); });
this.actorExecutors = toExecutorMap(input.actorExecutors); this.actorExecutors = toExecutorMap(input.actorExecutors);
this.manager = input.manager ?? new AgentManager(loadAgentManagerLimitsFromEnv()); this.manager =
input.manager ??
new AgentManager({
maxConcurrentAgents: config.agentManager.maxConcurrentAgents,
maxSessionAgents: config.agentManager.maxSessionAgents,
maxRecursiveDepth: config.agentManager.maxRecursiveDepth,
});
this.mcpRegistry = input.mcpRegistry ?? createDefaultMcpRegistry();
for (const persona of this.manifest.personas) { for (const persona of this.manifest.personas) {
this.personaRegistry.register({ this.personaRegistry.register({
@@ -227,6 +197,7 @@ export class SchemaDrivenExecutionEngine {
manager: this.manager, manager: this.manager,
managerSessionId, managerSessionId,
projectContextStore: this.projectContextStore, projectContextStore: this.projectContextStore,
mcpRegistry: this.mcpRegistry,
}, },
); );
try { try {

View File

@@ -9,14 +9,19 @@ import {
type DomainEventPayload, type DomainEventPayload,
type DomainEventType, type DomainEventType,
} from "./domain-events.js"; } from "./domain-events.js";
import { FailurePolicy } from "./failure-policy.js";
import {
PersistenceLifecycleObserver,
type PipelineLifecycleObserver,
} from "./lifecycle-observer.js";
import type { AgentManifest, PipelineEdge, PipelineNode, RouteCondition } from "./manifest.js"; import type { AgentManifest, PipelineEdge, PipelineNode, RouteCondition } from "./manifest.js";
import type { AgentManager, RecursiveChildIntent } from "./manager.js"; import type { AgentManager, RecursiveChildIntent } from "./manager.js";
import type { McpRegistry } from "../mcp/handlers.js";
import { PersonaRegistry } from "./persona-registry.js"; import { PersonaRegistry } from "./persona-registry.js";
import { type ProjectContextPatch, type FileSystemProjectContextStore } from "./project-context.js"; import { type ProjectContextPatch, type FileSystemProjectContextStore } from "./project-context.js";
import { import {
FileSystemStateContextManager, FileSystemStateContextManager,
type NodeExecutionContext, type NodeExecutionContext,
type SessionHistoryEntry,
type StoredSessionState, type StoredSessionState,
} from "./state-context.js"; } from "./state-context.js";
import type { JsonObject } from "./types.js"; import type { JsonObject } from "./types.js";
@@ -59,11 +64,14 @@ export type PipelineExecutionRecord = {
export type PipelineRunSummary = { export type PipelineRunSummary = {
sessionId: string; sessionId: string;
status: PipelineAggregateStatus;
records: PipelineExecutionRecord[]; records: PipelineExecutionRecord[];
events: DomainEvent[]; events: DomainEvent[];
finalState: StoredSessionState; finalState: StoredSessionState;
}; };
export type PipelineAggregateStatus = "success" | "failure";
export type PipelineExecutorOptions = { export type PipelineExecutorOptions = {
workspaceRoot: string; workspaceRoot: string;
runtimeContext: Record<string, string | number | boolean>; runtimeContext: Record<string, string | number | boolean>;
@@ -72,6 +80,10 @@ export type PipelineExecutorOptions = {
manager: AgentManager; manager: AgentManager;
managerSessionId: string; managerSessionId: string;
projectContextStore: FileSystemProjectContextStore; projectContextStore: FileSystemProjectContextStore;
mcpRegistry: McpRegistry;
failurePolicy?: FailurePolicy;
lifecycleObserver?: PipelineLifecycleObserver;
hardFailureThreshold?: number;
}; };
type QueueItem = { type QueueItem = {
@@ -110,16 +122,6 @@ type NodeExecutionOutcome = {
hardFailureAttempts: boolean[]; hardFailureAttempts: boolean[];
}; };
function toBehaviorEvent(status: ActorResultStatus): "onTaskComplete" | "onValidationFail" | undefined {
if (status === "success") {
return "onTaskComplete";
}
if (status === "validation_fail") {
return "onValidationFail";
}
return undefined;
}
function shouldEdgeRun( function shouldEdgeRun(
edge: PipelineEdge, edge: PipelineEdge,
status: ActorResultStatus, status: ActorResultStatus,
@@ -228,51 +230,6 @@ function toAbortError(signal: AbortSignal): Error {
return error; return error;
} }
function toErrorMessage(error: unknown): string {
if (error instanceof Error) {
return error.message;
}
return String(error);
}
function toErrorPayload(error: unknown): JsonObject {
const errorMessage = toErrorMessage(error);
return {
error: errorMessage,
};
}
function toFailureCodeFromError(error: unknown): string | undefined {
if (!(error instanceof Error)) {
return undefined;
}
const maybeCode = (error as NodeJS.ErrnoException).code;
return typeof maybeCode === "string" ? maybeCode : undefined;
}
function containsHardFailureSignal(value: string): boolean {
return /(timeout|timed out|network|econnreset|econnrefused|enotfound|403|forbidden)/i.test(value);
}
function inferHardFailure(result: ActorExecutionResult): boolean {
if (result.failureKind === "hard") {
return true;
}
if (result.status !== "failure") {
return false;
}
const payloadText = (() => {
const message = result.payload?.error;
return typeof message === "string" ? message : "";
})();
const codeText = result.failureCode ?? "";
return containsHardFailureSignal(`${codeText} ${payloadText}`);
}
function defaultEventPayloadForStatus(status: ActorResultStatus): DomainEventPayload { function defaultEventPayloadForStatus(status: ActorResultStatus): DomainEventPayload {
if (status === "success") { if (status === "success") {
return { return {
@@ -323,6 +280,9 @@ export class PipelineExecutor {
private readonly nodeById = new Map<string, PipelineNode>(); private readonly nodeById = new Map<string, PipelineNode>();
private readonly edgesBySource = new Map<string, PipelineEdge[]>(); private readonly edgesBySource = new Map<string, PipelineEdge[]>();
private readonly domainEventBus = new DomainEventBus(); private readonly domainEventBus = new DomainEventBus();
private readonly failurePolicy: FailurePolicy;
private readonly lifecycleObserver: PipelineLifecycleObserver;
private readonly hardFailureThreshold: number;
private managerRunCounter = 0; private managerRunCounter = 0;
constructor( constructor(
@@ -332,6 +292,17 @@ export class PipelineExecutor {
private readonly actorExecutors: ReadonlyMap<string, ActorExecutor>, private readonly actorExecutors: ReadonlyMap<string, ActorExecutor>,
private readonly options: PipelineExecutorOptions, private readonly options: PipelineExecutorOptions,
) { ) {
this.failurePolicy = options.failurePolicy ?? new FailurePolicy();
this.hardFailureThreshold = options.hardFailureThreshold ?? 2;
this.lifecycleObserver =
options.lifecycleObserver ??
new PersistenceLifecycleObserver({
personaRegistry: this.personaRegistry,
stateManager: this.stateManager,
projectContextStore: this.options.projectContextStore,
domainEventBus: this.domainEventBus,
});
for (const node of manifest.pipeline.nodes) { for (const node of manifest.pipeline.nodes) {
this.nodeById.set(node.id, node); this.nodeById.set(node.id, node);
} }
@@ -418,9 +389,14 @@ export class PipelineExecutor {
sequentialHardFailures = 0; sequentialHardFailures = 0;
} }
if (sequentialHardFailures >= 2) { if (
this.failurePolicy.shouldAbortAfterSequentialHardFailures(
sequentialHardFailures,
this.hardFailureThreshold,
)
) {
throw new Error( throw new Error(
"Hard failure threshold reached (>=2 sequential API/network/403 failures). Pipeline aborted.", `Hard failure threshold reached (>=${String(this.hardFailureThreshold)} sequential API/network/403 failures). Pipeline aborted.`,
); );
} }
} }
@@ -461,7 +437,8 @@ export class PipelineExecutor {
} }
const finalState = await this.stateManager.readState(input.sessionId); const finalState = await this.stateManager.readState(input.sessionId);
if (records.length > 0 && records[records.length - 1]?.status === "success") { const status = this.computeAggregateStatus(records);
if (status === "success") {
await this.options.projectContextStore.patchState({ await this.options.projectContextStore.patchState({
artifactPointers: { artifactPointers: {
[`sessions/${input.sessionId}/final_state`]: this.stateManager.getSessionStatePath(input.sessionId), [`sessions/${input.sessionId}/final_state`]: this.stateManager.getSessionStatePath(input.sessionId),
@@ -471,12 +448,39 @@ export class PipelineExecutor {
return { return {
sessionId: input.sessionId, sessionId: input.sessionId,
status,
records, records,
events, events,
finalState, finalState,
}; };
} }
private computeAggregateStatus(records: PipelineExecutionRecord[]): PipelineAggregateStatus {
if (records.length === 0) {
return "failure";
}
const finalStatusByNode = new Map<string, ActorResultStatus>();
for (const record of records) {
finalStatusByNode.set(record.nodeId, record.status);
}
const executedNodeIds = new Set(finalStatusByNode.keys());
const terminalNodeIds = [...executedNodeIds].filter((nodeId) => {
const outgoingEdges = this.edgesBySource.get(nodeId) ?? [];
return !outgoingEdges.some((edge) => executedNodeIds.has(edge.to));
});
const allTerminalNodesSucceeded =
terminalNodeIds.length > 0 &&
terminalNodeIds.every((nodeId) => finalStatusByNode.get(nodeId) === "success");
const hasCriticalPathFailure = [...finalStatusByNode.values()].some(
(status) => status === "failure",
);
return allTerminalNodesSucceeded && !hasCriticalPathFailure ? "success" : "failure";
}
private buildExecutionGroups(frontier: QueueItem[]): ExecutionGroup[] { private buildExecutionGroups(frontier: QueueItem[]): ExecutionGroup[] {
const groupsByKey = new Map<string, ExecutionGroup>(); const groupsByKey = new Map<string, ExecutionGroup>();
@@ -611,7 +615,7 @@ export class PipelineExecutor {
customEvents: result.events, customEvents: result.events,
}); });
await this.persistNodeAttempt({ await this.lifecycleObserver.onNodeAttempt({
sessionId, sessionId,
node, node,
attempt, attempt,
@@ -629,7 +633,7 @@ export class PipelineExecutor {
}); });
nodeEvents.push(...domainEvents); nodeEvents.push(...domainEvents);
const hardFailure = inferHardFailure(result); const hardFailure = this.failurePolicy.isHardFailure(result);
hardFailureAttempts.push(hardFailure); hardFailureAttempts.push(hardFailure);
const payloadForNext = result.payload ?? context.handoff.payload; const payloadForNext = result.payload ?? context.handoff.payload;
@@ -721,16 +725,15 @@ export class PipelineExecutor {
throw toAbortError(input.signal); throw toAbortError(input.signal);
} }
const failureCode = toFailureCodeFromError(error); const classified = this.failurePolicy.classifyFailureFromError(error);
const failureKind = containsHardFailureSignal(`${failureCode ?? ""} ${toErrorMessage(error)}`)
? "hard"
: "soft";
return { return {
status: "failure", status: "failure",
payload: toErrorPayload(error), payload: {
failureCode, error: classified.payloadErrorMessage,
failureKind, },
failureCode: classified.failureCode,
failureKind: classified.failureKind,
}; };
} }
} }
@@ -763,68 +766,4 @@ export class PipelineExecutor {
}), }),
); );
} }
private async persistNodeAttempt(input: {
sessionId: string;
node: PipelineNode;
attempt: number;
result: ActorExecutionResult;
domainEvents: DomainEvent[];
}): Promise<void> {
const behaviorEvent = toBehaviorEvent(input.result.status);
const behaviorPatch = behaviorEvent
? await this.personaRegistry.emitBehaviorEvent({
personaId: input.node.personaId,
event: behaviorEvent,
sessionId: input.sessionId,
nodeId: input.node.id,
payload: input.result.payload ?? {},
})
: {};
const legacyHistoryEvent: SessionHistoryEntry = {
nodeId: input.node.id,
event: input.result.status,
timestamp: new Date().toISOString(),
...(input.result.payload ? { data: input.result.payload } : {}),
};
const domainHistoryEvents: SessionHistoryEntry[] = input.domainEvents.map((event) => ({
nodeId: input.node.id,
event: event.type,
timestamp: event.timestamp,
data: {
source: event.source,
attempt: event.attempt,
...(event.payload.summary ? { summary: event.payload.summary } : {}),
...(event.payload.errorCode ? { errorCode: event.payload.errorCode } : {}),
...(event.payload.artifactPointer ? { artifactPointer: event.payload.artifactPointer } : {}),
...(event.payload.details ? { details: event.payload.details } : {}),
},
}));
await this.stateManager.patchState(input.sessionId, {
...(input.result.stateFlags ? { flags: input.result.stateFlags } : {}),
metadata: {
...(input.result.stateMetadata ?? {}),
...behaviorPatch,
},
historyEvent: legacyHistoryEvent,
historyEvents: domainHistoryEvents,
});
for (const event of input.domainEvents) {
await this.domainEventBus.publish(event);
}
const patch: ProjectContextPatch = {
...(input.result.projectContextPatch ?? {}),
artifactPointers: {
[`sessions/${input.sessionId}/last_completed_node`]: input.node.id,
[`sessions/${input.sessionId}/last_attempt`]: String(input.attempt),
...(input.result.projectContextPatch?.artifactPointers ?? {}),
},
};
await this.options.projectContextStore.patchState(patch);
}
} }

View File

@@ -1,7 +1,10 @@
import { mkdir, readFile, writeFile } from "node:fs/promises"; import { readFile } from "node:fs/promises";
import { dirname, resolve } from "node:path"; import { resolve } from "node:path";
import { withFileLock, writeUtf8FileAtomic } from "./file-persistence.js";
import { deepCloneJson, isRecord, type JsonObject, type JsonValue } from "./types.js"; import { deepCloneJson, isRecord, type JsonObject, type JsonValue } from "./types.js";
export const PROJECT_CONTEXT_SCHEMA_VERSION = 1;
export type ProjectTaskStatus = "pending" | "in_progress" | "blocked" | "done"; export type ProjectTaskStatus = "pending" | "in_progress" | "blocked" | "done";
export type ProjectTask = { export type ProjectTask = {
@@ -13,6 +16,7 @@ export type ProjectTask = {
}; };
export type ProjectContextState = { export type ProjectContextState = {
schemaVersion: number;
globalFlags: Record<string, boolean>; globalFlags: Record<string, boolean>;
artifactPointers: Record<string, string>; artifactPointers: Record<string, string>;
taskQueue: ProjectTask[]; taskQueue: ProjectTask[];
@@ -27,6 +31,7 @@ export type ProjectContextPatch = {
}; };
const DEFAULT_PROJECT_CONTEXT: ProjectContextState = { const DEFAULT_PROJECT_CONTEXT: ProjectContextState = {
schemaVersion: PROJECT_CONTEXT_SCHEMA_VERSION,
globalFlags: {}, globalFlags: {},
artifactPointers: {}, artifactPointers: {},
taskQueue: [], taskQueue: [],
@@ -103,20 +108,41 @@ function toStringRecord(value: unknown, label: string): Record<string, string> {
return out; return out;
} }
function toSchemaVersion(value: unknown): number {
if (value === undefined) {
return PROJECT_CONTEXT_SCHEMA_VERSION;
}
if (typeof value !== "number" || !Number.isInteger(value) || value < 1) {
throw new Error("Project context schemaVersion must be an integer >= 1.");
}
return value;
}
function toProjectContextState(value: unknown): ProjectContextState { function toProjectContextState(value: unknown): ProjectContextState {
if (!isRecord(value)) { if (!isRecord(value)) {
throw new Error("Project context store is malformed."); throw new Error("Project context store is malformed.");
} }
const tasksRaw = value.taskQueue; const tasksRaw = value.taskQueue;
if (!Array.isArray(tasksRaw)) { if (tasksRaw !== undefined && !Array.isArray(tasksRaw)) {
throw new Error("Project context taskQueue is malformed."); throw new Error("Project context taskQueue is malformed.");
} }
return { return {
globalFlags: toBooleanRecord(value.globalFlags, "Project context globalFlags"), schemaVersion: toSchemaVersion(value.schemaVersion),
artifactPointers: toStringRecord(value.artifactPointers, "Project context artifactPointers"), globalFlags:
taskQueue: tasksRaw.map((task, index) => toProjectTask(task, `Project context taskQueue[${String(index)}]`)), value.globalFlags === undefined
? { ...DEFAULT_PROJECT_CONTEXT.globalFlags }
: toBooleanRecord(value.globalFlags, "Project context globalFlags"),
artifactPointers:
value.artifactPointers === undefined
? { ...DEFAULT_PROJECT_CONTEXT.artifactPointers }
: toStringRecord(value.artifactPointers, "Project context artifactPointers"),
taskQueue: (tasksRaw ?? []).map((task, index) =>
toProjectTask(task, `Project context taskQueue[${String(index)}]`),
),
}; };
} }
@@ -142,10 +168,12 @@ function mergeUpsertTasks(current: ProjectTask[], upserts: ProjectTask[]): Proje
export class FileSystemProjectContextStore { export class FileSystemProjectContextStore {
private readonly filePath: string; private readonly filePath: string;
private readonly lockPath: string;
private queue: Promise<void> = Promise.resolve(); private queue: Promise<void> = Promise.resolve();
constructor(input: { filePath: string }) { constructor(input: { filePath: string }) {
this.filePath = resolve(input.filePath); this.filePath = resolve(input.filePath);
this.lockPath = `${this.filePath}.lock`;
} }
getFilePath(): string { getFilePath(): string {
@@ -153,28 +181,22 @@ export class FileSystemProjectContextStore {
} }
async readState(): Promise<ProjectContextState> { async readState(): Promise<ProjectContextState> {
try { return this.readStateFromDisk();
const content = await readFile(this.filePath, "utf8");
const parsed = JSON.parse(content) as unknown;
return toProjectContextState(parsed);
} catch (error) {
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
return cloneState(DEFAULT_PROJECT_CONTEXT);
}
throw error;
}
} }
async writeState(state: ProjectContextState): Promise<void> { async writeState(state: ProjectContextState): Promise<void> {
await this.runSerialized(async () => { await this.runSerialized(async () => {
await mkdir(dirname(this.filePath), { recursive: true }); await withFileLock(this.lockPath, async () => {
await writeFile(this.filePath, `${JSON.stringify(state, null, 2)}\n`, "utf8"); const normalizedState = toProjectContextState(state);
await writeUtf8FileAtomic(this.filePath, `${JSON.stringify(normalizedState, null, 2)}\n`);
});
}); });
} }
async patchState(patch: ProjectContextPatch): Promise<ProjectContextState> { async patchState(patch: ProjectContextPatch): Promise<ProjectContextState> {
return this.runSerialized(async () => { return this.runSerialized(async () =>
const current = await this.readState(); withFileLock(this.lockPath, async () => {
const current = await this.readStateFromDisk();
if (patch.globalFlags) { if (patch.globalFlags) {
Object.assign(current.globalFlags, patch.globalFlags); Object.assign(current.globalFlags, patch.globalFlags);
@@ -201,10 +223,25 @@ export class FileSystemProjectContextStore {
current.taskQueue = mergeUpsertTasks(current.taskQueue, upsertTasks); current.taskQueue = mergeUpsertTasks(current.taskQueue, upsertTasks);
} }
await mkdir(dirname(this.filePath), { recursive: true }); current.schemaVersion = Math.max(current.schemaVersion, PROJECT_CONTEXT_SCHEMA_VERSION);
await writeFile(this.filePath, `${JSON.stringify(current, null, 2)}\n`, "utf8");
await writeUtf8FileAtomic(this.filePath, `${JSON.stringify(current, null, 2)}\n`);
return current; return current;
}); }),
);
}
private async readStateFromDisk(): Promise<ProjectContextState> {
try {
const content = await readFile(this.filePath, "utf8");
const parsed = JSON.parse(content) as unknown;
return toProjectContextState(parsed);
} catch (error) {
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
return cloneState(DEFAULT_PROJECT_CONTEXT);
}
throw error;
}
} }
private runSerialized<T>(operation: () => Promise<T>): Promise<T> { private runSerialized<T>(operation: () => Promise<T>): Promise<T> {

View File

@@ -1,3 +1,4 @@
import { getConfig, loadConfig, type AppConfig } from "../config.js";
import { AgentManager, type AgentManagerLimits } from "./manager.js"; import { AgentManager, type AgentManagerLimits } from "./manager.js";
import { import {
createDefaultResourceProvisioningOrchestrator, createDefaultResourceProvisioningOrchestrator,
@@ -5,143 +6,58 @@ import {
type ResourceProvisioningOrchestrator, type ResourceProvisioningOrchestrator,
} from "./provisioning.js"; } from "./provisioning.js";
const DEFAULT_LIMITS: AgentManagerLimits = { function toProvisioningConfig(input: Readonly<AppConfig>): BuiltInProvisioningConfigInput {
maxConcurrentAgents: 4, return {
maxSessionAgents: 2,
maxRecursiveDepth: 3,
};
const DEFAULT_PROVISIONING_CONFIG: BuiltInProvisioningConfigInput = {
gitWorktree: { gitWorktree: {
rootDirectory: ".ai_ops/worktrees", rootDirectory: input.provisioning.gitWorktree.rootDirectory,
baseRef: "HEAD", baseRef: input.provisioning.gitWorktree.baseRef,
}, },
portRange: { portRange: {
basePort: 36000, basePort: input.provisioning.portRange.basePort,
blockSize: 32, blockSize: input.provisioning.portRange.blockSize,
blockCount: 512, blockCount: input.provisioning.portRange.blockCount,
primaryPortOffset: 0, primaryPortOffset: input.provisioning.portRange.primaryPortOffset,
lockDirectory: ".ai_ops/locks/ports", lockDirectory: input.provisioning.portRange.lockDirectory,
}, },
}; };
function readPositiveIntegerEnv(
key: "AGENT_MAX_CONCURRENT" | "AGENT_MAX_SESSION" | "AGENT_MAX_RECURSIVE_DEPTH",
fallback: number,
): number {
const rawValue = process.env[key]?.trim();
if (!rawValue) {
return fallback;
}
const parsed = Number(rawValue);
if (!Number.isInteger(parsed) || parsed < 1) {
throw new Error(`Environment variable ${key} must be a positive integer.`);
}
return parsed;
} }
function readOptionalStringEnv(key: string, fallback: string): string { export function loadAgentManagerLimitsFromEnv(env: NodeJS.ProcessEnv = process.env): AgentManagerLimits {
const rawValue = process.env[key]?.trim(); const config = loadConfig(env);
if (!rawValue) {
return fallback;
}
return rawValue;
}
function readIntegerEnv(
key: string,
fallback: number,
bounds: {
min: number;
},
): number {
const rawValue = process.env[key]?.trim();
if (!rawValue) {
return fallback;
}
const parsed = Number(rawValue);
if (!Number.isInteger(parsed) || parsed < bounds.min) {
throw new Error(`Environment variable ${key} must be an integer >= ${String(bounds.min)}.`);
}
return parsed;
}
export function loadAgentManagerLimitsFromEnv(): AgentManagerLimits {
return { return {
maxConcurrentAgents: readPositiveIntegerEnv( maxConcurrentAgents: config.agentManager.maxConcurrentAgents,
"AGENT_MAX_CONCURRENT", maxSessionAgents: config.agentManager.maxSessionAgents,
DEFAULT_LIMITS.maxConcurrentAgents, maxRecursiveDepth: config.agentManager.maxRecursiveDepth,
),
maxSessionAgents: readPositiveIntegerEnv(
"AGENT_MAX_SESSION",
DEFAULT_LIMITS.maxSessionAgents,
),
maxRecursiveDepth: readPositiveIntegerEnv(
"AGENT_MAX_RECURSIVE_DEPTH",
DEFAULT_LIMITS.maxRecursiveDepth,
),
}; };
} }
let managerSingleton: AgentManager | undefined; let managerSingleton: AgentManager | undefined;
let provisioningSingleton: ResourceProvisioningOrchestrator | undefined; let provisioningSingleton: ResourceProvisioningOrchestrator | undefined;
export function getAgentManager(): AgentManager { export function getAgentManager(config: Readonly<AppConfig> = getConfig()): AgentManager {
if (!managerSingleton) { if (!managerSingleton) {
managerSingleton = new AgentManager(loadAgentManagerLimitsFromEnv()); managerSingleton = new AgentManager({
maxConcurrentAgents: config.agentManager.maxConcurrentAgents,
maxSessionAgents: config.agentManager.maxSessionAgents,
maxRecursiveDepth: config.agentManager.maxRecursiveDepth,
});
} }
return managerSingleton; return managerSingleton;
} }
export function loadProvisioningConfigFromEnv(): BuiltInProvisioningConfigInput { export function loadProvisioningConfigFromEnv(
return { env: NodeJS.ProcessEnv = process.env,
gitWorktree: { ): BuiltInProvisioningConfigInput {
rootDirectory: readOptionalStringEnv( return toProvisioningConfig(loadConfig(env));
"AGENT_WORKTREE_ROOT",
DEFAULT_PROVISIONING_CONFIG.gitWorktree?.rootDirectory ?? ".ai_ops/worktrees",
),
baseRef: readOptionalStringEnv(
"AGENT_WORKTREE_BASE_REF",
DEFAULT_PROVISIONING_CONFIG.gitWorktree?.baseRef ?? "HEAD",
),
},
portRange: {
basePort: readIntegerEnv(
"AGENT_PORT_BASE",
DEFAULT_PROVISIONING_CONFIG.portRange?.basePort ?? 36000,
{ min: 1 },
),
blockSize: readIntegerEnv(
"AGENT_PORT_BLOCK_SIZE",
DEFAULT_PROVISIONING_CONFIG.portRange?.blockSize ?? 32,
{ min: 1 },
),
blockCount: readIntegerEnv(
"AGENT_PORT_BLOCK_COUNT",
DEFAULT_PROVISIONING_CONFIG.portRange?.blockCount ?? 512,
{ min: 1 },
),
primaryPortOffset: readIntegerEnv(
"AGENT_PORT_PRIMARY_OFFSET",
DEFAULT_PROVISIONING_CONFIG.portRange?.primaryPortOffset ?? 0,
{ min: 0 },
),
lockDirectory: readOptionalStringEnv(
"AGENT_PORT_LOCK_DIR",
DEFAULT_PROVISIONING_CONFIG.portRange?.lockDirectory ?? ".ai_ops/locks/ports",
),
},
};
} }
export function getResourceProvisioningOrchestrator(): ResourceProvisioningOrchestrator { export function getResourceProvisioningOrchestrator(
config: Readonly<AppConfig> = getConfig(),
): ResourceProvisioningOrchestrator {
if (!provisioningSingleton) { if (!provisioningSingleton) {
provisioningSingleton = createDefaultResourceProvisioningOrchestrator( provisioningSingleton = createDefaultResourceProvisioningOrchestrator(
loadProvisioningConfigFromEnv(), toProvisioningConfig(config),
); );
} }
return provisioningSingleton; return provisioningSingleton;

View File

@@ -1,5 +1,6 @@
import { mkdir, readFile, writeFile } from "node:fs/promises"; import { mkdir, readFile } from "node:fs/promises";
import { dirname, resolve } from "node:path"; import { dirname, resolve } from "node:path";
import { writeUtf8FileAtomic } from "./file-persistence.js";
import { deepCloneJson, isRecord, type JsonObject, type JsonValue } from "./types.js"; import { deepCloneJson, isRecord, type JsonObject, type JsonValue } from "./types.js";
export type SessionHistoryEntry = { export type SessionHistoryEntry = {
@@ -200,7 +201,7 @@ export class FileSystemStateContextManager {
async writeState(sessionId: string, state: StoredSessionState): Promise<void> { async writeState(sessionId: string, state: StoredSessionState): Promise<void> {
const path = toStatePath(this.rootDirectory, sessionId); const path = toStatePath(this.rootDirectory, sessionId);
await mkdir(dirname(path), { recursive: true }); await mkdir(dirname(path), { recursive: true });
await writeFile(path, `${JSON.stringify(state, null, 2)}\n`, "utf8"); await writeUtf8FileAtomic(path, `${JSON.stringify(state, null, 2)}\n`);
} }
async patchState( async patchState(
@@ -248,7 +249,7 @@ export class FileSystemStateContextManager {
const path = toHandoffPath(this.rootDirectory, sessionId, handoff.nodeId); const path = toHandoffPath(this.rootDirectory, sessionId, handoff.nodeId);
await mkdir(dirname(path), { recursive: true }); await mkdir(dirname(path), { recursive: true });
await writeFile(path, `${JSON.stringify(nodeHandoff, null, 2)}\n`, "utf8"); await writeUtf8FileAtomic(path, `${JSON.stringify(nodeHandoff, null, 2)}\n`);
return nodeHandoff; return nodeHandoff;
} }

277
src/config.ts Normal file
View File

@@ -0,0 +1,277 @@
import type { AgentManagerLimits } from "./agents/manager.js";
import type { BuiltInProvisioningConfig } from "./agents/provisioning.js";
/**
 * Credentials and tuning knobs for the model-provider SDKs (Codex/OpenAI and
 * Claude). All optional fields are read from the environment in loadConfig and
 * stay undefined when the corresponding variable is unset or blank.
 */
export type ProviderRuntimeConfig = {
  codexApiKey?: string;
  openAiApiKey?: string;
  openAiBaseUrl?: string;
  // Defaults to true (see loadConfig / CODEX_SKIP_GIT_CHECK).
  codexSkipGitCheck: boolean;
  anthropicApiKey?: string;
  claudeModel?: string;
  claudeCodePath?: string;
};
/** Location of the shared MCP config file (MCP_CONFIG_PATH). */
export type McpRuntimeConfig = {
  configPath: string;
};
/**
 * Pipeline/orchestration runtime settings: where session state and project
 * context are persisted, plus topology limits (depth, retries, fan-out).
 */
export type OrchestrationRuntimeConfig = {
  stateRoot: string;
  projectContextPath: string;
  maxDepth: number;
  maxRetries: number;
  maxChildren: number;
};
/** Relative path (under the session working directory) of the resource discovery file. */
export type DiscoveryRuntimeConfig = {
  fileRelativePath: string;
};
/**
 * The unified, immutable application configuration produced by loadConfig.
 * Grouped by subsystem so each consumer can depend on its own slice only.
 */
export type AppConfig = {
  provider: ProviderRuntimeConfig;
  mcp: McpRuntimeConfig;
  agentManager: AgentManagerLimits;
  orchestration: OrchestrationRuntimeConfig;
  provisioning: BuiltInProvisioningConfig;
  discovery: DiscoveryRuntimeConfig;
};
// Fallbacks used by loadConfig when the matching env vars are absent.
const DEFAULT_AGENT_MANAGER: AgentManagerLimits = {
  maxConcurrentAgents: 4,
  maxSessionAgents: 2,
  maxRecursiveDepth: 3,
};
const DEFAULT_ORCHESTRATION: OrchestrationRuntimeConfig = {
  stateRoot: ".ai_ops/state",
  projectContextPath: ".ai_ops/project-context.json",
  maxDepth: 4,
  maxRetries: 2,
  maxChildren: 4,
};
const DEFAULT_PROVISIONING: BuiltInProvisioningConfig = {
  gitWorktree: {
    rootDirectory: ".ai_ops/worktrees",
    baseRef: "HEAD",
  },
  portRange: {
    basePort: 36000,
    blockSize: 32,
    blockCount: 512,
    primaryPortOffset: 0,
    lockDirectory: ".ai_ops/locks/ports",
  },
};
const DEFAULT_DISCOVERY: DiscoveryRuntimeConfig = {
  fileRelativePath: ".agent-context/resources.json",
};
/**
 * Reads env[key] and trims surrounding whitespace; a missing or blank value
 * yields undefined so callers can distinguish "unset" from "set to something".
 */
function readOptionalString(env: NodeJS.ProcessEnv, key: string): string | undefined {
  const trimmed = env[key]?.trim();
  return trimmed ? trimmed : undefined;
}
/** Like readOptionalString, but substitutes `fallback` when the variable is unset or blank. */
function readStringWithFallback(env: NodeJS.ProcessEnv, key: string, fallback: string): string {
  const explicit = readOptionalString(env, key);
  return explicit === undefined ? fallback : explicit;
}
/**
 * Reads env[key] as an integer with a lower bound.
 *
 * Returns `fallback` when the variable is unset or blank; throws when it is
 * present but not an integer >= bounds.min.
 */
function readIntegerWithBounds(
  env: NodeJS.ProcessEnv,
  key: string,
  fallback: number,
  bounds: {
    min: number;
  },
): number {
  const text = env[key]?.trim();
  if (!text) {
    return fallback;
  }
  const candidate = Number(text);
  const acceptable = Number.isInteger(candidate) && candidate >= bounds.min;
  if (!acceptable) {
    throw new Error(`Environment variable ${key} must be an integer >= ${String(bounds.min)}.`);
  }
  return candidate;
}
/**
 * Reads env[key] as a strict boolean: exactly "true" or "false" (after
 * trimming). Unset/blank yields `fallback`; any other value throws.
 */
function readBooleanWithFallback(
  env: NodeJS.ProcessEnv,
  key: string,
  fallback: boolean,
): boolean {
  const trimmed = env[key]?.trim();
  if (!trimmed) {
    return fallback;
  }
  switch (trimmed) {
    case "true":
      return true;
    case "false":
      return false;
    default:
      throw new Error(`Environment variable ${key} must be "true" or "false".`);
  }
}
/**
 * Recursively freezes `value` and every object reachable through enumerable
 * own properties, returning the same reference.
 *
 * Already-frozen objects are skipped. Besides avoiding redundant work, this
 * guards against infinite recursion: the previous implementation would
 * overflow the stack when given a cyclic structure. Freezing `value` BEFORE
 * descending means a cycle back to it hits the isFrozen guard and terminates.
 */
function deepFreeze<T>(value: T): Readonly<T> {
  if (value === null || typeof value !== "object" || Object.isFrozen(value)) {
    return value;
  }
  Object.freeze(value);
  for (const nested of Object.values(value as Record<string, unknown>)) {
    deepFreeze(nested);
  }
  return value;
}
export function loadConfig(env: NodeJS.ProcessEnv = process.env): Readonly<AppConfig> {
const config: AppConfig = {
provider: {
codexApiKey: readOptionalString(env, "CODEX_API_KEY"),
openAiApiKey: readOptionalString(env, "OPENAI_API_KEY"),
openAiBaseUrl: readOptionalString(env, "OPENAI_BASE_URL"),
codexSkipGitCheck: readBooleanWithFallback(env, "CODEX_SKIP_GIT_CHECK", true),
anthropicApiKey: readOptionalString(env, "ANTHROPIC_API_KEY"),
claudeModel: readOptionalString(env, "CLAUDE_MODEL"),
claudeCodePath: readOptionalString(env, "CLAUDE_CODE_PATH"),
},
mcp: {
configPath: readStringWithFallback(env, "MCP_CONFIG_PATH", "./mcp.config.json"),
},
agentManager: {
maxConcurrentAgents: readIntegerWithBounds(
env,
"AGENT_MAX_CONCURRENT",
DEFAULT_AGENT_MANAGER.maxConcurrentAgents,
{ min: 1 },
),
maxSessionAgents: readIntegerWithBounds(
env,
"AGENT_MAX_SESSION",
DEFAULT_AGENT_MANAGER.maxSessionAgents,
{ min: 1 },
),
maxRecursiveDepth: readIntegerWithBounds(
env,
"AGENT_MAX_RECURSIVE_DEPTH",
DEFAULT_AGENT_MANAGER.maxRecursiveDepth,
{ min: 1 },
),
},
orchestration: {
stateRoot: readStringWithFallback(
env,
"AGENT_STATE_ROOT",
DEFAULT_ORCHESTRATION.stateRoot,
),
projectContextPath: readStringWithFallback(
env,
"AGENT_PROJECT_CONTEXT_PATH",
DEFAULT_ORCHESTRATION.projectContextPath,
),
maxDepth: readIntegerWithBounds(
env,
"AGENT_TOPOLOGY_MAX_DEPTH",
DEFAULT_ORCHESTRATION.maxDepth,
{ min: 1 },
),
maxRetries: readIntegerWithBounds(
env,
"AGENT_TOPOLOGY_MAX_RETRIES",
DEFAULT_ORCHESTRATION.maxRetries,
{ min: 0 },
),
maxChildren: readIntegerWithBounds(
env,
"AGENT_RELATIONSHIP_MAX_CHILDREN",
DEFAULT_ORCHESTRATION.maxChildren,
{ min: 1 },
),
},
provisioning: {
gitWorktree: {
rootDirectory: readStringWithFallback(
env,
"AGENT_WORKTREE_ROOT",
DEFAULT_PROVISIONING.gitWorktree.rootDirectory,
),
baseRef: readStringWithFallback(
env,
"AGENT_WORKTREE_BASE_REF",
DEFAULT_PROVISIONING.gitWorktree.baseRef,
),
},
portRange: {
basePort: readIntegerWithBounds(
env,
"AGENT_PORT_BASE",
DEFAULT_PROVISIONING.portRange.basePort,
{ min: 1 },
),
blockSize: readIntegerWithBounds(
env,
"AGENT_PORT_BLOCK_SIZE",
DEFAULT_PROVISIONING.portRange.blockSize,
{ min: 1 },
),
blockCount: readIntegerWithBounds(
env,
"AGENT_PORT_BLOCK_COUNT",
DEFAULT_PROVISIONING.portRange.blockCount,
{ min: 1 },
),
primaryPortOffset: readIntegerWithBounds(
env,
"AGENT_PORT_PRIMARY_OFFSET",
DEFAULT_PROVISIONING.portRange.primaryPortOffset,
{ min: 0 },
),
lockDirectory: readStringWithFallback(
env,
"AGENT_PORT_LOCK_DIR",
DEFAULT_PROVISIONING.portRange.lockDirectory,
),
},
},
discovery: {
fileRelativePath: readStringWithFallback(
env,
"AGENT_DISCOVERY_FILE_RELATIVE_PATH",
DEFAULT_DISCOVERY.fileRelativePath,
),
},
};
return deepFreeze(config);
}
// Process-wide cached configuration, built lazily on first getConfig() call.
let configSingleton: Readonly<AppConfig> | undefined;

/** Returns the cached application config, loading it from process.env on first use. */
export function getConfig(): Readonly<AppConfig> {
  configSingleton ??= loadConfig(process.env);
  return configSingleton;
}

/** Drops the cached config so tests can re-read a mutated environment. */
export function clearConfigCacheForTests(): void {
  configSingleton = undefined;
}

View File

@@ -1,63 +1,43 @@
import "dotenv/config"; import "dotenv/config";
import { query, type Options } from "@anthropic-ai/claude-agent-sdk"; import { query, type Options } from "@anthropic-ai/claude-agent-sdk";
import { pathToFileURL } from "node:url"; import { pathToFileURL } from "node:url";
import { getAgentManager, getResourceProvisioningOrchestrator } from "../agents/runtime.js"; import { getConfig } from "../config.js";
import { loadMcpConfigFromEnv } from "../mcp.js"; import { createSessionContext } from "./session-context.js";
function requiredPrompt(argv: string[]): string { function requiredPrompt(argv: string[]): string {
const prompt = argv.slice(2).join(" ").trim(); const prompt = argv.slice(2).join(" ").trim();
if (!prompt) { if (!prompt) {
throw new Error("Usage: npm run claude -- \"your prompt\""); throw new Error('Usage: npm run claude -- "your prompt"');
} }
return prompt; return prompt;
} }
function buildOptions(): Options { function buildOptions(config = getConfig()): Options {
return { return {
maxTurns: 1, maxTurns: 1,
...(process.env.CLAUDE_MODEL ? { model: process.env.CLAUDE_MODEL } : {}), ...(config.provider.claudeModel ? { model: config.provider.claudeModel } : {}),
...(process.env.CLAUDE_CODE_PATH ...(config.provider.claudeCodePath
? { pathToClaudeCodeExecutable: process.env.CLAUDE_CODE_PATH } ? { pathToClaudeCodeExecutable: config.provider.claudeCodePath }
: {}), : {}),
}; };
} }
export async function runClaudePrompt(prompt: string): Promise<void> { export async function runClaudePrompt(prompt: string): Promise<void> {
const agentManager = getAgentManager(); const config = getConfig();
const agentSession = agentManager.createSession(); const sessionContext = await createSessionContext("claude", {
const resourceProvisioning = getResourceProvisioningOrchestrator();
const mcp = loadMcpConfigFromEnv({
providerHint: "claude",
prompt, prompt,
config,
}); });
let provisionedResources:
| Awaited<ReturnType<typeof resourceProvisioning.provisionSession>>
| undefined;
try { try {
provisionedResources = await resourceProvisioning.provisionSession({ const finalResponse = await sessionContext.runInSession(async () => {
sessionId: agentSession.id,
resources: [{ kind: "git-worktree" }, { kind: "port-range" }],
});
const runtimeInjection = await provisionedResources.buildRuntimeInjection({
discoveryFileRelativePath: process.env.AGENT_DISCOVERY_FILE_RELATIVE_PATH,
baseEnv: process.env,
});
const promptWithContext = provisionedResources.composePrompt(prompt, [
`Discovery file: ${runtimeInjection.discoveryFilePath}`,
"Resource env vars are pre-injected (AGENT_WORKTREE_PATH, AGENT_PORT_RANGE_START, AGENT_PORT_RANGE_END, AGENT_PORT_PRIMARY).",
]);
const finalResponse = await agentSession.runAgent({
depth: 0,
run: async () => {
const session = query({ const session = query({
prompt: promptWithContext, prompt: sessionContext.promptWithContext,
options: { options: {
...buildOptions(), ...buildOptions(config),
...(mcp.claudeMcpServers ? { mcpServers: mcp.claudeMcpServers } : {}), ...(sessionContext.mcp.claudeMcpServers ? { mcpServers: sessionContext.mcp.claudeMcpServers } : {}),
cwd: runtimeInjection.workingDirectory, cwd: sessionContext.runtimeInjection.workingDirectory,
env: runtimeInjection.env, env: sessionContext.runtimeInjection.env,
}, },
}); });
@@ -85,15 +65,11 @@ export async function runClaudePrompt(prompt: string): Promise<void> {
} }
return result; return result;
},
}); });
console.log(finalResponse); console.log(finalResponse);
} finally { } finally {
if (provisionedResources) { await sessionContext.close();
await provisionedResources.release();
}
agentSession.close();
} }
} }

View File

@@ -1,67 +1,43 @@
import "dotenv/config"; import "dotenv/config";
import { Codex } from "@openai/codex-sdk"; import { Codex } from "@openai/codex-sdk";
import { pathToFileURL } from "node:url"; import { pathToFileURL } from "node:url";
import { getAgentManager, getResourceProvisioningOrchestrator } from "../agents/runtime.js"; import { getConfig } from "../config.js";
import { loadMcpConfigFromEnv } from "../mcp.js"; import { createSessionContext } from "./session-context.js";
function requiredPrompt(argv: string[]): string { function requiredPrompt(argv: string[]): string {
const prompt = argv.slice(2).join(" ").trim(); const prompt = argv.slice(2).join(" ").trim();
if (!prompt) { if (!prompt) {
throw new Error("Usage: npm run codex -- \"your prompt\""); throw new Error('Usage: npm run codex -- "your prompt"');
} }
return prompt; return prompt;
} }
export async function runCodexPrompt(prompt: string): Promise<void> { export async function runCodexPrompt(prompt: string): Promise<void> {
const agentManager = getAgentManager(); const config = getConfig();
const agentSession = agentManager.createSession(); const sessionContext = await createSessionContext("codex", {
const resourceProvisioning = getResourceProvisioningOrchestrator();
const apiKey = process.env.CODEX_API_KEY ?? process.env.OPENAI_API_KEY;
const mcp = loadMcpConfigFromEnv({
providerHint: "codex",
prompt, prompt,
config,
}); });
let provisionedResources:
| Awaited<ReturnType<typeof resourceProvisioning.provisionSession>>
| undefined;
try { try {
provisionedResources = await resourceProvisioning.provisionSession({ const apiKey = config.provider.codexApiKey ?? config.provider.openAiApiKey;
sessionId: agentSession.id,
resources: [{ kind: "git-worktree" }, { kind: "port-range" }],
});
const runtimeInjection = await provisionedResources.buildRuntimeInjection({
discoveryFileRelativePath: process.env.AGENT_DISCOVERY_FILE_RELATIVE_PATH,
baseEnv: process.env,
});
const codex = new Codex({ const codex = new Codex({
...(apiKey ? { apiKey } : {}), ...(apiKey ? { apiKey } : {}),
...(process.env.OPENAI_BASE_URL ? { baseUrl: process.env.OPENAI_BASE_URL } : {}), ...(config.provider.openAiBaseUrl ? { baseUrl: config.provider.openAiBaseUrl } : {}),
...(mcp.codexConfig ? { config: mcp.codexConfig } : {}), ...(sessionContext.mcp.codexConfig ? { config: sessionContext.mcp.codexConfig } : {}),
env: runtimeInjection.env, env: sessionContext.runtimeInjection.env,
}); });
const thread = codex.startThread({ const thread = codex.startThread({
workingDirectory: runtimeInjection.workingDirectory, workingDirectory: sessionContext.runtimeInjection.workingDirectory,
skipGitRepoCheck: process.env.CODEX_SKIP_GIT_CHECK !== "false", skipGitRepoCheck: config.provider.codexSkipGitCheck,
}); });
const promptWithContext = provisionedResources.composePrompt(prompt, [ const turn = await sessionContext.runInSession(() => thread.run(sessionContext.promptWithContext));
`Discovery file: ${runtimeInjection.discoveryFilePath}`,
"Resource env vars are pre-injected (AGENT_WORKTREE_PATH, AGENT_PORT_RANGE_START, AGENT_PORT_RANGE_END, AGENT_PORT_PRIMARY).",
]);
const turn = await agentSession.runAgent({
depth: 0,
run: () => thread.run(promptWithContext),
});
console.log(turn.finalResponse.trim() || "(No response text returned)"); console.log(turn.finalResponse.trim() || "(No response text returned)");
} finally { } finally {
if (provisionedResources) { await sessionContext.close();
await provisionedResources.release();
}
agentSession.close();
} }
} }

View File

@@ -0,0 +1,105 @@
import { getConfig, type AppConfig } from "../config.js";
import type { AgentSession } from "../agents/manager.js";
import type { ProvisionedResources } from "../agents/provisioning.js";
import {
getAgentManager,
getResourceProvisioningOrchestrator,
} from "../agents/runtime.js";
import {
getDefaultMcpRegistry,
loadMcpConfigFromEnv,
type LoadedMcpConfig,
type McpRegistry,
} from "../mcp.js";
/** Which provider SDK a session context is being built for. */
export type SessionProvider = "codex" | "claude";
/**
 * Everything a provider entrypoint needs to run one prompt inside a managed
 * session: the resolved MCP config, the prompt augmented with resource
 * context, the runtime env/cwd injection, a session-scoped runner, and an
 * idempotent cleanup hook.
 */
export type SessionContext = {
  provider: SessionProvider;
  sessionId: string;
  mcp: LoadedMcpConfig;
  promptWithContext: string;
  runtimeInjection: Awaited<ReturnType<ProvisionedResources["buildRuntimeInjection"]>>;
  // Runs the given work inside the agent session (depth 0).
  runInSession: <T>(run: () => Promise<T>) => Promise<T>;
  // Releases provisioned resources and closes the agent session; safe to call more than once.
  close: () => Promise<void>;
};
/**
 * Provisions a fully-wired provider session: an AgentManager session,
 * git-worktree + port-range resources, runtime env injection, MCP config,
 * and a context-augmented prompt.
 *
 * If any setup step throws, everything acquired so far is released before
 * the error is rethrown. Callers must invoke `close()` when done; it is
 * idempotent.
 *
 * @param provider - Which provider SDK the session is for.
 * @param input - Prompt plus optional config/registry overrides (defaults to
 *   the process-wide singletons).
 */
export async function createSessionContext(
  provider: SessionProvider,
  input: {
    prompt: string;
    config?: Readonly<AppConfig>;
    mcpRegistry?: McpRegistry;
  },
): Promise<SessionContext> {
  const config = input.config ?? getConfig();
  const mcpRegistry = input.mcpRegistry ?? getDefaultMcpRegistry();
  const agentManager = getAgentManager(config);
  const agentSession = agentManager.createSession();
  const resourceProvisioning = getResourceProvisioningOrchestrator(config);
  let provisionedResources: ProvisionedResources | undefined;
  let closed = false;
  const close = async (): Promise<void> => {
    if (closed) {
      return;
    }
    closed = true;
    try {
      if (provisionedResources) {
        await provisionedResources.release();
      }
    } finally {
      // Always close the agent session even if releasing resources throws;
      // otherwise a failed release would leak the session slot permanently.
      agentSession.close();
    }
  };
  try {
    provisionedResources = await resourceProvisioning.provisionSession({
      sessionId: agentSession.id,
      resources: [{ kind: "git-worktree" }, { kind: "port-range" }],
    });
    const runtimeInjection = await provisionedResources.buildRuntimeInjection({
      discoveryFileRelativePath: config.discovery.fileRelativePath,
      baseEnv: process.env,
    });
    const promptWithContext = provisionedResources.composePrompt(input.prompt, [
      `Discovery file: ${runtimeInjection.discoveryFilePath}`,
      "Resource env vars are pre-injected (AGENT_WORKTREE_PATH, AGENT_PORT_RANGE_START, AGENT_PORT_RANGE_END, AGENT_PORT_PRIMARY).",
    ]);
    const mcp = loadMcpConfigFromEnv(
      {
        providerHint: provider,
        prompt: input.prompt,
      },
      {
        config,
        registry: mcpRegistry,
      },
    );
    return {
      provider,
      sessionId: agentSession.id,
      mcp,
      promptWithContext,
      runtimeInjection,
      runInSession: <T>(run: () => Promise<T>) =>
        runWithAgentSession(agentSession, run),
      close,
    };
  } catch (error) {
    // Best-effort cleanup of whatever was acquired, then surface the cause.
    await close();
    throw error;
  }
}
/** Executes `run` inside the given agent session at the root depth (0). */
async function runWithAgentSession<T>(agentSession: AgentSession, run: () => Promise<T>): Promise<T> {
  const request = { depth: 0, run };
  return agentSession.runAgent(request);
}

View File

@@ -1,17 +1,18 @@
import { existsSync, readFileSync } from "node:fs"; import { existsSync, readFileSync } from "node:fs";
import { resolve } from "node:path"; import { resolve } from "node:path";
import type { CodexOptions } from "@openai/codex-sdk"; import type { CodexOptions } from "@openai/codex-sdk";
import { getConfig, type AppConfig } from "./config.js";
import { normalizeSharedMcpConfigFile } from "./mcp/converters.js";
import { import {
createDefaultMcpRegistry,
createMcpHandlerShell, createMcpHandlerShell,
listMcpHandlers,
registerMcpHandler,
resolveServerWithHandler,
type McpHandlerBusinessLogic, type McpHandlerBusinessLogic,
type McpHandlerBusinessLogicInput, type McpHandlerBusinessLogicInput,
type McpHandlerInput, type McpHandlerInput,
type McpHandlerResult, type McpHandlerResult,
type McpHandlerShellOptions, type McpHandlerShellOptions,
type McpHandlerUtils, type McpHandlerUtils,
McpRegistry,
type McpServerHandler, type McpServerHandler,
} from "./mcp/handlers.js"; } from "./mcp/handlers.js";
import type { import type {
@@ -24,16 +25,15 @@ function isRecord(value: unknown): value is Record<string, unknown> {
return typeof value === "object" && value !== null && !Array.isArray(value); return typeof value === "object" && value !== null && !Array.isArray(value);
} }
function readConfigFile(pathFromEnv: string | undefined): { function readConfigFile(configPath: string): {
config?: SharedMcpConfigFile; config?: SharedMcpConfigFile;
sourcePath?: string; sourcePath?: string;
} { } {
const explicitPath = pathFromEnv?.trim(); const candidatePath = configPath.trim() || "./mcp.config.json";
const candidatePath = explicitPath || "./mcp.config.json";
const resolvedPath = resolve(process.cwd(), candidatePath); const resolvedPath = resolve(process.cwd(), candidatePath);
if (!existsSync(resolvedPath)) { if (!existsSync(resolvedPath)) {
if (explicitPath) { if (candidatePath !== "./mcp.config.json") {
throw new Error(`MCP config file not found: ${resolvedPath}`); throw new Error(`MCP config file not found: ${resolvedPath}`);
} }
return {}; return {};
@@ -45,11 +45,29 @@ function readConfigFile(pathFromEnv: string | undefined): {
throw new Error(`MCP config file must contain a JSON object: ${resolvedPath}`); throw new Error(`MCP config file must contain a JSON object: ${resolvedPath}`);
} }
return { config: parsed as SharedMcpConfigFile, sourcePath: resolvedPath }; return {
config: normalizeSharedMcpConfigFile(parsed as SharedMcpConfigFile),
sourcePath: resolvedPath,
};
} }
export function loadMcpConfigFromEnv(context: McpLoadContext = {}): LoadedMcpConfig { const defaultMcpRegistry = createDefaultMcpRegistry();
const { config, sourcePath } = readConfigFile(process.env.MCP_CONFIG_PATH);
export function getDefaultMcpRegistry(): McpRegistry {
return defaultMcpRegistry;
}
export function loadMcpConfigFromEnv(
context: McpLoadContext = {},
options?: {
config?: Readonly<AppConfig>;
registry?: McpRegistry;
},
): LoadedMcpConfig {
const runtimeConfig = options?.config ?? getConfig();
const registry = options?.registry ?? defaultMcpRegistry;
const { config, sourcePath } = readConfigFile(runtimeConfig.mcp.configPath);
if (!config) { if (!config) {
return {}; return {};
} }
@@ -59,7 +77,7 @@ export function loadMcpConfigFromEnv(context: McpLoadContext = {}): LoadedMcpCon
const resolvedHandlers: Record<string, string> = {}; const resolvedHandlers: Record<string, string> = {};
for (const [serverName, server] of Object.entries(config.servers ?? {})) { for (const [serverName, server] of Object.entries(config.servers ?? {})) {
const resolved = resolveServerWithHandler({ const resolved = registry.resolveServerWithHandler({
serverName, serverName,
server, server,
context, context,
@@ -102,7 +120,15 @@ export function loadMcpConfigFromEnv(context: McpLoadContext = {}): LoadedMcpCon
}; };
} }
export { createMcpHandlerShell, listMcpHandlers, registerMcpHandler }; export function registerMcpHandler(handler: McpServerHandler): void {
defaultMcpRegistry.register(handler);
}
export function listMcpHandlers(): McpServerHandler[] {
return defaultMcpRegistry.listHandlers();
}
export { createDefaultMcpRegistry, createMcpHandlerShell, McpRegistry };
export type { export type {
LoadedMcpConfig, LoadedMcpConfig,
McpHandlerBusinessLogic, McpHandlerBusinessLogic,

View File

@@ -1,5 +1,42 @@
import type { McpServerConfig } from "@anthropic-ai/claude-agent-sdk"; import type { McpServerConfig } from "@anthropic-ai/claude-agent-sdk";
import type { CodexConfigObject, SharedMcpServer, Transport } from "./types.js"; import type {
CodexConfigObject,
SharedMcpConfigFile,
SharedMcpServer,
Transport,
} from "./types.js";
/**
 * Collapses the legacy `http_headers` alias and the canonical `headers`
 * field into one header map. Canonical `headers` entries win on key
 * collisions. Returns `undefined` when neither field contributes a header.
 */
function mergeHeaders(server: SharedMcpServer): Record<string, string> | undefined {
  const combined: Record<string, string> = {};
  Object.assign(combined, server.http_headers ?? {}, server.headers ?? {});
  if (Object.keys(combined).length === 0) {
    return undefined;
  }
  return combined;
}
/**
 * Returns a copy of `server` in which the header aliases are collapsed into
 * the canonical `headers` field; the legacy `http_headers` key never
 * survives normalization, and `headers` is omitted entirely when empty.
 */
export function normalizeSharedMcpServer(server: SharedMcpServer): SharedMcpServer {
  const merged = mergeHeaders(server);
  const { headers: _droppedHeaders, http_headers: _droppedHttpHeaders, ...withoutHeaderFields } = server;
  if (!merged) {
    return { ...withoutHeaderFields };
  }
  return { ...withoutHeaderFields, headers: merged };
}
export function normalizeSharedMcpConfigFile(config: SharedMcpConfigFile): SharedMcpConfigFile {
const normalizedServers: Record<string, SharedMcpServer> = {};
for (const [serverName, server] of Object.entries(config.servers ?? {})) {
normalizedServers[serverName] = normalizeSharedMcpServer(server);
}
return {
...config,
...(Object.keys(normalizedServers).length > 0 ? { servers: normalizedServers } : {}),
};
}
export function inferTransport(server: SharedMcpServer): Transport { export function inferTransport(server: SharedMcpServer): Transport {
if (server.type) { if (server.type) {
@@ -10,6 +47,7 @@ export function inferTransport(server: SharedMcpServer): Transport {
export function toCodexServerConfig(serverName: string, server: SharedMcpServer): CodexConfigObject { export function toCodexServerConfig(serverName: string, server: SharedMcpServer): CodexConfigObject {
const type = inferTransport(server); const type = inferTransport(server);
const headers = mergeHeaders(server);
if (type === "stdio" && !server.command) { if (type === "stdio" && !server.command) {
throw new Error(`Shared MCP server "${serverName}" requires "command" for stdio transport.`); throw new Error(`Shared MCP server "${serverName}" requires "command" for stdio transport.`);
@@ -38,9 +76,8 @@ export function toCodexServerConfig(serverName: string, server: SharedMcpServer)
if (server.bearer_token_env_var) { if (server.bearer_token_env_var) {
config.bearer_token_env_var = server.bearer_token_env_var; config.bearer_token_env_var = server.bearer_token_env_var;
} }
const httpHeaders = server.http_headers ?? server.headers; if (headers) {
if (httpHeaders) { config.http_headers = headers;
config.http_headers = httpHeaders;
} }
if (server.env_http_headers) config.env_http_headers = server.env_http_headers; if (server.env_http_headers) config.env_http_headers = server.env_http_headers;
if (server.env_vars) config.env_vars = server.env_vars; if (server.env_vars) config.env_vars = server.env_vars;
@@ -50,6 +87,7 @@ export function toCodexServerConfig(serverName: string, server: SharedMcpServer)
export function toClaudeServerConfig(serverName: string, server: SharedMcpServer): McpServerConfig { export function toClaudeServerConfig(serverName: string, server: SharedMcpServer): McpServerConfig {
const type = inferTransport(server); const type = inferTransport(server);
const headers = mergeHeaders(server);
if (type === "stdio") { if (type === "stdio") {
if (!server.command) { if (!server.command) {
@@ -70,7 +108,6 @@ export function toClaudeServerConfig(serverName: string, server: SharedMcpServer
return { return {
type, type,
url: server.url, url: server.url,
...(server.headers ? { headers: server.headers } : {}), ...(headers ? { headers } : {}),
}; };
} }

View File

@@ -127,95 +127,74 @@ function applyEnabledByDefault(input: McpHandlerBusinessLogicInput): McpHandlerR
: input.baseResult; : input.baseResult;
} }
const context7Handler = createMcpHandlerShell({ function createBuiltinHandlers(): McpServerHandler[] {
const context7Handler = createMcpHandlerShell({
id: "context7", id: "context7",
description: description:
"Dedicated extension point for Context7 policy/behavior. Business logic belongs in applyBusinessLogic.", "Dedicated extension point for Context7 policy/behavior. Business logic belongs in applyBusinessLogic.",
matches: (input) => isNamedLike(input, ["context7"]), matches: (input) => isNamedLike(input, ["context7"]),
applyBusinessLogic: applyEnabledByDefault, applyBusinessLogic: applyEnabledByDefault,
}); });
const claudeTaskMasterHandler = createMcpHandlerShell({ const claudeTaskMasterHandler = createMcpHandlerShell({
id: "claude-task-master", id: "claude-task-master",
description: description:
"Dedicated extension point for Claude Task Master policy/behavior. Business logic belongs in applyBusinessLogic.", "Dedicated extension point for Claude Task Master policy/behavior. Business logic belongs in applyBusinessLogic.",
matches: (input) => matches: (input) =>
isNamedLike(input, ["claude-task-master", "task-master", "taskmaster"]), isNamedLike(input, ["claude-task-master", "task-master", "taskmaster"]),
applyBusinessLogic: applyEnabledByDefault, applyBusinessLogic: applyEnabledByDefault,
}); });
const genericHandler: McpServerHandler = { const genericHandler: McpServerHandler = {
id: "generic", id: "generic",
description: "Default passthrough mapping for project-specific MCP servers.", description: "Default passthrough mapping for project-specific MCP servers.",
matches: () => true, matches: () => true,
resolve: ({ serverName, server, utils: localUtils }) => resolve: ({ serverName, server, utils: localUtils }) =>
createDefaultResult({ serverName, server, localUtils }), createDefaultResult({ serverName, server, localUtils }),
}; };
const handlerRegistry = new Map<string, McpServerHandler>(); return [context7Handler, claudeTaskMasterHandler, genericHandler];
const handlerOrder: string[] = [];
function installBuiltinHandlers(): void {
registerMcpHandler(context7Handler);
registerMcpHandler(claudeTaskMasterHandler);
registerMcpHandler(genericHandler);
} }
export function registerMcpHandler(handler: McpServerHandler): void { export class McpRegistry {
if (handlerRegistry.has(handler.id)) { private readonly handlerRegistry = new Map<string, McpServerHandler>();
handlerRegistry.set(handler.id, handler); private readonly handlerOrder: string[] = [];
constructor(input?: { handlers?: McpServerHandler[] }) {
for (const handler of input?.handlers ?? []) {
this.register(handler);
}
}
register(handler: McpServerHandler): void {
if (this.handlerRegistry.has(handler.id)) {
this.handlerRegistry.set(handler.id, handler);
return; return;
} }
handlerRegistry.set(handler.id, handler);
handlerOrder.push(handler.id);
}
export function listMcpHandlers(): McpServerHandler[] { this.handlerRegistry.set(handler.id, handler);
return handlerOrder this.handlerOrder.push(handler.id);
.map((id) => handlerRegistry.get(id)) }
listHandlers(): McpServerHandler[] {
return this.handlerOrder
.map((id) => this.handlerRegistry.get(id))
.filter((handler): handler is McpServerHandler => Boolean(handler)); .filter((handler): handler is McpServerHandler => Boolean(handler));
}
function resolveHandler(serverName: string, server: SharedMcpServer): McpServerHandler {
if (server.handler) {
const explicit = handlerRegistry.get(server.handler);
if (!explicit) {
throw new Error(
`Unknown MCP handler "${server.handler}" configured for server "${serverName}".`,
);
}
return explicit;
} }
for (const id of handlerOrder) { resolveServerWithHandler(input: {
const handler = handlerRegistry.get(id);
if (!handler || id === "generic") {
continue;
}
if (handler.matches({ serverName, server })) {
return handler;
}
}
const fallback = handlerRegistry.get("generic");
if (!fallback) {
throw new Error('No MCP fallback handler registered. Expected handler id "generic".');
}
return fallback;
}
export function resolveServerWithHandler(input: {
serverName: string; serverName: string;
server: SharedMcpServer; server: SharedMcpServer;
context: McpLoadContext; context: McpLoadContext;
fullConfig: SharedMcpConfigFile; fullConfig: SharedMcpConfigFile;
}): McpHandlerResult & { handlerId: string } { }): McpHandlerResult & { handlerId: string } {
const { serverName, server, context, fullConfig } = input; const { serverName, server, context, fullConfig } = input;
const handler = resolveHandler(serverName, server); const handler = this.resolveHandler(serverName, server);
const handlerConfig = { const handlerConfig = {
...(fullConfig.handlerSettings?.[handler.id] ?? {}), ...(fullConfig.handlerSettings?.[handler.id] ?? {}),
...(server.handlerOptions ?? {}), ...(server.handlerOptions ?? {}),
}; };
const result = handler.resolve({ const result = handler.resolve({
serverName, serverName,
server, server,
@@ -224,10 +203,45 @@ export function resolveServerWithHandler(input: {
fullConfig, fullConfig,
utils, utils,
}); });
return { return {
...result, ...result,
handlerId: handler.id, handlerId: handler.id,
}; };
}
/**
 * Picks the handler for a server: an explicitly configured handler id wins
 * (and must exist), otherwise the first registered non-generic handler whose
 * matcher accepts the server, otherwise the required "generic" fallback.
 */
private resolveHandler(serverName: string, server: SharedMcpServer): McpServerHandler {
  const explicitId = server.handler;
  if (explicitId) {
    const explicitHandler = this.handlerRegistry.get(explicitId);
    if (explicitHandler) {
      return explicitHandler;
    }
    throw new Error(
      `Unknown MCP handler "${explicitId}" configured for server "${serverName}".`,
    );
  }
  // Registration order is the match priority; "generic" is reserved as fallback.
  const matched = this.handlerOrder
    .filter((id) => id !== "generic")
    .map((id) => this.handlerRegistry.get(id))
    .find((candidate) => candidate?.matches({ serverName, server }));
  if (matched) {
    return matched;
  }
  const fallback = this.handlerRegistry.get("generic");
  if (fallback) {
    return fallback;
  }
  throw new Error('No MCP fallback handler registered. Expected handler id "generic".');
}
} }
installBuiltinHandlers(); export function createDefaultMcpRegistry(): McpRegistry {
return new McpRegistry({
handlers: createBuiltinHandlers(),
});
}

22
tests/config.test.ts Normal file
View File

@@ -0,0 +1,22 @@
import test from "node:test";
import assert from "node:assert/strict";
import { loadConfig } from "../src/config.js";
test("loads defaults and freezes config", () => {
  const loaded = loadConfig({});
  // Config objects are deep-frozen so runtime code cannot mutate shared state.
  assert.ok(Object.isFrozen(loaded));
  assert.ok(Object.isFrozen(loaded.orchestration));
  // Spot-check representative defaults from each config domain.
  assert.equal(loaded.agentManager.maxConcurrentAgents, 4);
  assert.equal(loaded.orchestration.maxDepth, 4);
  assert.equal(loaded.provisioning.portRange.basePort, 36000);
  assert.equal(loaded.discovery.fileRelativePath, ".agent-context/resources.json");
});
test("validates boolean env values", () => {
  // A boolean-typed env var with a non-boolean value must be rejected loudly.
  const loadWithInvalidFlag = () => loadConfig({ CODEX_SKIP_GIT_CHECK: "maybe" });
  assert.throws(loadWithInvalidFlag, /must be "true" or "false"/);
});

View File

@@ -2,6 +2,7 @@ import test from "node:test";
import assert from "node:assert/strict"; import assert from "node:assert/strict";
import { import {
inferTransport, inferTransport,
normalizeSharedMcpServer,
toClaudeServerConfig, toClaudeServerConfig,
toCodexServerConfig, toCodexServerConfig,
} from "../src/mcp/converters.js"; } from "../src/mcp/converters.js";
@@ -41,6 +42,42 @@ test("maps shared headers to codex http_headers", () => {
}); });
}); });
test("normalizes header aliases into a single headers object", () => {
  const result = normalizeSharedMcpServer({
    url: "http://localhost:3000/mcp",
    http_headers: { "X-Source": "legacy" },
    headers: { Authorization: "Bearer token" },
  });
  // Legacy alias entries fold in; the canonical field wins on key conflicts.
  assert.deepEqual(result.headers, {
    "X-Source": "legacy",
    Authorization: "Bearer token",
  });
  // The legacy alias key must not survive normalization.
  assert.ok(!("http_headers" in result));
});
test("maps legacy http_headers alias for claude conversion", () => {
  const converted = toClaudeServerConfig("legacy-http-headers", {
    type: "http",
    url: "http://localhost:3000/mcp",
    http_headers: { Authorization: "Bearer token" },
  });
  // The Claude shape only ever carries the canonical `headers` key.
  const expected = {
    type: "http",
    url: "http://localhost:3000/mcp",
    headers: { Authorization: "Bearer token" },
  };
  assert.deepEqual(converted, expected);
});
test("throws for claude http server without url", () => { test("throws for claude http server without url", () => {
assert.throws( assert.throws(
() => toClaudeServerConfig("bad-http", { type: "http" }), () => toClaudeServerConfig("bad-http", { type: "http" }),

View File

@@ -0,0 +1,65 @@
import test from "node:test";
import assert from "node:assert/strict";
import {
McpRegistry,
createDefaultMcpRegistry,
createMcpHandlerShell,
} from "../src/mcp/handlers.js";
test("mcp registries are isolated instances", () => {
const registryA = createDefaultMcpRegistry();
const registryB = createDefaultMcpRegistry();
registryA.register(
createMcpHandlerShell({
id: "custom-a",
description: "custom handler",
matches: () => false,
}),
);
assert.equal(registryA.listHandlers().some((handler) => handler.id === "custom-a"), true);
assert.equal(registryB.listHandlers().some((handler) => handler.id === "custom-a"), false);
});
test("mcp registry resolves generic fallback by default", () => {
  const defaultRegistry = createDefaultMcpRegistry();
  const resolution = defaultRegistry.resolveServerWithHandler({
    serverName: "local-files",
    context: {},
    fullConfig: {
      servers: {},
    },
    server: {
      type: "stdio",
      command: "npx",
      args: ["-y", "@modelcontextprotocol/server-filesystem", "."],
    },
  });
  // No builtin matcher claims this server, so the passthrough handler applies
  // and both target runtime configs are produced.
  assert.equal(resolution.handlerId, "generic");
  assert.ok(resolution.codex);
  assert.ok(resolution.claude);
});
test("mcp registry rejects unknown explicit handlers", () => {
const registry = new McpRegistry();
assert.throws(
() =>
registry.resolveServerWithHandler({
serverName: "broken",
server: {
type: "http",
url: "http://localhost:3000/mcp",
handler: "missing-handler",
},
context: {},
fullConfig: {
servers: {},
},
}),
/Unknown MCP handler/,
);
});

View File

@@ -230,6 +230,7 @@ test("runs DAG pipeline with state-dependent routing and retry behavior", async
task: "Implement pipeline", task: "Implement pipeline",
}, },
}); });
assert.equal(result.status, "success");
assert.deepEqual( assert.deepEqual(
result.records.map((record) => `${record.nodeId}:${record.status}:${String(record.attempt)}`), result.records.map((record) => `${record.nodeId}:${record.status}:${String(record.attempt)}`),
@@ -471,6 +472,7 @@ test("runs parallel topology blocks concurrently and routes via domain-event edg
const result = await runPromise; const result = await runPromise;
assert.equal(maxConcurrentCoders, 2); assert.equal(maxConcurrentCoders, 2);
assert.equal(result.status, "success");
assert.deepEqual( assert.deepEqual(
result.records.map((record) => `${record.nodeId}:${record.status}`), result.records.map((record) => `${record.nodeId}:${record.status}`),
["plan:success", "code-a:success", "code-b:success", "integrate:success"], ["plan:success", "code-a:success", "code-b:success", "integrate:success"],
@@ -577,6 +579,96 @@ test("fails fast after two sequential hard failures", async () => {
); );
}); });
// Verifies that the run-level aggregate status reports "failure" when the
// pipeline's terminal node fails, even though an earlier node succeeded.
test("marks aggregate status as failure when a terminal node fails", async () => {
// Fresh temp directories isolate workspace and session state for this test.
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));
const projectContextPath = resolve(stateRoot, "project-context.json");
// Minimal sequential pipeline: build -> verify, the edge fires on success.
const manifest = {
schemaVersion: "1",
topologies: ["sequential"],
personas: [
{
id: "coder",
displayName: "Coder",
systemPromptTemplate: "Coder",
toolClearance: {
allowlist: [],
banlist: [],
},
},
],
relationships: [],
// maxRetries: 0 makes the verify failure terminal instead of retried.
topologyConstraints: {
maxDepth: 3,
maxRetries: 0,
},
pipeline: {
entryNodeId: "build",
nodes: [
{
id: "build",
actorId: "build_actor",
personaId: "coder",
},
{
id: "verify",
actorId: "verify_actor",
personaId: "coder",
},
],
edges: [
{
from: "build",
to: "verify",
on: "success",
},
],
},
} as const;
const engine = new SchemaDrivenExecutionEngine({
manifest,
settings: {
workspaceRoot,
stateRoot,
projectContextPath,
maxDepth: 3,
maxRetries: 0,
maxChildren: 2,
runtimeContext: {},
},
// Stub executors: build succeeds, then verify reports a soft failure.
actorExecutors: {
build_actor: async () => ({
status: "success",
payload: {
step: "build",
},
}),
verify_actor: async () => ({
status: "failure",
payload: {
error: "verification failed",
},
failureKind: "soft",
}),
},
});
const result = await engine.runSession({
sessionId: "session-terminal-failure",
initialPayload: {
task: "Aggregate failure status",
},
});
// The aggregate status must reflect the terminal node's failure, and the
// per-node records must show the build success followed by the verify failure.
assert.equal(result.status, "failure");
assert.deepEqual(
result.records.map((record) => `${record.nodeId}:${record.status}`),
["build:success", "verify:failure"],
);
});
test("propagates abort signal into actor execution and stops the run", async () => { test("propagates abort signal into actor execution and stops the run", async () => {
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-")); const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-")); const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));

View File

@@ -1,6 +1,6 @@
import test from "node:test"; import test from "node:test";
import assert from "node:assert/strict"; import assert from "node:assert/strict";
import { mkdtemp } from "node:fs/promises"; import { mkdtemp, writeFile } from "node:fs/promises";
import { tmpdir } from "node:os"; import { tmpdir } from "node:os";
import { resolve } from "node:path"; import { resolve } from "node:path";
import { FileSystemProjectContextStore } from "../src/agents/project-context.js"; import { FileSystemProjectContextStore } from "../src/agents/project-context.js";
@@ -13,6 +13,7 @@ test("project context store reads defaults and applies domain patches", async ()
const initial = await store.readState(); const initial = await store.readState();
assert.deepEqual(initial, { assert.deepEqual(initial, {
schemaVersion: 1,
globalFlags: {}, globalFlags: {},
artifactPointers: {}, artifactPointers: {},
taskQueue: [], taskQueue: [],
@@ -55,4 +56,35 @@ test("project context store reads defaults and applies domain patches", async ()
updated.taskQueue.map((task) => `${task.id}:${task.status}`), updated.taskQueue.map((task) => `${task.id}:${task.status}`),
["task-1:in_progress", "task-2:pending"], ["task-1:in_progress", "task-2:pending"],
); );
assert.equal(updated.schemaVersion, 1);
});
test("project context parser merges missing root keys with defaults", async () => {
const root = await mkdtemp(resolve(tmpdir(), "ai-ops-project-context-"));
const filePath = resolve(root, "project-context.json");
const store = new FileSystemProjectContextStore({ filePath });
await writeFile(
filePath,
`${JSON.stringify(
{
taskQueue: [
{
id: "task-1",
title: "Migrate",
status: "pending",
},
],
},
null,
2,
)}\n`,
"utf8",
);
const state = await store.readState();
assert.equal(state.schemaVersion, 1);
assert.deepEqual(state.globalFlags, {});
assert.deepEqual(state.artifactPointers, {});
assert.equal(state.taskQueue[0]?.id, "task-1");
}); });