Refactor pipeline policies, MCP registry, and unified config/runtime

This commit is contained in:
2026-02-23 13:56:45 -05:00
parent 889087daa1
commit 9b4216dda9
22 changed files with 1441 additions and 587 deletions

View File

@@ -0,0 +1,66 @@
import type { ActorExecutionResult, ActorFailureKind } from "./pipeline.js";
/** Best-effort conversion of an unknown thrown value into a human-readable message. */
function toErrorMessage(error: unknown): string {
  return error instanceof Error ? error.message : String(error);
}
/**
 * Heuristic detection of "hard" (non-retryable) failure signals in an error
 * message or code: timeouts, network-level errno codes, and HTTP 403/forbidden.
 *
 * Fix: `403` is matched as a standalone token (word boundaries) so unrelated
 * digit runs such as "record 14035" no longer produce a false positive.
 */
function containsHardFailureSignal(value: string): boolean {
  return /(timeout|timed out|network|econnreset|econnrefused|enotfound|\b403\b|forbidden)/i.test(value);
}
/** Extracts an errno-style string `code` from an Error instance, if one is present. */
function toFailureCodeFromError(error: unknown): string | undefined {
  if (error instanceof Error) {
    const { code } = error as NodeJS.ErrnoException;
    if (typeof code === "string") {
      return code;
    }
  }
  return undefined;
}
/**
 * Centralizes failure classification for pipeline execution: deciding whether
 * a result counts as a "hard" (non-retryable) failure, classifying thrown
 * errors, and applying the sequential-hard-failure abort threshold.
 */
export class FailurePolicy {
  /** True when explicitly flagged hard, or a failure whose code/message carries a hard signal. */
  isHardFailure(result: ActorExecutionResult): boolean {
    if (result.failureKind === "hard") {
      return true;
    }
    if (result.status !== "failure") {
      return false;
    }
    const rawMessage = result.payload?.error;
    const messageText = typeof rawMessage === "string" ? rawMessage : "";
    return containsHardFailureSignal(`${result.failureCode ?? ""} ${messageText}`);
  }

  /** Maps a thrown error to a payload message, an optional failure code, and a hard/soft kind. */
  classifyFailureFromError(error: unknown): {
    payloadErrorMessage: string;
    failureCode?: string;
    failureKind: ActorFailureKind;
  } {
    const payloadErrorMessage = toErrorMessage(error);
    const failureCode = toFailureCodeFromError(error);
    const isHard = containsHardFailureSignal(`${failureCode ?? ""} ${payloadErrorMessage}`);
    return {
      payloadErrorMessage,
      ...(failureCode ? { failureCode } : {}),
      failureKind: isHard ? "hard" : "soft",
    };
  }

  /** True once the count of back-to-back hard failures reaches the configured threshold. */
  shouldAbortAfterSequentialHardFailures(
    sequentialHardFailureCount: number,
    threshold: number,
  ): boolean {
    return sequentialHardFailureCount >= threshold;
  }
}

View File

@@ -0,0 +1,105 @@
import { randomUUID } from "node:crypto";
import { mkdir, open, rename, stat, unlink, writeFile } from "node:fs/promises";
import { basename, dirname, resolve } from "node:path";
/** Resolves after roughly `ms` milliseconds; used to pace lock-acquisition retries. */
function sleep(ms: number): Promise<void> {
  return new Promise<void>((done) => setTimeout(done, ms));
}
/** Deletes `path`, treating "already gone" (ENOENT) as success; any other error rethrows. */
async function cleanupFile(path: string): Promise<void> {
  try {
    await unlink(path);
  } catch (error) {
    const code = (error as NodeJS.ErrnoException).code;
    if (code !== "ENOENT") {
      throw error;
    }
  }
}
/**
 * Writes `content` to `path` atomically: the data first lands in a uniquely
 * named temp file in the same directory, then a rename swaps it into place so
 * readers never observe a partially written file. On failure the temp file is
 * removed before the error propagates.
 */
export async function writeUtf8FileAtomic(path: string, content: string): Promise<void> {
  const directory = dirname(path);
  await mkdir(directory, { recursive: true });
  const uniqueSuffix = `${String(process.pid)}.${randomUUID()}`;
  const tempPath = resolve(directory, `.${basename(path)}.${uniqueSuffix}.tmp`);
  try {
    await writeFile(tempPath, content, "utf8");
    await rename(tempPath, path);
  } catch (error) {
    await cleanupFile(tempPath);
    throw error;
  }
}
/**
 * Attempts to create the lock file exclusively ("wx"). Returns the open file
 * handle on success, or undefined when another holder already owns the lock
 * (EEXIST). Any other error propagates to the caller.
 */
async function tryAcquireFileLock(
  lockPath: string,
): Promise<Awaited<ReturnType<typeof open>> | undefined> {
  try {
    const handle = await open(lockPath, "wx");
    const lockInfo = { pid: process.pid, acquiredAt: new Date().toISOString() };
    await handle.writeFile(`${JSON.stringify(lockInfo)}\n`);
    return handle;
  } catch (error) {
    if ((error as NodeJS.ErrnoException).code !== "EEXIST") {
      throw error;
    }
    return undefined;
  }
}
/**
 * Removes the lock file when its mtime is older than `staleAfterMs`, guarding
 * against locks abandoned by crashed processes. A missing lock file (ENOENT)
 * is treated as success; other stat/unlink errors propagate.
 */
async function clearStaleLock(lockPath: string, staleAfterMs: number): Promise<void> {
  try {
    const { mtimeMs } = await stat(lockPath);
    if (Date.now() - mtimeMs > staleAfterMs) {
      await cleanupFile(lockPath);
    }
  } catch (error) {
    if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
      throw error;
    }
  }
}
/**
 * Runs `operation` while holding an exclusive advisory lock implemented as a
 * lock file at `lockPath`, coordinating concurrent writers across processes.
 *
 * @param lockPath  Path of the lock file (created exclusively via "wx").
 * @param operation Work to perform while the lock is held.
 * @param options   maxWaitMs: total wait budget for acquisition (default 5000);
 *                  retryDelayMs: pause between attempts (default 25);
 *                  staleAfterMs: age after which an existing lock is presumed
 *                  abandoned and removed (default 30000).
 * @returns The value resolved by `operation`.
 * @throws Error when the lock cannot be acquired within `maxWaitMs`.
 */
export async function withFileLock<T>(
lockPath: string,
operation: () => Promise<T>,
options?: {
maxWaitMs?: number;
retryDelayMs?: number;
staleAfterMs?: number;
},
): Promise<T> {
const maxWaitMs = options?.maxWaitMs ?? 5000;
const retryDelayMs = options?.retryDelayMs ?? 25;
const staleAfterMs = options?.staleAfterMs ?? 30_000;
await mkdir(dirname(lockPath), { recursive: true });
const startedAt = Date.now();
// Busy-wait with bounded retries to coordinate concurrent writers across processes.
while (true) {
const handle = await tryAcquireFileLock(lockPath);
if (handle) {
try {
return await operation();
} finally {
// Release order: close the handle first, then unlink the lock file.
await handle.close();
await cleanupFile(lockPath);
}
}
// Another process appears to hold the lock; reap it if it looks abandoned.
await clearStaleLock(lockPath, staleAfterMs);
// NOTE(review): the timeout check runs before the next retry, so a lock
// cleared as stale on the final iteration can still surface as a timeout.
if (Date.now() - startedAt >= maxWaitMs) {
throw new Error(`Timed out waiting for file lock: ${lockPath}`);
}
await sleep(retryDelayMs);
}
}

View File

@@ -0,0 +1,126 @@
import {
DomainEventBus,
type DomainEvent,
type DomainEventType,
} from "./domain-events.js";
import type { PipelineNode } from "./manifest.js";
import { type ProjectContextPatch, type FileSystemProjectContextStore } from "./project-context.js";
import { PersonaRegistry } from "./persona-registry.js";
import {
FileSystemStateContextManager,
type SessionHistoryEntry,
} from "./state-context.js";
import type { ActorExecutionResult, ActorResultStatus } from "./pipeline.js";
export type PipelineNodeAttemptObservedEvent = {
sessionId: string;
node: PipelineNode;
attempt: number;
result: ActorExecutionResult;
domainEvents: DomainEvent[];
};
/** Maps an actor result status to the persona behavior hook it should trigger, if any. */
function toBehaviorEvent(status: ActorResultStatus): "onTaskComplete" | "onValidationFail" | undefined {
  switch (status) {
    case "success":
      return "onTaskComplete";
    case "validation_fail":
      return "onValidationFail";
    default:
      return undefined;
  }
}
export interface PipelineLifecycleObserver {
onNodeAttempt(event: PipelineNodeAttemptObservedEvent): Promise<void>;
}
/**
 * Default lifecycle observer: persists the outcome of each pipeline node
 * attempt. On every attempt it (1) fires the persona behavior hook that
 * matches the result status, (2) patches session state with flags, metadata,
 * and history entries, (3) republishes the collected domain events on the bus
 * when one is configured, and (4) records session artifact pointers in the
 * project context store.
 */
export class PersistenceLifecycleObserver implements PipelineLifecycleObserver {
constructor(
private readonly input: {
personaRegistry: PersonaRegistry;
stateManager: FileSystemStateContextManager;
projectContextStore: FileSystemProjectContextStore;
domainEventBus?: DomainEventBus;
},
) {}
/** Persists one node attempt: behavior hook, state patch, event publish, context pointers. */
async onNodeAttempt(event: PipelineNodeAttemptObservedEvent): Promise<void> {
// Only success/validation_fail map to behavior hooks; other statuses skip the registry.
const behaviorEvent = toBehaviorEvent(event.result.status);
const behaviorPatch = behaviorEvent
? await this.input.personaRegistry.emitBehaviorEvent({
personaId: event.node.personaId,
event: behaviorEvent,
sessionId: event.sessionId,
nodeId: event.node.id,
payload: event.result.payload ?? {},
})
: {};
// Legacy-format history entry keyed by the raw result status.
const legacyHistoryEvent: SessionHistoryEntry = {
nodeId: event.node.id,
event: event.result.status,
timestamp: new Date().toISOString(),
...(event.result.payload ? { data: event.result.payload } : {}),
};
// One history entry per domain event, copying only the payload fields that are present.
const domainHistoryEvents: SessionHistoryEntry[] = event.domainEvents.map((domainEvent) => ({
nodeId: event.node.id,
event: domainEvent.type,
timestamp: domainEvent.timestamp,
data: {
source: domainEvent.source,
attempt: domainEvent.attempt,
...(domainEvent.payload.summary ? { summary: domainEvent.payload.summary } : {}),
...(domainEvent.payload.errorCode ? { errorCode: domainEvent.payload.errorCode } : {}),
...(domainEvent.payload.artifactPointer
? { artifactPointer: domainEvent.payload.artifactPointer }
: {}),
...(domainEvent.payload.details ? { details: domainEvent.payload.details } : {}),
},
}));
// Behavior patch spreads after result metadata, so hook output wins on key clashes.
await this.input.stateManager.patchState(event.sessionId, {
...(event.result.stateFlags ? { flags: event.result.stateFlags } : {}),
metadata: {
...(event.result.stateMetadata ?? {}),
...behaviorPatch,
},
historyEvent: legacyHistoryEvent,
historyEvents: domainHistoryEvents,
});
// Publish sequentially (awaited one-by-one) to preserve event order on the bus.
const domainEventBus = this.input.domainEventBus;
if (domainEventBus) {
for (const domainEvent of event.domainEvents) {
await domainEventBus.publish(domainEvent);
}
}
// Record last-completed-node/last-attempt pointers; result-provided pointers override.
const patch: ProjectContextPatch = {
...(event.result.projectContextPatch ?? {}),
artifactPointers: {
[`sessions/${event.sessionId}/last_completed_node`]: event.node.id,
[`sessions/${event.sessionId}/last_attempt`]: String(event.attempt),
...(event.result.projectContextPatch?.artifactPointers ?? {}),
},
};
await this.input.projectContextStore.patchState(patch);
}
}
/** Accumulates domain events emitted during a pipeline run and exposes read-only views. */
export class DomainEventCollector {
  private readonly events: DomainEvent[] = [];

  /** Appends a batch of events to the internal buffer. */
  record(events: DomainEvent[]): void {
    for (const event of events) {
      this.events.push(event);
    }
  }

  /** Projects a list of events onto their type tags. */
  toEventTypes(events: DomainEvent[]): DomainEventType[] {
    return events.map(({ type }) => type);
  }

  /** Returns a defensive copy of everything recorded so far. */
  getAll(): DomainEvent[] {
    return this.events.slice();
  }
}

View File

@@ -1,4 +1,6 @@
import { resolve } from "node:path";
import { getConfig, loadConfig, type AppConfig } from "../config.js";
import { createDefaultMcpRegistry, McpRegistry } from "../mcp.js";
import { parseAgentManifest, type AgentManifest } from "./manifest.js";
import { AgentManager } from "./manager.js";
import {
@@ -8,7 +10,6 @@ import {
} from "./persona-registry.js";
import { PipelineExecutor, type ActorExecutor, type PipelineRunSummary } from "./pipeline.js";
import { FileSystemProjectContextStore } from "./project-context.js";
import { loadAgentManagerLimitsFromEnv } from "./runtime.js";
import { FileSystemStateContextManager, type StoredSessionState } from "./state-context.js";
import type { JsonObject } from "./types.js";
@@ -26,59 +27,16 @@ export type BehaviorHandlerRegistry = Partial<
Record<string, Partial<Record<PersonaBehaviorEvent, PersonaBehaviorHandler>>>
>;
/**
 * Reads an optional integer environment variable. Unset or blank values yield
 * `fallback`; a present value must parse to an integer >= `min`, else throws.
 */
function readOptionalIntegerEnv(
  key:
    | "AGENT_TOPOLOGY_MAX_DEPTH"
    | "AGENT_TOPOLOGY_MAX_RETRIES"
    | "AGENT_RELATIONSHIP_MAX_CHILDREN",
  fallback: number,
  min: number,
): number {
  const trimmed = process.env[key]?.trim();
  if (!trimmed) {
    return fallback;
  }
  const value = Number(trimmed);
  if (Number.isInteger(value) && value >= min) {
    return value;
  }
  throw new Error(`Environment variable ${key} must be an integer >= ${String(min)}.`);
}
/** Returns the trimmed AGENT_STATE_ROOT value, or `fallback` when unset/blank. */
function readOptionalStringEnv(key: "AGENT_STATE_ROOT", fallback: string): string {
  const value = process.env[key]?.trim();
  return value ? value : fallback;
}
/** Returns the trimmed AGENT_PROJECT_CONTEXT_PATH value, or `fallback` when unset/blank. */
function readOptionalProjectContextPathEnv(
  key: "AGENT_PROJECT_CONTEXT_PATH",
  fallback: string,
): string {
  const value = process.env[key]?.trim();
  return value ? value : fallback;
}
export function loadOrchestrationSettingsFromEnv(): Omit<
OrchestrationSettings,
"workspaceRoot" | "runtimeContext"
> {
export function loadOrchestrationSettingsFromEnv(
env: NodeJS.ProcessEnv = process.env,
): Omit<OrchestrationSettings, "workspaceRoot" | "runtimeContext"> {
const config = loadConfig(env);
return {
stateRoot: readOptionalStringEnv("AGENT_STATE_ROOT", ".ai_ops/state"),
projectContextPath: readOptionalProjectContextPathEnv(
"AGENT_PROJECT_CONTEXT_PATH",
".ai_ops/project-context.json",
),
maxDepth: readOptionalIntegerEnv("AGENT_TOPOLOGY_MAX_DEPTH", 4, 1),
maxRetries: readOptionalIntegerEnv("AGENT_TOPOLOGY_MAX_RETRIES", 2, 0),
maxChildren: readOptionalIntegerEnv("AGENT_RELATIONSHIP_MAX_CHILDREN", 4, 1),
stateRoot: config.orchestration.stateRoot,
projectContextPath: config.orchestration.projectContextPath,
maxDepth: config.orchestration.maxDepth,
maxRetries: config.orchestration.maxRetries,
maxChildren: config.orchestration.maxChildren,
};
}
@@ -115,6 +73,7 @@ export class SchemaDrivenExecutionEngine {
private readonly settings: OrchestrationSettings;
private readonly childrenByParent: Map<string, AgentManifest["relationships"]>;
private readonly manager: AgentManager;
private readonly mcpRegistry: McpRegistry;
constructor(input: {
manifest: AgentManifest | unknown;
@@ -125,17 +84,21 @@ export class SchemaDrivenExecutionEngine {
runtimeContext?: Record<string, string | number | boolean>;
};
manager?: AgentManager;
mcpRegistry?: McpRegistry;
config?: Readonly<AppConfig>;
}) {
this.manifest = parseAgentManifest(input.manifest);
const defaults = loadOrchestrationSettingsFromEnv();
const config = input.config ?? getConfig();
this.settings = {
workspaceRoot: resolve(input.settings?.workspaceRoot ?? process.cwd()),
stateRoot: resolve(input.settings?.stateRoot ?? defaults.stateRoot),
projectContextPath: resolve(input.settings?.projectContextPath ?? defaults.projectContextPath),
maxDepth: input.settings?.maxDepth ?? defaults.maxDepth,
maxRetries: input.settings?.maxRetries ?? defaults.maxRetries,
maxChildren: input.settings?.maxChildren ?? defaults.maxChildren,
stateRoot: resolve(input.settings?.stateRoot ?? config.orchestration.stateRoot),
projectContextPath: resolve(
input.settings?.projectContextPath ?? config.orchestration.projectContextPath,
),
maxDepth: input.settings?.maxDepth ?? config.orchestration.maxDepth,
maxRetries: input.settings?.maxRetries ?? config.orchestration.maxRetries,
maxChildren: input.settings?.maxChildren ?? config.orchestration.maxChildren,
runtimeContext: {
...(input.settings?.runtimeContext ?? {}),
},
@@ -149,7 +112,14 @@ export class SchemaDrivenExecutionEngine {
});
this.actorExecutors = toExecutorMap(input.actorExecutors);
this.manager = input.manager ?? new AgentManager(loadAgentManagerLimitsFromEnv());
this.manager =
input.manager ??
new AgentManager({
maxConcurrentAgents: config.agentManager.maxConcurrentAgents,
maxSessionAgents: config.agentManager.maxSessionAgents,
maxRecursiveDepth: config.agentManager.maxRecursiveDepth,
});
this.mcpRegistry = input.mcpRegistry ?? createDefaultMcpRegistry();
for (const persona of this.manifest.personas) {
this.personaRegistry.register({
@@ -227,6 +197,7 @@ export class SchemaDrivenExecutionEngine {
manager: this.manager,
managerSessionId,
projectContextStore: this.projectContextStore,
mcpRegistry: this.mcpRegistry,
},
);
try {

View File

@@ -9,14 +9,19 @@ import {
type DomainEventPayload,
type DomainEventType,
} from "./domain-events.js";
import { FailurePolicy } from "./failure-policy.js";
import {
PersistenceLifecycleObserver,
type PipelineLifecycleObserver,
} from "./lifecycle-observer.js";
import type { AgentManifest, PipelineEdge, PipelineNode, RouteCondition } from "./manifest.js";
import type { AgentManager, RecursiveChildIntent } from "./manager.js";
import type { McpRegistry } from "../mcp/handlers.js";
import { PersonaRegistry } from "./persona-registry.js";
import { type ProjectContextPatch, type FileSystemProjectContextStore } from "./project-context.js";
import {
FileSystemStateContextManager,
type NodeExecutionContext,
type SessionHistoryEntry,
type StoredSessionState,
} from "./state-context.js";
import type { JsonObject } from "./types.js";
@@ -59,11 +64,14 @@ export type PipelineExecutionRecord = {
export type PipelineRunSummary = {
sessionId: string;
status: PipelineAggregateStatus;
records: PipelineExecutionRecord[];
events: DomainEvent[];
finalState: StoredSessionState;
};
export type PipelineAggregateStatus = "success" | "failure";
export type PipelineExecutorOptions = {
workspaceRoot: string;
runtimeContext: Record<string, string | number | boolean>;
@@ -72,6 +80,10 @@ export type PipelineExecutorOptions = {
manager: AgentManager;
managerSessionId: string;
projectContextStore: FileSystemProjectContextStore;
mcpRegistry: McpRegistry;
failurePolicy?: FailurePolicy;
lifecycleObserver?: PipelineLifecycleObserver;
hardFailureThreshold?: number;
};
type QueueItem = {
@@ -110,16 +122,6 @@ type NodeExecutionOutcome = {
hardFailureAttempts: boolean[];
};
/** Translates an actor result status into the matching persona behavior event, if one exists. */
function toBehaviorEvent(status: ActorResultStatus): "onTaskComplete" | "onValidationFail" | undefined {
  return status === "success"
    ? "onTaskComplete"
    : status === "validation_fail"
      ? "onValidationFail"
      : undefined;
}
function shouldEdgeRun(
edge: PipelineEdge,
status: ActorResultStatus,
@@ -228,51 +230,6 @@ function toAbortError(signal: AbortSignal): Error {
return error;
}
/** Renders any thrown value as a string, preferring Error#message for Error instances. */
function toErrorMessage(error: unknown): string {
  return error instanceof Error ? error.message : String(error);
}
function toErrorPayload(error: unknown): JsonObject {
const errorMessage = toErrorMessage(error);
return {
error: errorMessage,
};
}
/** Pulls an errno-style string `code` off an Error instance, when present. */
function toFailureCodeFromError(error: unknown): string | undefined {
  if (error instanceof Error) {
    const candidate = (error as NodeJS.ErrnoException).code;
    if (typeof candidate === "string") {
      return candidate;
    }
  }
  return undefined;
}
/** True when the text mentions a timeout/network/auth signal treated as a hard failure. */
function containsHardFailureSignal(value: string): boolean {
  const hardSignalPattern = /(timeout|timed out|network|econnreset|econnrefused|enotfound|403|forbidden)/i;
  return hardSignalPattern.test(value);
}
/**
 * Decides whether a completed actor result represents a hard failure:
 * either explicitly flagged as hard, or a failure whose code/message carries
 * a timeout/network/403 signal.
 */
function inferHardFailure(result: ActorExecutionResult): boolean {
  if (result.failureKind === "hard") {
    return true;
  }
  if (result.status !== "failure") {
    return false;
  }
  const rawError = result.payload?.error;
  const messageText = typeof rawError === "string" ? rawError : "";
  return containsHardFailureSignal(`${result.failureCode ?? ""} ${messageText}`);
}
function defaultEventPayloadForStatus(status: ActorResultStatus): DomainEventPayload {
if (status === "success") {
return {
@@ -323,6 +280,9 @@ export class PipelineExecutor {
private readonly nodeById = new Map<string, PipelineNode>();
private readonly edgesBySource = new Map<string, PipelineEdge[]>();
private readonly domainEventBus = new DomainEventBus();
private readonly failurePolicy: FailurePolicy;
private readonly lifecycleObserver: PipelineLifecycleObserver;
private readonly hardFailureThreshold: number;
private managerRunCounter = 0;
constructor(
@@ -332,6 +292,17 @@ export class PipelineExecutor {
private readonly actorExecutors: ReadonlyMap<string, ActorExecutor>,
private readonly options: PipelineExecutorOptions,
) {
this.failurePolicy = options.failurePolicy ?? new FailurePolicy();
this.hardFailureThreshold = options.hardFailureThreshold ?? 2;
this.lifecycleObserver =
options.lifecycleObserver ??
new PersistenceLifecycleObserver({
personaRegistry: this.personaRegistry,
stateManager: this.stateManager,
projectContextStore: this.options.projectContextStore,
domainEventBus: this.domainEventBus,
});
for (const node of manifest.pipeline.nodes) {
this.nodeById.set(node.id, node);
}
@@ -418,9 +389,14 @@ export class PipelineExecutor {
sequentialHardFailures = 0;
}
if (sequentialHardFailures >= 2) {
if (
this.failurePolicy.shouldAbortAfterSequentialHardFailures(
sequentialHardFailures,
this.hardFailureThreshold,
)
) {
throw new Error(
"Hard failure threshold reached (>=2 sequential API/network/403 failures). Pipeline aborted.",
`Hard failure threshold reached (>=${String(this.hardFailureThreshold)} sequential API/network/403 failures). Pipeline aborted.`,
);
}
}
@@ -461,7 +437,8 @@ export class PipelineExecutor {
}
const finalState = await this.stateManager.readState(input.sessionId);
if (records.length > 0 && records[records.length - 1]?.status === "success") {
const status = this.computeAggregateStatus(records);
if (status === "success") {
await this.options.projectContextStore.patchState({
artifactPointers: {
[`sessions/${input.sessionId}/final_state`]: this.stateManager.getSessionStatePath(input.sessionId),
@@ -471,12 +448,39 @@ export class PipelineExecutor {
return {
sessionId: input.sessionId,
status,
records,
events,
finalState,
};
}
/**
 * Derives the run-level status from per-node execution records.
 *
 * A run is "success" only when (a) at least one node executed, (b) every
 * terminal node — an executed node none of whose outgoing edges leads to
 * another executed node — finished with status "success", and (c) no executed
 * node's final status is "failure". Otherwise the run is a "failure".
 */
private computeAggregateStatus(records: PipelineExecutionRecord[]): PipelineAggregateStatus {
if (records.length === 0) {
return "failure";
}
// Later records win: a retried node's last recorded attempt determines its status.
const finalStatusByNode = new Map<string, ActorResultStatus>();
for (const record of records) {
finalStatusByNode.set(record.nodeId, record.status);
}
const executedNodeIds = new Set(finalStatusByNode.keys());
// Terminal nodes: executed nodes with no outgoing edge into another executed node.
const terminalNodeIds = [...executedNodeIds].filter((nodeId) => {
const outgoingEdges = this.edgesBySource.get(nodeId) ?? [];
return !outgoingEdges.some((edge) => executedNodeIds.has(edge.to));
});
const allTerminalNodesSucceeded =
terminalNodeIds.length > 0 &&
terminalNodeIds.every((nodeId) => finalStatusByNode.get(nodeId) === "success");
const hasCriticalPathFailure = [...finalStatusByNode.values()].some(
(status) => status === "failure",
);
return allTerminalNodesSucceeded && !hasCriticalPathFailure ? "success" : "failure";
}
private buildExecutionGroups(frontier: QueueItem[]): ExecutionGroup[] {
const groupsByKey = new Map<string, ExecutionGroup>();
@@ -611,7 +615,7 @@ export class PipelineExecutor {
customEvents: result.events,
});
await this.persistNodeAttempt({
await this.lifecycleObserver.onNodeAttempt({
sessionId,
node,
attempt,
@@ -629,7 +633,7 @@ export class PipelineExecutor {
});
nodeEvents.push(...domainEvents);
const hardFailure = inferHardFailure(result);
const hardFailure = this.failurePolicy.isHardFailure(result);
hardFailureAttempts.push(hardFailure);
const payloadForNext = result.payload ?? context.handoff.payload;
@@ -721,16 +725,15 @@ export class PipelineExecutor {
throw toAbortError(input.signal);
}
const failureCode = toFailureCodeFromError(error);
const failureKind = containsHardFailureSignal(`${failureCode ?? ""} ${toErrorMessage(error)}`)
? "hard"
: "soft";
const classified = this.failurePolicy.classifyFailureFromError(error);
return {
status: "failure",
payload: toErrorPayload(error),
failureCode,
failureKind,
payload: {
error: classified.payloadErrorMessage,
},
failureCode: classified.failureCode,
failureKind: classified.failureKind,
};
}
}
@@ -763,68 +766,4 @@ export class PipelineExecutor {
}),
);
}
/**
 * Persists the outcome of a single node attempt: fires the matching persona
 * behavior hook, patches session state (flags, metadata, history entries),
 * publishes the attempt's domain events, and records session artifact
 * pointers in the project context store.
 */
private async persistNodeAttempt(input: {
sessionId: string;
node: PipelineNode;
attempt: number;
result: ActorExecutionResult;
domainEvents: DomainEvent[];
}): Promise<void> {
// Only success/validation_fail statuses map to behavior hooks; others skip the registry.
const behaviorEvent = toBehaviorEvent(input.result.status);
const behaviorPatch = behaviorEvent
? await this.personaRegistry.emitBehaviorEvent({
personaId: input.node.personaId,
event: behaviorEvent,
sessionId: input.sessionId,
nodeId: input.node.id,
payload: input.result.payload ?? {},
})
: {};
// Legacy-format history entry keyed by the raw result status.
const legacyHistoryEvent: SessionHistoryEntry = {
nodeId: input.node.id,
event: input.result.status,
timestamp: new Date().toISOString(),
...(input.result.payload ? { data: input.result.payload } : {}),
};
// One history entry per domain event, copying only the payload fields that are present.
const domainHistoryEvents: SessionHistoryEntry[] = input.domainEvents.map((event) => ({
nodeId: input.node.id,
event: event.type,
timestamp: event.timestamp,
data: {
source: event.source,
attempt: event.attempt,
...(event.payload.summary ? { summary: event.payload.summary } : {}),
...(event.payload.errorCode ? { errorCode: event.payload.errorCode } : {}),
...(event.payload.artifactPointer ? { artifactPointer: event.payload.artifactPointer } : {}),
...(event.payload.details ? { details: event.payload.details } : {}),
},
}));
// Behavior patch spreads after result metadata, so hook output wins on key clashes.
await this.stateManager.patchState(input.sessionId, {
...(input.result.stateFlags ? { flags: input.result.stateFlags } : {}),
metadata: {
...(input.result.stateMetadata ?? {}),
...behaviorPatch,
},
historyEvent: legacyHistoryEvent,
historyEvents: domainHistoryEvents,
});
// Publish sequentially (awaited one-by-one) to preserve event order on the bus.
for (const event of input.domainEvents) {
await this.domainEventBus.publish(event);
}
// Record last-completed-node/last-attempt pointers; result-provided pointers override.
const patch: ProjectContextPatch = {
...(input.result.projectContextPatch ?? {}),
artifactPointers: {
[`sessions/${input.sessionId}/last_completed_node`]: input.node.id,
[`sessions/${input.sessionId}/last_attempt`]: String(input.attempt),
...(input.result.projectContextPatch?.artifactPointers ?? {}),
},
};
await this.options.projectContextStore.patchState(patch);
}
}

View File

@@ -1,7 +1,10 @@
import { mkdir, readFile, writeFile } from "node:fs/promises";
import { dirname, resolve } from "node:path";
import { readFile } from "node:fs/promises";
import { resolve } from "node:path";
import { withFileLock, writeUtf8FileAtomic } from "./file-persistence.js";
import { deepCloneJson, isRecord, type JsonObject, type JsonValue } from "./types.js";
export const PROJECT_CONTEXT_SCHEMA_VERSION = 1;
export type ProjectTaskStatus = "pending" | "in_progress" | "blocked" | "done";
export type ProjectTask = {
@@ -13,6 +16,7 @@ export type ProjectTask = {
};
export type ProjectContextState = {
schemaVersion: number;
globalFlags: Record<string, boolean>;
artifactPointers: Record<string, string>;
taskQueue: ProjectTask[];
@@ -27,6 +31,7 @@ export type ProjectContextPatch = {
};
const DEFAULT_PROJECT_CONTEXT: ProjectContextState = {
schemaVersion: PROJECT_CONTEXT_SCHEMA_VERSION,
globalFlags: {},
artifactPointers: {},
taskQueue: [],
@@ -103,20 +108,41 @@ function toStringRecord(value: unknown, label: string): Record<string, string> {
return out;
}
/** Validates a stored schemaVersion; an absent value defaults to the current version. */
function toSchemaVersion(value: unknown): number {
  if (value === undefined) {
    return PROJECT_CONTEXT_SCHEMA_VERSION;
  }
  if (typeof value === "number" && Number.isInteger(value) && value >= 1) {
    return value;
  }
  throw new Error("Project context schemaVersion must be an integer >= 1.");
}
function toProjectContextState(value: unknown): ProjectContextState {
if (!isRecord(value)) {
throw new Error("Project context store is malformed.");
}
const tasksRaw = value.taskQueue;
if (!Array.isArray(tasksRaw)) {
if (tasksRaw !== undefined && !Array.isArray(tasksRaw)) {
throw new Error("Project context taskQueue is malformed.");
}
return {
globalFlags: toBooleanRecord(value.globalFlags, "Project context globalFlags"),
artifactPointers: toStringRecord(value.artifactPointers, "Project context artifactPointers"),
taskQueue: tasksRaw.map((task, index) => toProjectTask(task, `Project context taskQueue[${String(index)}]`)),
schemaVersion: toSchemaVersion(value.schemaVersion),
globalFlags:
value.globalFlags === undefined
? { ...DEFAULT_PROJECT_CONTEXT.globalFlags }
: toBooleanRecord(value.globalFlags, "Project context globalFlags"),
artifactPointers:
value.artifactPointers === undefined
? { ...DEFAULT_PROJECT_CONTEXT.artifactPointers }
: toStringRecord(value.artifactPointers, "Project context artifactPointers"),
taskQueue: (tasksRaw ?? []).map((task, index) =>
toProjectTask(task, `Project context taskQueue[${String(index)}]`),
),
};
}
@@ -142,10 +168,12 @@ function mergeUpsertTasks(current: ProjectTask[], upserts: ProjectTask[]): Proje
export class FileSystemProjectContextStore {
private readonly filePath: string;
private readonly lockPath: string;
private queue: Promise<void> = Promise.resolve();
constructor(input: { filePath: string }) {
this.filePath = resolve(input.filePath);
this.lockPath = `${this.filePath}.lock`;
}
getFilePath(): string {
@@ -153,6 +181,57 @@ export class FileSystemProjectContextStore {
}
/** Reads the current project context from disk via readStateFromDisk. */
async readState(): Promise<ProjectContextState> {
return this.readStateFromDisk();
}
/**
 * Replaces the on-disk project context wholesale. The state is re-validated
 * (normalized) before writing; writes are serialized in-process, guarded by
 * a cross-process file lock, and flushed with an atomic temp-file swap.
 */
async writeState(state: ProjectContextState): Promise<void> {
await this.runSerialized(async () => {
await withFileLock(this.lockPath, async () => {
const normalizedState = toProjectContextState(state);
await writeUtf8FileAtomic(this.filePath, `${JSON.stringify(normalizedState, null, 2)}\n`);
});
});
}
/**
 * Applies a partial update to the project context and returns the new state.
 * Serialized in-process and guarded by a cross-process file lock so patches
 * never interleave. Merge rules: globalFlags/artifactPointers merge
 * key-by-key; taskQueue replaces the queue wholesale; enqueueTasks appends;
 * upsertTasks are merged via the mergeUpsertTasks helper. All incoming tasks
 * are validated with toProjectTask before they are stored.
 */
async patchState(patch: ProjectContextPatch): Promise<ProjectContextState> {
return this.runSerialized(async () =>
withFileLock(this.lockPath, async () => {
const current = await this.readStateFromDisk();
if (patch.globalFlags) {
Object.assign(current.globalFlags, patch.globalFlags);
}
if (patch.artifactPointers) {
Object.assign(current.artifactPointers, patch.artifactPointers);
}
// Wholesale replacement runs before append/upsert, so those apply on top of it.
if (patch.taskQueue) {
current.taskQueue = patch.taskQueue.map((task, index) =>
toProjectTask(task, `Project context patch taskQueue[${String(index)}]`),
);
}
if (patch.enqueueTasks && patch.enqueueTasks.length > 0) {
current.taskQueue.push(
...patch.enqueueTasks.map((task, index) =>
toProjectTask(task, `Project context patch enqueueTasks[${String(index)}]`),
),
);
}
if (patch.upsertTasks && patch.upsertTasks.length > 0) {
const upsertTasks = patch.upsertTasks.map((task, index) =>
toProjectTask(task, `Project context patch upsertTasks[${String(index)}]`),
);
current.taskQueue = mergeUpsertTasks(current.taskQueue, upsertTasks);
}
// Never downgrade a newer on-disk schema version.
current.schemaVersion = Math.max(current.schemaVersion, PROJECT_CONTEXT_SCHEMA_VERSION);
await writeUtf8FileAtomic(this.filePath, `${JSON.stringify(current, null, 2)}\n`);
return current;
}),
);
}
private async readStateFromDisk(): Promise<ProjectContextState> {
try {
const content = await readFile(this.filePath, "utf8");
const parsed = JSON.parse(content) as unknown;
@@ -165,48 +244,6 @@ export class FileSystemProjectContextStore {
}
}
/** Overwrites the project context file with the given state as pretty-printed JSON. */
async writeState(state: ProjectContextState): Promise<void> {
  await this.runSerialized(async () => {
    await mkdir(dirname(this.filePath), { recursive: true });
    const serialized = `${JSON.stringify(state, null, 2)}\n`;
    await writeFile(this.filePath, serialized, "utf8");
  });
}
/**
 * Applies a partial update to the project context and returns the new state.
 * Runs serialized in-process so concurrent patches cannot interleave. Merge
 * rules: globalFlags/artifactPointers merge key-by-key; taskQueue replaces
 * the queue wholesale; enqueueTasks appends; upsertTasks are merged via the
 * mergeUpsertTasks helper. Incoming tasks are validated with toProjectTask.
 */
async patchState(patch: ProjectContextPatch): Promise<ProjectContextState> {
return this.runSerialized(async () => {
const current = await this.readState();
if (patch.globalFlags) {
Object.assign(current.globalFlags, patch.globalFlags);
}
if (patch.artifactPointers) {
Object.assign(current.artifactPointers, patch.artifactPointers);
}
// Wholesale replacement runs before append/upsert, so those apply on top of it.
if (patch.taskQueue) {
current.taskQueue = patch.taskQueue.map((task, index) =>
toProjectTask(task, `Project context patch taskQueue[${String(index)}]`),
);
}
if (patch.enqueueTasks && patch.enqueueTasks.length > 0) {
current.taskQueue.push(
...patch.enqueueTasks.map((task, index) =>
toProjectTask(task, `Project context patch enqueueTasks[${String(index)}]`),
),
);
}
if (patch.upsertTasks && patch.upsertTasks.length > 0) {
const upsertTasks = patch.upsertTasks.map((task, index) =>
toProjectTask(task, `Project context patch upsertTasks[${String(index)}]`),
);
current.taskQueue = mergeUpsertTasks(current.taskQueue, upsertTasks);
}
await mkdir(dirname(this.filePath), { recursive: true });
await writeFile(this.filePath, `${JSON.stringify(current, null, 2)}\n`, "utf8");
return current;
});
}
private runSerialized<T>(operation: () => Promise<T>): Promise<T> {
const run = this.queue.then(operation, operation);
this.queue = run.then(

View File

@@ -1,3 +1,4 @@
import { getConfig, loadConfig, type AppConfig } from "../config.js";
import { AgentManager, type AgentManagerLimits } from "./manager.js";
import {
createDefaultResourceProvisioningOrchestrator,
@@ -5,143 +6,58 @@ import {
type ResourceProvisioningOrchestrator,
} from "./provisioning.js";
const DEFAULT_LIMITS: AgentManagerLimits = {
maxConcurrentAgents: 4,
maxSessionAgents: 2,
maxRecursiveDepth: 3,
};
const DEFAULT_PROVISIONING_CONFIG: BuiltInProvisioningConfigInput = {
gitWorktree: {
rootDirectory: ".ai_ops/worktrees",
baseRef: "HEAD",
},
portRange: {
basePort: 36000,
blockSize: 32,
blockCount: 512,
primaryPortOffset: 0,
lockDirectory: ".ai_ops/locks/ports",
},
};
/** Reads a positive-integer env var; unset/blank yields `fallback`, invalid values throw. */
function readPositiveIntegerEnv(
  key: "AGENT_MAX_CONCURRENT" | "AGENT_MAX_SESSION" | "AGENT_MAX_RECURSIVE_DEPTH",
  fallback: number,
): number {
  const trimmed = process.env[key]?.trim();
  if (!trimmed) {
    return fallback;
  }
  const value = Number(trimmed);
  if (Number.isInteger(value) && value >= 1) {
    return value;
  }
  throw new Error(`Environment variable ${key} must be a positive integer.`);
}
/** Returns the trimmed env value for `key`, or `fallback` when unset or blank. */
function readOptionalStringEnv(key: string, fallback: string): string {
  const value = process.env[key]?.trim();
  return value ? value : fallback;
}
/** Reads an integer env var bounded below by `bounds.min`; unset/blank yields `fallback`. */
function readIntegerEnv(
  key: string,
  fallback: number,
  bounds: {
    min: number;
  },
): number {
  const trimmed = process.env[key]?.trim();
  if (!trimmed) {
    return fallback;
  }
  const value = Number(trimmed);
  if (Number.isInteger(value) && value >= bounds.min) {
    return value;
  }
  throw new Error(`Environment variable ${key} must be an integer >= ${String(bounds.min)}.`);
}
export function loadAgentManagerLimitsFromEnv(): AgentManagerLimits {
function toProvisioningConfig(input: Readonly<AppConfig>): BuiltInProvisioningConfigInput {
return {
maxConcurrentAgents: readPositiveIntegerEnv(
"AGENT_MAX_CONCURRENT",
DEFAULT_LIMITS.maxConcurrentAgents,
),
maxSessionAgents: readPositiveIntegerEnv(
"AGENT_MAX_SESSION",
DEFAULT_LIMITS.maxSessionAgents,
),
maxRecursiveDepth: readPositiveIntegerEnv(
"AGENT_MAX_RECURSIVE_DEPTH",
DEFAULT_LIMITS.maxRecursiveDepth,
),
gitWorktree: {
rootDirectory: input.provisioning.gitWorktree.rootDirectory,
baseRef: input.provisioning.gitWorktree.baseRef,
},
portRange: {
basePort: input.provisioning.portRange.basePort,
blockSize: input.provisioning.portRange.blockSize,
blockCount: input.provisioning.portRange.blockCount,
primaryPortOffset: input.provisioning.portRange.primaryPortOffset,
lockDirectory: input.provisioning.portRange.lockDirectory,
},
};
}
/**
 * Loads AgentManager limits by delegating to the unified config loader.
 *
 * @param env Environment to read (defaults to process.env).
 * @returns The concurrency, per-session, and recursion-depth limits.
 */
export function loadAgentManagerLimitsFromEnv(env: NodeJS.ProcessEnv = process.env): AgentManagerLimits {
const config = loadConfig(env);
return {
maxConcurrentAgents: config.agentManager.maxConcurrentAgents,
maxSessionAgents: config.agentManager.maxSessionAgents,
maxRecursiveDepth: config.agentManager.maxRecursiveDepth,
};
}
let managerSingleton: AgentManager | undefined;
let provisioningSingleton: ResourceProvisioningOrchestrator | undefined;
export function getAgentManager(): AgentManager {
export function getAgentManager(config: Readonly<AppConfig> = getConfig()): AgentManager {
if (!managerSingleton) {
managerSingleton = new AgentManager(loadAgentManagerLimitsFromEnv());
managerSingleton = new AgentManager({
maxConcurrentAgents: config.agentManager.maxConcurrentAgents,
maxSessionAgents: config.agentManager.maxSessionAgents,
maxRecursiveDepth: config.agentManager.maxRecursiveDepth,
});
}
return managerSingleton;
}
export function loadProvisioningConfigFromEnv(): BuiltInProvisioningConfigInput {
return {
gitWorktree: {
rootDirectory: readOptionalStringEnv(
"AGENT_WORKTREE_ROOT",
DEFAULT_PROVISIONING_CONFIG.gitWorktree?.rootDirectory ?? ".ai_ops/worktrees",
),
baseRef: readOptionalStringEnv(
"AGENT_WORKTREE_BASE_REF",
DEFAULT_PROVISIONING_CONFIG.gitWorktree?.baseRef ?? "HEAD",
),
},
portRange: {
basePort: readIntegerEnv(
"AGENT_PORT_BASE",
DEFAULT_PROVISIONING_CONFIG.portRange?.basePort ?? 36000,
{ min: 1 },
),
blockSize: readIntegerEnv(
"AGENT_PORT_BLOCK_SIZE",
DEFAULT_PROVISIONING_CONFIG.portRange?.blockSize ?? 32,
{ min: 1 },
),
blockCount: readIntegerEnv(
"AGENT_PORT_BLOCK_COUNT",
DEFAULT_PROVISIONING_CONFIG.portRange?.blockCount ?? 512,
{ min: 1 },
),
primaryPortOffset: readIntegerEnv(
"AGENT_PORT_PRIMARY_OFFSET",
DEFAULT_PROVISIONING_CONFIG.portRange?.primaryPortOffset ?? 0,
{ min: 0 },
),
lockDirectory: readOptionalStringEnv(
"AGENT_PORT_LOCK_DIR",
DEFAULT_PROVISIONING_CONFIG.portRange?.lockDirectory ?? ".ai_ops/locks/ports",
),
},
};
/**
 * Loads the built-in provisioning configuration (git worktree + port
 * range settings) from the unified runtime config for the given env.
 *
 * @param env - Environment map passed to loadConfig (defaults to process.env).
 */
export function loadProvisioningConfigFromEnv(
  env: NodeJS.ProcessEnv = process.env,
): BuiltInProvisioningConfigInput {
  return toProvisioningConfig(loadConfig(env));
}
export function getResourceProvisioningOrchestrator(): ResourceProvisioningOrchestrator {
export function getResourceProvisioningOrchestrator(
config: Readonly<AppConfig> = getConfig(),
): ResourceProvisioningOrchestrator {
if (!provisioningSingleton) {
provisioningSingleton = createDefaultResourceProvisioningOrchestrator(
loadProvisioningConfigFromEnv(),
toProvisioningConfig(config),
);
}
return provisioningSingleton;

View File

@@ -1,5 +1,6 @@
import { mkdir, readFile, writeFile } from "node:fs/promises";
import { mkdir, readFile } from "node:fs/promises";
import { dirname, resolve } from "node:path";
import { writeUtf8FileAtomic } from "./file-persistence.js";
import { deepCloneJson, isRecord, type JsonObject, type JsonValue } from "./types.js";
export type SessionHistoryEntry = {
@@ -200,7 +201,7 @@ export class FileSystemStateContextManager {
async writeState(sessionId: string, state: StoredSessionState): Promise<void> {
const path = toStatePath(this.rootDirectory, sessionId);
await mkdir(dirname(path), { recursive: true });
await writeFile(path, `${JSON.stringify(state, null, 2)}\n`, "utf8");
await writeUtf8FileAtomic(path, `${JSON.stringify(state, null, 2)}\n`);
}
async patchState(
@@ -248,7 +249,7 @@ export class FileSystemStateContextManager {
const path = toHandoffPath(this.rootDirectory, sessionId, handoff.nodeId);
await mkdir(dirname(path), { recursive: true });
await writeFile(path, `${JSON.stringify(nodeHandoff, null, 2)}\n`, "utf8");
await writeUtf8FileAtomic(path, `${JSON.stringify(nodeHandoff, null, 2)}\n`);
return nodeHandoff;
}

277
src/config.ts Normal file
View File

@@ -0,0 +1,277 @@
import type { AgentManagerLimits } from "./agents/manager.js";
import type { BuiltInProvisioningConfig } from "./agents/provisioning.js";
/** Provider credentials and tuning for the Codex / Claude runners. */
export type ProviderRuntimeConfig = {
  codexApiKey?: string;
  openAiApiKey?: string;
  openAiBaseUrl?: string;
  // Read from CODEX_SKIP_GIT_CHECK; defaults to true in loadConfig.
  codexSkipGitCheck: boolean;
  anthropicApiKey?: string;
  claudeModel?: string;
  claudeCodePath?: string;
};
/** MCP loader settings. */
export type McpRuntimeConfig = {
  // Path to the shared MCP config file (default "./mcp.config.json").
  configPath: string;
};
/** Orchestration limits and state-file locations. */
export type OrchestrationRuntimeConfig = {
  stateRoot: string;
  projectContextPath: string;
  maxDepth: number;
  maxRetries: number;
  maxChildren: number;
};
/** Resource-discovery file settings. */
export type DiscoveryRuntimeConfig = {
  // Path relative to the provisioned working directory.
  fileRelativePath: string;
};
/** Root of the unified application configuration (see loadConfig). */
export type AppConfig = {
  provider: ProviderRuntimeConfig;
  mcp: McpRuntimeConfig;
  agentManager: AgentManagerLimits;
  orchestration: OrchestrationRuntimeConfig;
  provisioning: BuiltInProvisioningConfig;
  discovery: DiscoveryRuntimeConfig;
};
// Fallback values used by loadConfig when the corresponding environment
// variables are unset or blank.
const DEFAULT_AGENT_MANAGER: AgentManagerLimits = {
  maxConcurrentAgents: 4,
  maxSessionAgents: 2,
  maxRecursiveDepth: 3,
};
const DEFAULT_ORCHESTRATION: OrchestrationRuntimeConfig = {
  stateRoot: ".ai_ops/state",
  projectContextPath: ".ai_ops/project-context.json",
  maxDepth: 4,
  maxRetries: 2,
  maxChildren: 4,
};
const DEFAULT_PROVISIONING: BuiltInProvisioningConfig = {
  gitWorktree: {
    rootDirectory: ".ai_ops/worktrees",
    baseRef: "HEAD",
  },
  portRange: {
    // 512 blocks of 32 ports starting at 36000.
    basePort: 36000,
    blockSize: 32,
    blockCount: 512,
    primaryPortOffset: 0,
    lockDirectory: ".ai_ops/locks/ports",
  },
};
const DEFAULT_DISCOVERY: DiscoveryRuntimeConfig = {
  fileRelativePath: ".agent-context/resources.json",
};
/**
 * Returns the trimmed value of env[key], or undefined when the variable
 * is unset, empty, or whitespace-only.
 */
function readOptionalString(
  env: NodeJS.ProcessEnv,
  key: string,
): string | undefined {
  const trimmed = env[key]?.trim();
  return trimmed ? trimmed : undefined;
}
/**
 * Returns the trimmed value of env[key], or the fallback when the
 * variable is unset, empty, or whitespace-only.
 */
function readStringWithFallback(
  env: NodeJS.ProcessEnv,
  key: string,
  fallback: string,
): string {
  const trimmed = env[key]?.trim();
  return trimmed ? trimmed : fallback;
}
/**
 * Reads an integer from env[key] with a lower bound.
 *
 * Returns the fallback when the variable is unset or blank; throws when
 * the value does not parse to an integer >= bounds.min.
 */
function readIntegerWithBounds(
  env: NodeJS.ProcessEnv,
  key: string,
  fallback: number,
  bounds: {
    min: number;
  },
): number {
  const rawValue = env[key]?.trim();
  if (!rawValue) {
    return fallback;
  }
  const parsedValue = Number(rawValue);
  if (Number.isInteger(parsedValue) && parsedValue >= bounds.min) {
    return parsedValue;
  }
  throw new Error(`Environment variable ${key} must be an integer >= ${String(bounds.min)}.`);
}
/**
 * Reads a strict boolean from env[key].
 *
 * Returns the fallback when unset or blank; only the exact strings
 * "true" and "false" are accepted, anything else throws.
 */
function readBooleanWithFallback(
  env: NodeJS.ProcessEnv,
  key: string,
  fallback: boolean,
): boolean {
  const rawValue = env[key]?.trim();
  if (!rawValue) {
    return fallback;
  }
  switch (rawValue) {
    case "true":
      return true;
    case "false":
      return false;
    default:
      throw new Error(`Environment variable ${key} must be "true" or "false".`);
  }
}
/**
 * Recursively freezes an object graph in place and returns it.
 *
 * Primitives and null pass through unchanged. Already-frozen objects
 * are returned as-is, which also makes the traversal safe on cyclic
 * structures (the original recursed unconditionally and would overflow
 * the stack on a cycle).
 *
 * @param value - The value to freeze; mutated in place when it is an object.
 * @returns The same value, typed readonly.
 */
function deepFreeze<T>(value: T): Readonly<T> {
  if (value === null || typeof value !== "object") {
    return value;
  }
  // Already frozen: nothing to do, and this guard breaks cycles.
  if (Object.isFrozen(value)) {
    return value;
  }
  // Freeze the parent before recursing so a cycle back to this object
  // hits the isFrozen guard above instead of recursing forever.
  Object.freeze(value);
  const record = value as Record<string, unknown>;
  for (const nested of Object.values(record)) {
    deepFreeze(nested);
  }
  return value as Readonly<T>;
}
/**
 * Builds the complete application configuration from environment
 * variables, applying the DEFAULT_* fallbacks declared above, and
 * returns it deeply frozen (via deepFreeze) so callers cannot mutate
 * shared runtime state.
 *
 * @param env - Environment map to read (defaults to process.env).
 * @returns An immutable AppConfig snapshot.
 * @throws Error when an integer-valued variable is not an integer
 *   within its lower bound, or a boolean-valued variable is not exactly
 *   "true"/"false" (see readIntegerWithBounds / readBooleanWithFallback).
 */
export function loadConfig(env: NodeJS.ProcessEnv = process.env): Readonly<AppConfig> {
  const config: AppConfig = {
    // Provider credentials/tuning; optional fields stay undefined when
    // the variable is unset or blank.
    provider: {
      codexApiKey: readOptionalString(env, "CODEX_API_KEY"),
      openAiApiKey: readOptionalString(env, "OPENAI_API_KEY"),
      openAiBaseUrl: readOptionalString(env, "OPENAI_BASE_URL"),
      // Defaults to true: skip the Codex git-repo check unless explicitly disabled.
      codexSkipGitCheck: readBooleanWithFallback(env, "CODEX_SKIP_GIT_CHECK", true),
      anthropicApiKey: readOptionalString(env, "ANTHROPIC_API_KEY"),
      claudeModel: readOptionalString(env, "CLAUDE_MODEL"),
      claudeCodePath: readOptionalString(env, "CLAUDE_CODE_PATH"),
    },
    mcp: {
      configPath: readStringWithFallback(env, "MCP_CONFIG_PATH", "./mcp.config.json"),
    },
    // Agent-manager concurrency limits; all require min 1.
    agentManager: {
      maxConcurrentAgents: readIntegerWithBounds(
        env,
        "AGENT_MAX_CONCURRENT",
        DEFAULT_AGENT_MANAGER.maxConcurrentAgents,
        { min: 1 },
      ),
      maxSessionAgents: readIntegerWithBounds(
        env,
        "AGENT_MAX_SESSION",
        DEFAULT_AGENT_MANAGER.maxSessionAgents,
        { min: 1 },
      ),
      maxRecursiveDepth: readIntegerWithBounds(
        env,
        "AGENT_MAX_RECURSIVE_DEPTH",
        DEFAULT_AGENT_MANAGER.maxRecursiveDepth,
        { min: 1 },
      ),
    },
    orchestration: {
      stateRoot: readStringWithFallback(
        env,
        "AGENT_STATE_ROOT",
        DEFAULT_ORCHESTRATION.stateRoot,
      ),
      projectContextPath: readStringWithFallback(
        env,
        "AGENT_PROJECT_CONTEXT_PATH",
        DEFAULT_ORCHESTRATION.projectContextPath,
      ),
      maxDepth: readIntegerWithBounds(
        env,
        "AGENT_TOPOLOGY_MAX_DEPTH",
        DEFAULT_ORCHESTRATION.maxDepth,
        { min: 1 },
      ),
      // Zero retries is a valid configuration, hence min: 0.
      maxRetries: readIntegerWithBounds(
        env,
        "AGENT_TOPOLOGY_MAX_RETRIES",
        DEFAULT_ORCHESTRATION.maxRetries,
        { min: 0 },
      ),
      maxChildren: readIntegerWithBounds(
        env,
        "AGENT_RELATIONSHIP_MAX_CHILDREN",
        DEFAULT_ORCHESTRATION.maxChildren,
        { min: 1 },
      ),
    },
    provisioning: {
      gitWorktree: {
        rootDirectory: readStringWithFallback(
          env,
          "AGENT_WORKTREE_ROOT",
          DEFAULT_PROVISIONING.gitWorktree.rootDirectory,
        ),
        baseRef: readStringWithFallback(
          env,
          "AGENT_WORKTREE_BASE_REF",
          DEFAULT_PROVISIONING.gitWorktree.baseRef,
        ),
      },
      portRange: {
        basePort: readIntegerWithBounds(
          env,
          "AGENT_PORT_BASE",
          DEFAULT_PROVISIONING.portRange.basePort,
          { min: 1 },
        ),
        blockSize: readIntegerWithBounds(
          env,
          "AGENT_PORT_BLOCK_SIZE",
          DEFAULT_PROVISIONING.portRange.blockSize,
          { min: 1 },
        ),
        blockCount: readIntegerWithBounds(
          env,
          "AGENT_PORT_BLOCK_COUNT",
          DEFAULT_PROVISIONING.portRange.blockCount,
          { min: 1 },
        ),
        // Offset 0 (use the first port in the block) is valid, hence min: 0.
        primaryPortOffset: readIntegerWithBounds(
          env,
          "AGENT_PORT_PRIMARY_OFFSET",
          DEFAULT_PROVISIONING.portRange.primaryPortOffset,
          { min: 0 },
        ),
        lockDirectory: readStringWithFallback(
          env,
          "AGENT_PORT_LOCK_DIR",
          DEFAULT_PROVISIONING.portRange.lockDirectory,
        ),
      },
    },
    discovery: {
      fileRelativePath: readStringWithFallback(
        env,
        "AGENT_DISCOVERY_FILE_RELATIVE_PATH",
        DEFAULT_DISCOVERY.fileRelativePath,
      ),
    },
  };
  // Freeze recursively so every consumer sees one immutable snapshot.
  return deepFreeze(config);
}
let configSingleton: Readonly<AppConfig> | undefined;
/**
 * Returns the process-wide cached configuration, loading it from
 * process.env on first access. Use clearConfigCacheForTests to reset.
 */
export function getConfig(): Readonly<AppConfig> {
  configSingleton ??= loadConfig(process.env);
  return configSingleton;
}
/**
 * Test-only helper: drops the cached config so the next getConfig()
 * call reloads from process.env.
 */
export function clearConfigCacheForTests(): void {
  configSingleton = undefined;
}

View File

@@ -1,99 +1,75 @@
import "dotenv/config";
import { query, type Options } from "@anthropic-ai/claude-agent-sdk";
import { pathToFileURL } from "node:url";
import { getAgentManager, getResourceProvisioningOrchestrator } from "../agents/runtime.js";
import { loadMcpConfigFromEnv } from "../mcp.js";
import { getConfig } from "../config.js";
import { createSessionContext } from "./session-context.js";
function requiredPrompt(argv: string[]): string {
const prompt = argv.slice(2).join(" ").trim();
if (!prompt) {
throw new Error("Usage: npm run claude -- \"your prompt\"");
throw new Error('Usage: npm run claude -- "your prompt"');
}
return prompt;
}
function buildOptions(): Options {
function buildOptions(config = getConfig()): Options {
return {
maxTurns: 1,
...(process.env.CLAUDE_MODEL ? { model: process.env.CLAUDE_MODEL } : {}),
...(process.env.CLAUDE_CODE_PATH
? { pathToClaudeCodeExecutable: process.env.CLAUDE_CODE_PATH }
...(config.provider.claudeModel ? { model: config.provider.claudeModel } : {}),
...(config.provider.claudeCodePath
? { pathToClaudeCodeExecutable: config.provider.claudeCodePath }
: {}),
};
}
export async function runClaudePrompt(prompt: string): Promise<void> {
const agentManager = getAgentManager();
const agentSession = agentManager.createSession();
const resourceProvisioning = getResourceProvisioningOrchestrator();
const mcp = loadMcpConfigFromEnv({
providerHint: "claude",
const config = getConfig();
const sessionContext = await createSessionContext("claude", {
prompt,
config,
});
let provisionedResources:
| Awaited<ReturnType<typeof resourceProvisioning.provisionSession>>
| undefined;
try {
provisionedResources = await resourceProvisioning.provisionSession({
sessionId: agentSession.id,
resources: [{ kind: "git-worktree" }, { kind: "port-range" }],
});
const runtimeInjection = await provisionedResources.buildRuntimeInjection({
discoveryFileRelativePath: process.env.AGENT_DISCOVERY_FILE_RELATIVE_PATH,
baseEnv: process.env,
});
const promptWithContext = provisionedResources.composePrompt(prompt, [
`Discovery file: ${runtimeInjection.discoveryFilePath}`,
"Resource env vars are pre-injected (AGENT_WORKTREE_PATH, AGENT_PORT_RANGE_START, AGENT_PORT_RANGE_END, AGENT_PORT_PRIMARY).",
]);
const finalResponse = await sessionContext.runInSession(async () => {
const session = query({
prompt: sessionContext.promptWithContext,
options: {
...buildOptions(config),
...(sessionContext.mcp.claudeMcpServers ? { mcpServers: sessionContext.mcp.claudeMcpServers } : {}),
cwd: sessionContext.runtimeInjection.workingDirectory,
env: sessionContext.runtimeInjection.env,
},
});
const finalResponse = await agentSession.runAgent({
depth: 0,
run: async () => {
const session = query({
prompt: promptWithContext,
options: {
...buildOptions(),
...(mcp.claudeMcpServers ? { mcpServers: mcp.claudeMcpServers } : {}),
cwd: runtimeInjection.workingDirectory,
env: runtimeInjection.env,
},
});
let result = "";
let result = "";
try {
for await (const message of session) {
if (message.type === "result" && message.subtype === "success") {
result = message.result.trim();
}
if (message.type === "result" && message.subtype !== "success") {
const detail = message.errors.join("; ");
throw new Error(
`Claude query failed (${message.subtype})${detail ? `: ${detail}` : ""}`,
);
}
try {
for await (const message of session) {
if (message.type === "result" && message.subtype === "success") {
result = message.result.trim();
}
} finally {
session.close();
}
if (!result) {
throw new Error("Claude run completed without a final result.");
if (message.type === "result" && message.subtype !== "success") {
const detail = message.errors.join("; ");
throw new Error(
`Claude query failed (${message.subtype})${detail ? `: ${detail}` : ""}`,
);
}
}
} finally {
session.close();
}
return result;
},
if (!result) {
throw new Error("Claude run completed without a final result.");
}
return result;
});
console.log(finalResponse);
} finally {
if (provisionedResources) {
await provisionedResources.release();
}
agentSession.close();
await sessionContext.close();
}
}

View File

@@ -1,67 +1,43 @@
import "dotenv/config";
import { Codex } from "@openai/codex-sdk";
import { pathToFileURL } from "node:url";
import { getAgentManager, getResourceProvisioningOrchestrator } from "../agents/runtime.js";
import { loadMcpConfigFromEnv } from "../mcp.js";
import { getConfig } from "../config.js";
import { createSessionContext } from "./session-context.js";
function requiredPrompt(argv: string[]): string {
const prompt = argv.slice(2).join(" ").trim();
if (!prompt) {
throw new Error("Usage: npm run codex -- \"your prompt\"");
throw new Error('Usage: npm run codex -- "your prompt"');
}
return prompt;
}
export async function runCodexPrompt(prompt: string): Promise<void> {
const agentManager = getAgentManager();
const agentSession = agentManager.createSession();
const resourceProvisioning = getResourceProvisioningOrchestrator();
const apiKey = process.env.CODEX_API_KEY ?? process.env.OPENAI_API_KEY;
const mcp = loadMcpConfigFromEnv({
providerHint: "codex",
const config = getConfig();
const sessionContext = await createSessionContext("codex", {
prompt,
config,
});
let provisionedResources:
| Awaited<ReturnType<typeof resourceProvisioning.provisionSession>>
| undefined;
try {
provisionedResources = await resourceProvisioning.provisionSession({
sessionId: agentSession.id,
resources: [{ kind: "git-worktree" }, { kind: "port-range" }],
});
const runtimeInjection = await provisionedResources.buildRuntimeInjection({
discoveryFileRelativePath: process.env.AGENT_DISCOVERY_FILE_RELATIVE_PATH,
baseEnv: process.env,
});
const apiKey = config.provider.codexApiKey ?? config.provider.openAiApiKey;
const codex = new Codex({
...(apiKey ? { apiKey } : {}),
...(process.env.OPENAI_BASE_URL ? { baseUrl: process.env.OPENAI_BASE_URL } : {}),
...(mcp.codexConfig ? { config: mcp.codexConfig } : {}),
env: runtimeInjection.env,
...(config.provider.openAiBaseUrl ? { baseUrl: config.provider.openAiBaseUrl } : {}),
...(sessionContext.mcp.codexConfig ? { config: sessionContext.mcp.codexConfig } : {}),
env: sessionContext.runtimeInjection.env,
});
const thread = codex.startThread({
workingDirectory: runtimeInjection.workingDirectory,
skipGitRepoCheck: process.env.CODEX_SKIP_GIT_CHECK !== "false",
workingDirectory: sessionContext.runtimeInjection.workingDirectory,
skipGitRepoCheck: config.provider.codexSkipGitCheck,
});
const promptWithContext = provisionedResources.composePrompt(prompt, [
`Discovery file: ${runtimeInjection.discoveryFilePath}`,
"Resource env vars are pre-injected (AGENT_WORKTREE_PATH, AGENT_PORT_RANGE_START, AGENT_PORT_RANGE_END, AGENT_PORT_PRIMARY).",
]);
const turn = await agentSession.runAgent({
depth: 0,
run: () => thread.run(promptWithContext),
});
const turn = await sessionContext.runInSession(() => thread.run(sessionContext.promptWithContext));
console.log(turn.finalResponse.trim() || "(No response text returned)");
} finally {
if (provisionedResources) {
await provisionedResources.release();
}
agentSession.close();
await sessionContext.close();
}
}

View File

@@ -0,0 +1,105 @@
import { getConfig, type AppConfig } from "../config.js";
import type { AgentSession } from "../agents/manager.js";
import type { ProvisionedResources } from "../agents/provisioning.js";
import {
getAgentManager,
getResourceProvisioningOrchestrator,
} from "../agents/runtime.js";
import {
getDefaultMcpRegistry,
loadMcpConfigFromEnv,
type LoadedMcpConfig,
type McpRegistry,
} from "../mcp.js";
/** Which CLI runner owns the session. */
export type SessionProvider = "codex" | "claude";
/**
 * Everything a provider runner needs for one provisioned session:
 * resolved MCP config, the prompt with injected resource context, the
 * runtime env/cwd injection, and lifecycle hooks.
 */
export type SessionContext = {
  provider: SessionProvider;
  sessionId: string;
  mcp: LoadedMcpConfig;
  // Original prompt plus resource-discovery context lines.
  promptWithContext: string;
  runtimeInjection: Awaited<ReturnType<ProvisionedResources["buildRuntimeInjection"]>>;
  // Runs the given work inside the agent session (depth 0).
  runInSession: <T>(run: () => Promise<T>) => Promise<T>;
  // Releases provisioned resources and closes the agent session; idempotent.
  close: () => Promise<void>;
};
/**
 * Provisions a full session for a provider runner: creates an agent
 * session, provisions a git worktree + port range, builds the runtime
 * env injection, composes the prompt with resource context, and loads
 * the MCP config for the provider.
 *
 * On any failure during setup the partially provisioned resources are
 * released before the error is rethrown. The returned close() is
 * idempotent and must be awaited by the caller when done.
 *
 * @param provider - Which runner ("codex" | "claude") the session is for.
 * @param input - Prompt plus optional config/registry overrides
 *   (defaults come from getConfig / getDefaultMcpRegistry).
 */
export async function createSessionContext(
  provider: SessionProvider,
  input: {
    prompt: string;
    config?: Readonly<AppConfig>;
    mcpRegistry?: McpRegistry;
  },
): Promise<SessionContext> {
  const config = input.config ?? getConfig();
  const mcpRegistry = input.mcpRegistry ?? getDefaultMcpRegistry();
  const agentManager = getAgentManager(config);
  const agentSession = agentManager.createSession();
  const resourceProvisioning = getResourceProvisioningOrchestrator(config);
  let provisionedResources: ProvisionedResources | undefined;
  // Guard so close() is safe to call more than once (setup-failure path
  // and the caller's own close both go through it).
  let closed = false;
  const close = async (): Promise<void> => {
    if (closed) {
      return;
    }
    closed = true;
    // Release resources before closing the session that owns them.
    if (provisionedResources) {
      await provisionedResources.release();
    }
    agentSession.close();
  };
  try {
    provisionedResources = await resourceProvisioning.provisionSession({
      sessionId: agentSession.id,
      resources: [{ kind: "git-worktree" }, { kind: "port-range" }],
    });
    const runtimeInjection = await provisionedResources.buildRuntimeInjection({
      discoveryFileRelativePath: config.discovery.fileRelativePath,
      baseEnv: process.env,
    });
    const promptWithContext = provisionedResources.composePrompt(input.prompt, [
      `Discovery file: ${runtimeInjection.discoveryFilePath}`,
      "Resource env vars are pre-injected (AGENT_WORKTREE_PATH, AGENT_PORT_RANGE_START, AGENT_PORT_RANGE_END, AGENT_PORT_PRIMARY).",
    ]);
    const mcp = loadMcpConfigFromEnv(
      {
        providerHint: provider,
        prompt: input.prompt,
      },
      {
        config,
        registry: mcpRegistry,
      },
    );
    return {
      provider,
      sessionId: agentSession.id,
      mcp,
      promptWithContext,
      runtimeInjection,
      runInSession: <T>(run: () => Promise<T>) =>
        runWithAgentSession(agentSession, run),
      close,
    };
  } catch (error) {
    // Setup failed part-way: release whatever was provisioned, then rethrow.
    await close();
    throw error;
  }
}
/**
 * Executes `run` inside the agent session at root depth (0) and
 * returns its result.
 */
async function runWithAgentSession<T>(agentSession: AgentSession, run: () => Promise<T>): Promise<T> {
  const rootRequest = { depth: 0, run };
  return agentSession.runAgent(rootRequest);
}

View File

@@ -1,17 +1,18 @@
import { existsSync, readFileSync } from "node:fs";
import { resolve } from "node:path";
import type { CodexOptions } from "@openai/codex-sdk";
import { getConfig, type AppConfig } from "./config.js";
import { normalizeSharedMcpConfigFile } from "./mcp/converters.js";
import {
createDefaultMcpRegistry,
createMcpHandlerShell,
listMcpHandlers,
registerMcpHandler,
resolveServerWithHandler,
type McpHandlerBusinessLogic,
type McpHandlerBusinessLogicInput,
type McpHandlerInput,
type McpHandlerResult,
type McpHandlerShellOptions,
type McpHandlerUtils,
McpRegistry,
type McpServerHandler,
} from "./mcp/handlers.js";
import type {
@@ -24,16 +25,15 @@ function isRecord(value: unknown): value is Record<string, unknown> {
return typeof value === "object" && value !== null && !Array.isArray(value);
}
function readConfigFile(pathFromEnv: string | undefined): {
function readConfigFile(configPath: string): {
config?: SharedMcpConfigFile;
sourcePath?: string;
} {
const explicitPath = pathFromEnv?.trim();
const candidatePath = explicitPath || "./mcp.config.json";
const candidatePath = configPath.trim() || "./mcp.config.json";
const resolvedPath = resolve(process.cwd(), candidatePath);
if (!existsSync(resolvedPath)) {
if (explicitPath) {
if (candidatePath !== "./mcp.config.json") {
throw new Error(`MCP config file not found: ${resolvedPath}`);
}
return {};
@@ -45,11 +45,29 @@ function readConfigFile(pathFromEnv: string | undefined): {
throw new Error(`MCP config file must contain a JSON object: ${resolvedPath}`);
}
return { config: parsed as SharedMcpConfigFile, sourcePath: resolvedPath };
return {
config: normalizeSharedMcpConfigFile(parsed as SharedMcpConfigFile),
sourcePath: resolvedPath,
};
}
export function loadMcpConfigFromEnv(context: McpLoadContext = {}): LoadedMcpConfig {
const { config, sourcePath } = readConfigFile(process.env.MCP_CONFIG_PATH);
const defaultMcpRegistry = createDefaultMcpRegistry();
/** Returns the module-level MCP registry shared by default callers. */
export function getDefaultMcpRegistry(): McpRegistry {
  return defaultMcpRegistry;
}
export function loadMcpConfigFromEnv(
context: McpLoadContext = {},
options?: {
config?: Readonly<AppConfig>;
registry?: McpRegistry;
},
): LoadedMcpConfig {
const runtimeConfig = options?.config ?? getConfig();
const registry = options?.registry ?? defaultMcpRegistry;
const { config, sourcePath } = readConfigFile(runtimeConfig.mcp.configPath);
if (!config) {
return {};
}
@@ -59,7 +77,7 @@ export function loadMcpConfigFromEnv(context: McpLoadContext = {}): LoadedMcpCon
const resolvedHandlers: Record<string, string> = {};
for (const [serverName, server] of Object.entries(config.servers ?? {})) {
const resolved = resolveServerWithHandler({
const resolved = registry.resolveServerWithHandler({
serverName,
server,
context,
@@ -102,7 +120,15 @@ export function loadMcpConfigFromEnv(context: McpLoadContext = {}): LoadedMcpCon
};
}
export { createMcpHandlerShell, listMcpHandlers, registerMcpHandler };
/** Registers a handler on the shared default registry (module-level convenience). */
export function registerMcpHandler(handler: McpServerHandler): void {
  defaultMcpRegistry.register(handler);
}
/** Lists handlers registered on the shared default registry, in registration order. */
export function listMcpHandlers(): McpServerHandler[] {
  return defaultMcpRegistry.listHandlers();
}
export { createDefaultMcpRegistry, createMcpHandlerShell, McpRegistry };
export type {
LoadedMcpConfig,
McpHandlerBusinessLogic,

View File

@@ -1,5 +1,42 @@
import type { McpServerConfig } from "@anthropic-ai/claude-agent-sdk";
import type { CodexConfigObject, SharedMcpServer, Transport } from "./types.js";
import type {
CodexConfigObject,
SharedMcpConfigFile,
SharedMcpServer,
Transport,
} from "./types.js";
/**
 * Combines the legacy `http_headers` field with `headers` (the latter
 * wins on key conflicts). Returns undefined when no headers are set.
 */
function mergeHeaders(server: SharedMcpServer): Record<string, string> | undefined {
  const combined: Record<string, string> = {};
  Object.assign(combined, server.http_headers ?? {}, server.headers ?? {});
  if (Object.keys(combined).length === 0) {
    return undefined;
  }
  return combined;
}
/**
 * Normalizes a shared server entry so header values live only under
 * `headers`: the legacy `http_headers` field is merged in (with
 * `headers` winning on conflicts) and both raw fields are dropped.
 */
export function normalizeSharedMcpServer(server: SharedMcpServer): SharedMcpServer {
  const { headers, http_headers: httpHeaders, ...rest } = server;
  const merged = { ...(httpHeaders ?? {}), ...(headers ?? {}) };
  if (Object.keys(merged).length === 0) {
    return { ...rest };
  }
  return { ...rest, headers: merged };
}
export function normalizeSharedMcpConfigFile(config: SharedMcpConfigFile): SharedMcpConfigFile {
const normalizedServers: Record<string, SharedMcpServer> = {};
for (const [serverName, server] of Object.entries(config.servers ?? {})) {
normalizedServers[serverName] = normalizeSharedMcpServer(server);
}
return {
...config,
...(Object.keys(normalizedServers).length > 0 ? { servers: normalizedServers } : {}),
};
}
export function inferTransport(server: SharedMcpServer): Transport {
if (server.type) {
@@ -10,6 +47,7 @@ export function inferTransport(server: SharedMcpServer): Transport {
export function toCodexServerConfig(serverName: string, server: SharedMcpServer): CodexConfigObject {
const type = inferTransport(server);
const headers = mergeHeaders(server);
if (type === "stdio" && !server.command) {
throw new Error(`Shared MCP server "${serverName}" requires "command" for stdio transport.`);
@@ -38,9 +76,8 @@ export function toCodexServerConfig(serverName: string, server: SharedMcpServer)
if (server.bearer_token_env_var) {
config.bearer_token_env_var = server.bearer_token_env_var;
}
const httpHeaders = server.http_headers ?? server.headers;
if (httpHeaders) {
config.http_headers = httpHeaders;
if (headers) {
config.http_headers = headers;
}
if (server.env_http_headers) config.env_http_headers = server.env_http_headers;
if (server.env_vars) config.env_vars = server.env_vars;
@@ -50,6 +87,7 @@ export function toCodexServerConfig(serverName: string, server: SharedMcpServer)
export function toClaudeServerConfig(serverName: string, server: SharedMcpServer): McpServerConfig {
const type = inferTransport(server);
const headers = mergeHeaders(server);
if (type === "stdio") {
if (!server.command) {
@@ -70,7 +108,6 @@ export function toClaudeServerConfig(serverName: string, server: SharedMcpServer
return {
type,
url: server.url,
...(server.headers ? { headers: server.headers } : {}),
...(headers ? { headers } : {}),
};
}

View File

@@ -127,107 +127,121 @@ function applyEnabledByDefault(input: McpHandlerBusinessLogicInput): McpHandlerR
: input.baseResult;
}
const context7Handler = createMcpHandlerShell({
id: "context7",
description:
"Dedicated extension point for Context7 policy/behavior. Business logic belongs in applyBusinessLogic.",
matches: (input) => isNamedLike(input, ["context7"]),
applyBusinessLogic: applyEnabledByDefault,
});
const claudeTaskMasterHandler = createMcpHandlerShell({
id: "claude-task-master",
description:
"Dedicated extension point for Claude Task Master policy/behavior. Business logic belongs in applyBusinessLogic.",
matches: (input) =>
isNamedLike(input, ["claude-task-master", "task-master", "taskmaster"]),
applyBusinessLogic: applyEnabledByDefault,
});
const genericHandler: McpServerHandler = {
id: "generic",
description: "Default passthrough mapping for project-specific MCP servers.",
matches: () => true,
resolve: ({ serverName, server, utils: localUtils }) =>
createDefaultResult({ serverName, server, localUtils }),
};
const handlerRegistry = new Map<string, McpServerHandler>();
const handlerOrder: string[] = [];
function installBuiltinHandlers(): void {
registerMcpHandler(context7Handler);
registerMcpHandler(claudeTaskMasterHandler);
registerMcpHandler(genericHandler);
}
export function registerMcpHandler(handler: McpServerHandler): void {
if (handlerRegistry.has(handler.id)) {
handlerRegistry.set(handler.id, handler);
return;
}
handlerRegistry.set(handler.id, handler);
handlerOrder.push(handler.id);
}
export function listMcpHandlers(): McpServerHandler[] {
return handlerOrder
.map((id) => handlerRegistry.get(id))
.filter((handler): handler is McpServerHandler => Boolean(handler));
}
function resolveHandler(serverName: string, server: SharedMcpServer): McpServerHandler {
if (server.handler) {
const explicit = handlerRegistry.get(server.handler);
if (!explicit) {
throw new Error(
`Unknown MCP handler "${server.handler}" configured for server "${serverName}".`,
);
}
return explicit;
}
for (const id of handlerOrder) {
const handler = handlerRegistry.get(id);
if (!handler || id === "generic") {
continue;
}
if (handler.matches({ serverName, server })) {
return handler;
}
}
const fallback = handlerRegistry.get("generic");
if (!fallback) {
throw new Error('No MCP fallback handler registered. Expected handler id "generic".');
}
return fallback;
}
export function resolveServerWithHandler(input: {
serverName: string;
server: SharedMcpServer;
context: McpLoadContext;
fullConfig: SharedMcpConfigFile;
}): McpHandlerResult & { handlerId: string } {
const { serverName, server, context, fullConfig } = input;
const handler = resolveHandler(serverName, server);
const handlerConfig = {
...(fullConfig.handlerSettings?.[handler.id] ?? {}),
...(server.handlerOptions ?? {}),
};
const result = handler.resolve({
serverName,
server,
context,
handlerConfig,
fullConfig,
utils,
function createBuiltinHandlers(): McpServerHandler[] {
const context7Handler = createMcpHandlerShell({
id: "context7",
description:
"Dedicated extension point for Context7 policy/behavior. Business logic belongs in applyBusinessLogic.",
matches: (input) => isNamedLike(input, ["context7"]),
applyBusinessLogic: applyEnabledByDefault,
});
return {
...result,
handlerId: handler.id,
const claudeTaskMasterHandler = createMcpHandlerShell({
id: "claude-task-master",
description:
"Dedicated extension point for Claude Task Master policy/behavior. Business logic belongs in applyBusinessLogic.",
matches: (input) =>
isNamedLike(input, ["claude-task-master", "task-master", "taskmaster"]),
applyBusinessLogic: applyEnabledByDefault,
});
const genericHandler: McpServerHandler = {
id: "generic",
description: "Default passthrough mapping for project-specific MCP servers.",
matches: () => true,
resolve: ({ serverName, server, utils: localUtils }) =>
createDefaultResult({ serverName, server, localUtils }),
};
return [context7Handler, claudeTaskMasterHandler, genericHandler];
}
installBuiltinHandlers();
/**
 * Ordered registry of MCP server handlers.
 *
 * Handlers are matched in registration order; the handler with id
 * "generic" is reserved as the fallback and is skipped during matching.
 * Re-registering an existing id replaces the handler but keeps its
 * original position in the order.
 */
export class McpRegistry {
  private readonly handlerRegistry = new Map<string, McpServerHandler>();
  private readonly handlerOrder: string[] = [];
  constructor(input?: { handlers?: McpServerHandler[] }) {
    for (const handler of input?.handlers ?? []) {
      this.register(handler);
    }
  }
  /** Adds or replaces a handler; a replaced id keeps its original order slot. */
  register(handler: McpServerHandler): void {
    if (this.handlerRegistry.has(handler.id)) {
      this.handlerRegistry.set(handler.id, handler);
      return;
    }
    this.handlerRegistry.set(handler.id, handler);
    this.handlerOrder.push(handler.id);
  }
  /** Returns registered handlers in registration order. */
  listHandlers(): McpServerHandler[] {
    return this.handlerOrder
      .map((id) => this.handlerRegistry.get(id))
      .filter((handler): handler is McpServerHandler => Boolean(handler));
  }
  /**
   * Resolves a server entry through its handler and returns the result
   * tagged with the handler id. Per-handler config is built from the
   * file-level handlerSettings merged under the server's handlerOptions
   * (server options win).
   */
  resolveServerWithHandler(input: {
    serverName: string;
    server: SharedMcpServer;
    context: McpLoadContext;
    fullConfig: SharedMcpConfigFile;
  }): McpHandlerResult & { handlerId: string } {
    const { serverName, server, context, fullConfig } = input;
    const handler = this.resolveHandler(serverName, server);
    const handlerConfig = {
      ...(fullConfig.handlerSettings?.[handler.id] ?? {}),
      ...(server.handlerOptions ?? {}),
    };
    const result = handler.resolve({
      serverName,
      server,
      context,
      handlerConfig,
      fullConfig,
      // `utils` is the module-level helper bundle shared by all handlers.
      utils,
    });
    return {
      ...result,
      handlerId: handler.id,
    };
  }
  /**
   * Picks the handler for a server: an explicit `server.handler` id
   * (error if unknown), else the first matching handler in registration
   * order (skipping "generic"), else the required "generic" fallback.
   */
  private resolveHandler(serverName: string, server: SharedMcpServer): McpServerHandler {
    if (server.handler) {
      const explicit = this.handlerRegistry.get(server.handler);
      if (!explicit) {
        throw new Error(
          `Unknown MCP handler "${server.handler}" configured for server "${serverName}".`,
        );
      }
      return explicit;
    }
    for (const id of this.handlerOrder) {
      const handler = this.handlerRegistry.get(id);
      if (!handler || id === "generic") {
        continue;
      }
      if (handler.matches({ serverName, server })) {
        return handler;
      }
    }
    const fallback = this.handlerRegistry.get("generic");
    if (!fallback) {
      throw new Error('No MCP fallback handler registered. Expected handler id "generic".');
    }
    return fallback;
  }
}
export function createDefaultMcpRegistry(): McpRegistry {
return new McpRegistry({
handlers: createBuiltinHandlers(),
});
}