first commit

This commit is contained in:
2026-02-23 12:06:13 -05:00
commit 53af0d44cd
33 changed files with 6483 additions and 0 deletions

686
src/agents/manager.ts Normal file
View File

@@ -0,0 +1,686 @@
import { randomUUID } from "node:crypto";
/** Hard caps enforced by {@link AgentManager} across all sessions. */
export type AgentManagerLimits = {
  /** Maximum agents running at once across the whole manager. */
  maxConcurrentAgents: number;
  /** Maximum agents running at once within a single session. */
  maxSessionAgents: number;
  /** Maximum recursion depth allowed for nested agent runs. */
  maxRecursiveDepth: number;
};
/** Internal bookkeeping for one session tracked by the manager. */
type SessionState = {
  // Count of agents currently running in this session.
  activeAgents: number;
  // Once closed, no new agents may be acquired; pending work is aborted.
  closed: boolean;
  // Aborted when the session is closed, cancelling in-flight work.
  abortController: AbortController;
  parentSessionId?: string;
  // Child sessions spawned by recursive fanout; closed along with the parent.
  childSessionIds: Set<string>;
};
/** A caller parked in the acquire queue until capacity frees up. */
type Waiter = {
  sessionId: string;
  depth: number;
  signal?: AbortSignal;
  resolve: () => void;
  reject: (error: Error) => void;
  // Detaches the abort listener registered for this waiter, if any.
  cleanupSignalListener?: () => void;
};
/** Throws unless `value` is an integer greater than or equal to 1. */
function assertPositiveInteger(value: number, name: string): void {
  const isValid = Number.isInteger(value) && value >= 1;
  if (!isValid) {
    throw new Error(`${name} must be a positive integer.`);
  }
}
/** Normalizes any thrown value into an `Error` instance. */
function toError(error: unknown): Error {
  return error instanceof Error ? error : new Error(String(error));
}
/**
 * Converts an abort reason into an Error. Existing Error reasons pass
 * through untouched; everything else is wrapped in an `AbortError`.
 */
function toAbortError(reason: unknown): Error {
  if (reason instanceof Error) {
    return reason;
  }
  const message =
    reason === undefined
      ? "The operation was aborted."
      : `The operation was aborted: ${String(reason)}.`;
  const abortError = new Error(message);
  abortError.name = "AbortError";
  return abortError;
}
/** Throws the signal's abort reason (as an Error) when it is already aborted. */
function throwIfAborted(signal?: AbortSignal): void {
  if (signal === undefined || !signal.aborted) {
    return;
  }
  throw toAbortError(signal.reason);
}
/**
 * Combines several optional abort signals into one: the returned signal
 * aborts as soon as any input signal aborts (carrying that signal's reason).
 * If an input is already aborted, the combined signal aborts immediately
 * and no further listeners are attached. Call `cleanup` to detach all
 * forwarding listeners once the combined signal is no longer needed.
 */
function createLinkedAbortSignal(signals: Array<AbortSignal | undefined>): {
  signal: AbortSignal;
  cleanup: () => void;
} {
  const controller = new AbortController();
  const detachers: Array<() => void> = [];
  for (const source of signals) {
    if (source === undefined) {
      continue;
    }
    if (source.aborted) {
      // Propagate the existing abort and stop wiring up later signals.
      controller.abort(source.reason);
      break;
    }
    const forwardAbort = () => {
      if (controller.signal.aborted) {
        return;
      }
      controller.abort(source.reason);
    };
    source.addEventListener("abort", forwardAbort, { once: true });
    detachers.push(() => source.removeEventListener("abort", forwardAbort));
  }
  const cleanup = () => {
    for (const detach of detachers) {
      detach();
    }
  };
  return { signal: controller.signal, cleanup };
}
/** Describes one child agent a node wants to spawn during fanout. */
export type RecursiveChildIntent = {
  persona: string;
  task: string;
  context?: Record<string, unknown>;
};
/** Context passed to a node's `run` callback. */
export type RecursiveRunContext<TIntent extends RecursiveChildIntent> = {
  sessionId: string;
  depth: number;
  // Linked signal: aborts when the caller's signal or the session aborts.
  signal: AbortSignal;
  parentSessionId?: string;
  // Present for child nodes spawned by a fanout; absent at the root.
  intent?: TIntent;
};
/** One completed child of a fanout, paired with the intent that spawned it. */
export type RecursiveChildResult<TIntent extends RecursiveChildIntent, TOutput> = {
  childSessionId: string;
  intent: TIntent;
  output: TOutput;
};
/** A node's request to fan out into children and then aggregate their outputs. */
export type RecursiveFanoutPlan<TIntent extends RecursiveChildIntent, TOutput> = {
  type: "fanout";
  intents: TIntent[];
  aggregate: (input: {
    sessionId: string;
    depth: number;
    signal: AbortSignal;
    childResults: ReadonlyArray<RecursiveChildResult<TIntent, TOutput>>;
  }) => Promise<TOutput> | TOutput;
};
/** Explicit terminal result from a node's `run` callback. */
export type RecursiveCompleteResult<TOutput> = {
  type: "complete";
  output: TOutput;
};
/**
 * What a node may return: a bare output (treated as complete), an explicit
 * complete result, or a fanout plan that spawns children.
 */
export type RecursiveRunOutput<TIntent extends RecursiveChildIntent, TOutput> =
  | TOutput
  | RecursiveCompleteResult<TOutput>
  | RecursiveFanoutPlan<TIntent, TOutput>;
/** Outcome reported to `releaseForChild` after a child finishes. */
type RecursiveChildOutcome = { status: "success" } | { status: "failure"; error: Error };
/**
 * Optional hooks invoked around each child run — e.g. to allocate and
 * release per-child resources. `releaseForChild` always runs, success or
 * failure, and receives the child's outcome.
 */
export type RecursiveChildMiddleware<TIntent extends RecursiveChildIntent> = {
  allocateForChild?: (input: {
    parentSessionId: string;
    childSessionId: string;
    depth: number;
    childIndex: number;
    childCount: number;
    intent: TIntent;
    signal: AbortSignal;
  }) => Promise<void> | void;
  releaseForChild?: (input: {
    parentSessionId: string;
    childSessionId: string;
    depth: number;
    childIndex: number;
    childCount: number;
    intent: TIntent;
    signal: AbortSignal;
    outcome: RecursiveChildOutcome;
  }) => Promise<void> | void;
};
/** True only for plain objects: non-null, not an array. */
function isObject(value: unknown): value is Record<string, unknown> {
  if (value === null || Array.isArray(value)) {
    return false;
  }
  return typeof value === "object";
}
/** Type guard: does `value` look like a fanout plan returned by a node? */
function isRecursiveFanoutPlan<TIntent extends RecursiveChildIntent, TOutput>(
  value: unknown,
): value is RecursiveFanoutPlan<TIntent, TOutput> {
  return (
    isObject(value) &&
    value.type === "fanout" &&
    Array.isArray(value.intents) &&
    typeof value.aggregate === "function"
  );
}
/** Type guard: does `value` look like an explicit complete result? */
function isRecursiveCompleteResult<TOutput>(value: unknown): value is RecursiveCompleteResult<TOutput> {
  return isObject(value) && value.type === "complete" && "output" in value;
}
/** Wraps a bare output value in an explicit complete result. */
function toRecursiveComplete<TOutput>(value: TOutput): RecursiveCompleteResult<TOutput> {
  return { type: "complete", output: value };
}
/**
 * Lightweight handle bound to a single session id. All behavior (limits,
 * queuing, closing) is delegated to the owning {@link AgentManager}.
 */
export class AgentSession {
  constructor(
    private readonly manager: AgentManager,
    public readonly id: string,
  ) {}
  /**
   * Runs a unit of agent work inside this session, respecting the manager's
   * concurrency and depth limits. Depth defaults to 0 (top-level agent).
   */
  async runAgent<T>(input: {
    depth?: number;
    signal?: AbortSignal;
    run: () => Promise<T> | T;
  }): Promise<T> {
    return this.manager.runInSession({
      sessionId: this.id,
      depth: input.depth ?? 0,
      signal: input.signal,
      run: input.run,
    });
  }
  /** Closes the session via the manager (aborts its in-flight work). */
  close(): void {
    this.manager.closeSession(this.id);
  }
}
/**
 * Coordinates concurrent agent execution across sessions. Enforces a global
 * concurrency cap, a per-session cap, and a recursion-depth cap; callers
 * that exceed capacity are queued (FIFO per drain pass) until a slot frees.
 * Also drives recursive "fanout" runs where a node spawns child sessions,
 * runs them in parallel, and aggregates their outputs.
 */
export class AgentManager {
  private readonly limits: AgentManagerLimits;
  private readonly sessions = new Map<string, SessionState>();
  // Callers waiting for capacity; drained whenever a slot is released.
  private readonly waiters: Waiter[] = [];
  // Total agents currently running across all sessions.
  private activeAgents = 0;
  constructor(limits: AgentManagerLimits) {
    assertPositiveInteger(limits.maxConcurrentAgents, "maxConcurrentAgents");
    assertPositiveInteger(limits.maxSessionAgents, "maxSessionAgents");
    assertPositiveInteger(limits.maxRecursiveDepth, "maxRecursiveDepth");
    this.limits = limits;
  }
  /**
   * Registers a new session (id defaults to a random UUID) and returns a
   * handle for it. When `parentSessionId` is given, the parent must be
   * active; the child is tracked so closing the parent cascades to it.
   */
  createSession(
    sessionId: string = randomUUID(),
    options?: {
      parentSessionId?: string;
    },
  ): AgentSession {
    if (this.sessions.has(sessionId)) {
      throw new Error(`Agent session "${sessionId}" already exists.`);
    }
    const parentSessionId = options?.parentSessionId;
    if (parentSessionId) {
      const parent = this.sessions.get(parentSessionId);
      if (!parent || parent.closed) {
        throw new Error(`Parent agent session "${parentSessionId}" is not active.`);
      }
    }
    this.sessions.set(sessionId, {
      activeAgents: 0,
      closed: false,
      abortController: new AbortController(),
      // Spread keeps the property absent (not explicitly undefined) when no parent.
      ...(parentSessionId ? { parentSessionId } : {}),
      childSessionIds: new Set<string>(),
    });
    if (parentSessionId) {
      this.sessions.get(parentSessionId)?.childSessionIds.add(sessionId);
    }
    return new AgentSession(this, sessionId);
  }
  /**
   * Closes a session: aborts its signal, rejects its queued waiters, and
   * recursively closes child sessions. State is deleted immediately only if
   * nothing is still running; otherwise deletion is deferred to release().
   */
  closeSession(sessionId: string): void {
    const session = this.sessions.get(sessionId);
    if (!session) {
      return;
    }
    session.closed = true;
    if (!session.abortController.signal.aborted) {
      session.abortController.abort(new Error(`Agent session "${sessionId}" was closed.`));
    }
    this.rejectWaitersForSession(sessionId);
    // Copy before iterating: closing children mutates childSessionIds.
    for (const childSessionId of [...session.childSessionIds]) {
      this.closeSession(childSessionId);
    }
    if (session.activeAgents === 0 && session.childSessionIds.size === 0) {
      this.deleteSession(sessionId);
    }
  }
  /**
   * Acquires a slot (waiting if at capacity), runs `run`, and always
   * releases the slot afterwards. Rechecks the abort signal after the
   * (possibly long) wait in acquire.
   */
  async runInSession<T>(input: {
    sessionId: string;
    depth: number;
    signal?: AbortSignal;
    run: () => Promise<T> | T;
  }): Promise<T> {
    const { sessionId, depth, signal, run } = input;
    this.assertDepth(depth);
    await this.acquire(sessionId, depth, signal);
    try {
      throwIfAborted(signal);
      return await run();
    } finally {
      this.release(sessionId);
    }
  }
  /** Public entry point for a recursive (possibly fanning-out) agent run. */
  async runRecursiveAgent<TIntent extends RecursiveChildIntent, TOutput>(input: {
    sessionId: string;
    depth: number;
    signal?: AbortSignal;
    run: (
      input: RecursiveRunContext<TIntent>,
    ) => Promise<RecursiveRunOutput<TIntent, TOutput>> | RecursiveRunOutput<TIntent, TOutput>;
    childMiddleware?: RecursiveChildMiddleware<TIntent>;
  }): Promise<TOutput> {
    return this.runRecursiveNode({
      sessionId: input.sessionId,
      depth: input.depth,
      signal: input.signal,
      run: input.run,
      childMiddleware: input.childMiddleware,
    });
  }
  /** Returns a defensive copy of the configured limits. */
  getLimits(): AgentManagerLimits {
    return { ...this.limits };
  }
  /** Current number of agents running across all sessions. */
  getActiveAgentCount(): number {
    return this.activeAgents;
  }
  private assertDepth(depth: number): void {
    if (!Number.isInteger(depth) || depth < 0) {
      throw new Error("Agent depth must be a non-negative integer.");
    }
    if (depth > this.limits.maxRecursiveDepth) {
      throw new Error(
        `Agent depth ${depth} exceeds maxRecursiveDepth ${this.limits.maxRecursiveDepth}.`,
      );
    }
  }
  // A slot is available only when both the global and per-session caps allow it.
  private canAcquire(session: SessionState): boolean {
    return (
      this.activeAgents < this.limits.maxConcurrentAgents &&
      session.activeAgents < this.limits.maxSessionAgents
    );
  }
  /**
   * Takes a slot immediately when capacity allows; otherwise enqueues a
   * waiter. An abort signal removes the waiter from the queue and rejects
   * it with an AbortError.
   */
  private acquire(sessionId: string, depth: number, signal?: AbortSignal): Promise<void> {
    const session = this.sessions.get(sessionId);
    if (!session || session.closed) {
      throw new Error(`Agent session "${sessionId}" is not active.`);
    }
    this.assertDepth(depth);
    throwIfAborted(signal);
    if (this.canAcquire(session)) {
      this.activeAgents += 1;
      session.activeAgents += 1;
      return Promise.resolve();
    }
    return new Promise<void>((resolve, reject) => {
      this.waiters.push({
        sessionId,
        depth,
        signal,
        resolve,
        reject,
      });
      if (!signal) {
        return;
      }
      const waiter = this.waiters[this.waiters.length - 1];
      if (!waiter) {
        return;
      }
      const onAbort = () => {
        // Only reject if the waiter is still queued; drainWaiters may have
        // already resolved and removed it.
        const index = this.waiters.indexOf(waiter);
        if (index >= 0) {
          this.waiters.splice(index, 1);
          waiter.reject(toAbortError(signal.reason));
        }
      };
      signal.addEventListener("abort", onAbort, { once: true });
      waiter.cleanupSignalListener = () => {
        signal.removeEventListener("abort", onAbort);
      };
    });
  }
  /**
   * Frees a slot, garbage-collects a closed-and-idle session, then gives
   * queued waiters a chance to run.
   */
  private release(sessionId: string): void {
    const session = this.sessions.get(sessionId);
    if (!session) {
      return;
    }
    // Math.max guards against double-release driving counts negative.
    this.activeAgents = Math.max(this.activeAgents - 1, 0);
    session.activeAgents = Math.max(session.activeAgents - 1, 0);
    if (session.closed && session.activeAgents === 0) {
      if (session.childSessionIds.size === 0) {
        this.deleteSession(sessionId);
      }
    }
    this.drainWaiters();
  }
  // Rejects every queued waiter that belongs to the given (closing) session.
  private rejectWaitersForSession(sessionId: string): void {
    for (let index = 0; index < this.waiters.length; ) {
      const waiter = this.waiters[index];
      if (waiter?.sessionId !== sessionId) {
        index += 1;
        continue;
      }
      this.waiters.splice(index, 1);
      waiter.cleanupSignalListener?.();
      waiter.reject(new Error(`Agent session "${sessionId}" was closed.`));
    }
  }
  /**
   * Single pass over the waiter queue: drops aborted/invalid waiters and
   * resolves as many as current capacity allows, in queue order.
   */
  private drainWaiters(): void {
    for (let index = 0; index < this.waiters.length; ) {
      const waiter = this.waiters[index];
      if (!waiter) {
        index += 1;
        continue;
      }
      if (waiter.signal?.aborted) {
        this.waiters.splice(index, 1);
        waiter.cleanupSignalListener?.();
        waiter.reject(toAbortError(waiter.signal.reason));
        continue;
      }
      const session = this.sessions.get(waiter.sessionId);
      if (!session || session.closed) {
        this.waiters.splice(index, 1);
        waiter.cleanupSignalListener?.();
        waiter.reject(new Error(`Agent session "${waiter.sessionId}" is not active.`));
        continue;
      }
      if (waiter.depth > this.limits.maxRecursiveDepth) {
        this.waiters.splice(index, 1);
        waiter.cleanupSignalListener?.();
        waiter.reject(
          new Error(
            `Agent depth ${waiter.depth} exceeds maxRecursiveDepth ${this.limits.maxRecursiveDepth}.`,
          ),
        );
        continue;
      }
      if (!this.canAcquire(session)) {
        // Capacity exhausted for this waiter's session; try the next one.
        index += 1;
        continue;
      }
      this.activeAgents += 1;
      session.activeAgents += 1;
      this.waiters.splice(index, 1);
      waiter.cleanupSignalListener?.();
      waiter.resolve();
    }
  }
  /**
   * Removes session state and, if its parent is closed and now fully idle,
   * cascades deletion upward.
   */
  private deleteSession(sessionId: string): void {
    const session = this.sessions.get(sessionId);
    if (!session) {
      return;
    }
    const parentSessionId = session.parentSessionId;
    this.sessions.delete(sessionId);
    if (parentSessionId) {
      const parent = this.sessions.get(parentSessionId);
      parent?.childSessionIds.delete(sessionId);
      if (parent && parent.closed && parent.activeAgents === 0 && parent.childSessionIds.size === 0) {
        this.deleteSession(parentSessionId);
      }
    }
  }
  private getSessionSignal(sessionId: string): AbortSignal {
    const session = this.sessions.get(sessionId);
    if (!session || session.closed) {
      throw new Error(`Agent session "${sessionId}" is not active.`);
    }
    return session.abortController.signal;
  }
  // Fanout children run one level deeper; refuse if that would break the cap.
  private assertCanSpawnChild(depth: number): void {
    const childDepth = depth + 1;
    if (childDepth > this.limits.maxRecursiveDepth) {
      throw new Error(
        `Cannot spawn child at depth ${childDepth} because maxRecursiveDepth is ${this.limits.maxRecursiveDepth}.`,
      );
    }
  }
  // Deterministic child id derived from the parent id and 1-based child index.
  private buildChildSessionId(parentSessionId: string, childIndex: number): string {
    return `${parentSessionId}_child_${childIndex + 1}`;
  }
  /**
   * Runs one node of a recursive tree. If the node returns a fanout plan,
   * all child intents run in parallel (first failure aborts the rest via a
   * shared fanout controller) and their results are aggregated; otherwise
   * the node's (possibly implicit) complete output is returned.
   */
  private async runRecursiveNode<TIntent extends RecursiveChildIntent, TOutput>(input: {
    sessionId: string;
    depth: number;
    signal?: AbortSignal;
    run: (
      input: RecursiveRunContext<TIntent>,
    ) => Promise<RecursiveRunOutput<TIntent, TOutput>> | RecursiveRunOutput<TIntent, TOutput>;
    childMiddleware?: RecursiveChildMiddleware<TIntent>;
    parentSessionId?: string;
    intent?: TIntent;
  }): Promise<TOutput> {
    const sessionSignal = this.getSessionSignal(input.sessionId);
    // Node aborts when either the caller's signal or the session aborts.
    const linkedSignal = createLinkedAbortSignal([input.signal, sessionSignal]);
    try {
      throwIfAborted(linkedSignal.signal);
      const nodeResultRaw = await this.runInSession({
        sessionId: input.sessionId,
        depth: input.depth,
        signal: linkedSignal.signal,
        run: () =>
          input.run({
            sessionId: input.sessionId,
            depth: input.depth,
            signal: linkedSignal.signal,
            ...(input.parentSessionId ? { parentSessionId: input.parentSessionId } : {}),
            ...(input.intent !== undefined ? { intent: input.intent } : {}),
          }),
      });
      // Bare return values are promoted to an explicit complete result.
      const nodeResult = isRecursiveFanoutPlan<TIntent, TOutput>(nodeResultRaw)
        ? nodeResultRaw
        : isRecursiveCompleteResult<TOutput>(nodeResultRaw)
          ? nodeResultRaw
          : toRecursiveComplete(nodeResultRaw);
      if (nodeResult.type === "complete") {
        return nodeResult.output;
      }
      this.assertCanSpawnChild(input.depth);
      const childDepth = input.depth + 1;
      // First failing child aborts its siblings through this controller.
      const fanoutAbortController = new AbortController();
      const fanoutSignal = createLinkedAbortSignal([linkedSignal.signal, fanoutAbortController.signal]);
      try {
        const childRuns = nodeResult.intents.map((intent, childIndex) =>
          this.runRecursiveChild({
            parentSessionId: input.sessionId,
            childDepth,
            childIndex,
            childCount: nodeResult.intents.length,
            intent,
            signal: fanoutSignal.signal,
            run: input.run,
            childMiddleware: input.childMiddleware,
          }).catch((error: unknown) => {
            if (!fanoutAbortController.signal.aborted) {
              fanoutAbortController.abort(error);
            }
            throw error;
          }),
        );
        // allSettled so every child finishes (or aborts) before we inspect results.
        const settledChildren = await Promise.allSettled(childRuns);
        const rejected = settledChildren.find(
          (entry): entry is PromiseRejectedResult => entry.status === "rejected",
        );
        if (rejected) {
          throw toError(rejected.reason);
        }
        const childResults = settledChildren
          .filter(
            (entry): entry is PromiseFulfilledResult<RecursiveChildResult<TIntent, TOutput>> =>
              entry.status === "fulfilled",
          )
          .map((entry) => entry.value);
        // Aggregation itself counts as agent work and consumes a slot.
        return this.runInSession({
          sessionId: input.sessionId,
          depth: input.depth,
          signal: linkedSignal.signal,
          run: () =>
            nodeResult.aggregate({
              sessionId: input.sessionId,
              depth: input.depth,
              signal: linkedSignal.signal,
              childResults,
            }),
        });
      } finally {
        fanoutSignal.cleanup();
      }
    } finally {
      linkedSignal.cleanup();
    }
  }
  /**
   * Runs a single fanout child in its own session. The allocate/release
   * middleware hooks bracket the run; release always executes and its error
   * takes precedence over the child's own failure. The child session is
   * closed regardless of outcome.
   */
  private async runRecursiveChild<TIntent extends RecursiveChildIntent, TOutput>(input: {
    parentSessionId: string;
    childDepth: number;
    childIndex: number;
    childCount: number;
    intent: TIntent;
    signal: AbortSignal;
    run: (
      input: RecursiveRunContext<TIntent>,
    ) => Promise<RecursiveRunOutput<TIntent, TOutput>> | RecursiveRunOutput<TIntent, TOutput>;
    childMiddleware?: RecursiveChildMiddleware<TIntent>;
  }): Promise<RecursiveChildResult<TIntent, TOutput>> {
    const childSessionId = this.buildChildSessionId(input.parentSessionId, input.childIndex);
    const childSession = this.createSession(childSessionId, {
      parentSessionId: input.parentSessionId,
    });
    let childOutput: TOutput | undefined;
    let childError: Error | undefined;
    try {
      await input.childMiddleware?.allocateForChild?.({
        parentSessionId: input.parentSessionId,
        childSessionId,
        depth: input.childDepth,
        childIndex: input.childIndex,
        childCount: input.childCount,
        intent: input.intent,
        signal: input.signal,
      });
      childOutput = await this.runRecursiveNode({
        sessionId: childSessionId,
        depth: input.childDepth,
        signal: input.signal,
        run: input.run,
        childMiddleware: input.childMiddleware,
        parentSessionId: input.parentSessionId,
        intent: input.intent,
      });
    } catch (error) {
      // Capture rather than rethrow so releaseForChild still runs below.
      childError = toError(error);
    }
    let releaseError: Error | undefined;
    try {
      await input.childMiddleware?.releaseForChild?.({
        parentSessionId: input.parentSessionId,
        childSessionId,
        depth: input.childDepth,
        childIndex: input.childIndex,
        childCount: input.childCount,
        intent: input.intent,
        signal: input.signal,
        outcome: childError ? { status: "failure", error: childError } : { status: "success" },
      });
    } catch (error) {
      releaseError = toError(error);
    } finally {
      childSession.close();
    }
    if (releaseError) {
      throw releaseError;
    }
    if (childError) {
      throw childError;
    }
    // NOTE(review): a legitimate `undefined` TOutput would trip this guard —
    // assumes child outputs are never undefined; confirm with callers.
    if (childOutput === undefined) {
      throw new Error(`Child session "${childSessionId}" completed without an output.`);
    }
    return {
      childSessionId,
      intent: input.intent,
      output: childOutput,
    };
  }
}

506
src/agents/manifest.ts Normal file
View File

@@ -0,0 +1,506 @@
import { isRecord } from "./types.js";
/** Tool access policy for a persona: explicit allow and ban lists. */
export type ToolClearancePolicy = {
  allowlist: string[];
  banlist: string[];
};
/** One agent persona declared in the manifest. */
export type ManifestPersona = {
  id: string;
  displayName: string;
  systemPromptTemplate: string;
  toolClearance: ToolClearancePolicy;
};
/** Optional limits applied to one parent→child relationship. */
export type RelationshipConstraint = {
  maxDepth?: number;
  maxChildren?: number;
};
/** Directed parent→child edge in the persona relationship graph. */
export type RelationshipEdge = {
  parentPersonaId: string;
  childPersonaId: string;
  constraints?: RelationshipConstraint;
};
/** Guard condition attached to a pipeline edge. */
export type RouteCondition =
  | {
      type: "always";
    }
  | {
      type: "state_flag";
      key: string;
      equals: boolean;
    }
  | {
      type: "history_has_event";
      event: string;
    }
  | {
      type: "file_exists";
      path: string;
    };
export type PipelineConstraint = {
  maxRetries?: number;
};
/** One executable node in the pipeline graph. */
export type PipelineNode = {
  id: string;
  actorId: string;
  personaId: string;
  constraints?: PipelineConstraint;
};
/** Transition between pipeline nodes, fired on the named event. */
export type PipelineEdge = {
  from: string;
  to: string;
  on:
    | "success"
    | "validation_fail"
    | "failure"
    | "always"
    | "onTaskComplete"
    | "onValidationFail";
  // Optional extra guard conditions; omitted entirely when empty.
  when?: RouteCondition[];
};
/** Full pipeline: entry node plus node/edge sets (must form a DAG). */
export type PipelineGraph = {
  entryNodeId: string;
  nodes: PipelineNode[];
  edges: PipelineEdge[];
};
export type TopologyKind = "hierarchical" | "retry-unrolled" | "sequential";
export type TopologyConstraint = {
  maxDepth: number;
  maxRetries: number;
};
/** Root manifest document, schema version "1". */
export type AgentManifest = {
  schemaVersion: "1";
  topologies: TopologyKind[];
  personas: ManifestPersona[];
  relationships: RelationshipEdge[];
  pipeline: PipelineGraph;
  topologyConstraints: TopologyConstraint;
};
/** Reads a required non-empty string field, returning it trimmed. */
function readString(record: Record<string, unknown>, key: string): string {
  const value = record[key];
  if (typeof value === "string") {
    const trimmed = value.trim();
    if (trimmed.length > 0) {
      return trimmed;
    }
  }
  throw new Error(`Manifest field \"${key}\" must be a non-empty string.`);
}
/**
 * Reads an optional integer field. Returns undefined when absent; throws
 * when present but not an integer >= `input.min`.
 */
function readOptionalInteger(
  record: Record<string, unknown>,
  key: string,
  input: {
    min: number;
  },
): number | undefined {
  const value = record[key];
  if (value === undefined) {
    return undefined;
  }
  if (typeof value === "number" && Number.isInteger(value) && value >= input.min) {
    return value;
  }
  throw new Error(`Manifest field \"${key}\" must be an integer >= ${String(input.min)}.`);
}
/** Reads a required array of non-empty strings, trimming each entry. */
function readStringArray(record: Record<string, unknown>, key: string): string[] {
  const value = record[key];
  if (!Array.isArray(value)) {
    throw new Error(`Manifest field \"${key}\" must be an array.`);
  }
  return value.map((item) => {
    if (typeof item === "string") {
      const trimmed = item.trim();
      if (trimmed.length > 0) {
        return trimmed;
      }
    }
    throw new Error(`Manifest field \"${key}\" contains an invalid string.`);
  });
}
/** Parses and validates a persona's tool clearance policy. */
function parseToolClearance(value: unknown): ToolClearancePolicy {
  if (!isRecord(value)) {
    throw new Error("Manifest persona toolClearance must be an object.");
  }
  const allowlist = readStringArray(value, "allowlist");
  const banlist = readStringArray(value, "banlist");
  return { allowlist, banlist };
}
function parsePersona(value: unknown): ManifestPersona {
if (!isRecord(value)) {
throw new Error("Manifest persona entry must be an object.");
}
return {
id: readString(value, "id"),
displayName: readString(value, "displayName"),
systemPromptTemplate: readString(value, "systemPromptTemplate"),
toolClearance: parseToolClearance(value.toolClearance),
};
}
/**
 * Parses one parent→child relationship edge.
 *
 * Fix: previously the returned object always carried a `constraints` key,
 * set to an explicit `undefined` when absent. That is inconsistent with the
 * conditional-spread pattern used elsewhere in this codebase for optional
 * properties and breaks under `exactOptionalPropertyTypes`; the key is now
 * omitted entirely when no constraints object is present.
 */
function parseRelationship(value: unknown): RelationshipEdge {
  if (!isRecord(value)) {
    throw new Error("Manifest relationship entry must be an object.");
  }
  const constraints = isRecord(value.constraints)
    ? {
        maxDepth: readOptionalInteger(value.constraints, "maxDepth", { min: 1 }),
        maxChildren: readOptionalInteger(value.constraints, "maxChildren", { min: 1 }),
      }
    : undefined;
  return {
    parentPersonaId: readString(value, "parentPersonaId"),
    childPersonaId: readString(value, "childPersonaId"),
    // Omit the key (rather than set it to undefined) when absent.
    ...(constraints ? { constraints } : {}),
  };
}
/** Parses one route condition, dispatching on its `type` discriminant. */
function parseCondition(value: unknown): RouteCondition {
  if (!isRecord(value)) {
    throw new Error("Route condition must be an object.");
  }
  const type = readString(value, "type");
  switch (type) {
    case "always":
      return { type };
    case "state_flag": {
      // Read key first (so its error wins), then validate the boolean flag.
      const key = readString(value, "key");
      const equals = value.equals;
      if (typeof equals !== "boolean") {
        throw new Error('Route condition field "equals" must be a boolean.');
      }
      return { type, key, equals };
    }
    case "history_has_event":
      return { type, event: readString(value, "event") };
    case "file_exists":
      return { type, path: readString(value, "path") };
    default:
      throw new Error(`Unsupported route condition type \"${type}\".`);
  }
}
/**
 * Parses one pipeline node.
 *
 * Fix: previously the returned object always carried a `constraints` key,
 * set to an explicit `undefined` when absent. That is inconsistent with the
 * conditional-spread pattern used elsewhere in this codebase for optional
 * properties and breaks under `exactOptionalPropertyTypes`; the key is now
 * omitted entirely when no constraints object is present.
 */
function parsePipelineNode(value: unknown): PipelineNode {
  if (!isRecord(value)) {
    throw new Error("Pipeline node must be an object.");
  }
  const constraints = isRecord(value.constraints)
    ? {
        maxRetries: readOptionalInteger(value.constraints, "maxRetries", { min: 0 }),
      }
    : undefined;
  return {
    id: readString(value, "id"),
    actorId: readString(value, "actorId"),
    personaId: readString(value, "personaId"),
    // Omit the key (rather than set it to undefined) when absent.
    ...(constraints ? { constraints } : {}),
  };
}
/** Parses one pipeline edge, validating its event name and guard list. */
function parsePipelineEdge(value: unknown): PipelineEdge {
  if (!isRecord(value)) {
    throw new Error("Pipeline edge must be an object.");
  }
  const on = readString(value, "on");
  const validEvents: PipelineEdge["on"][] = [
    "success",
    "validation_fail",
    "failure",
    "always",
    "onTaskComplete",
    "onValidationFail",
  ];
  if (!validEvents.includes(on as PipelineEdge["on"])) {
    throw new Error(`Pipeline edge field \"on\" has unsupported event \"${on}\".`);
  }
  const rawWhen = value.when;
  let when: RouteCondition[] = [];
  if (rawWhen !== undefined) {
    if (!Array.isArray(rawWhen)) {
      throw new Error('Pipeline edge field "when" must be an array when provided.');
    }
    when = rawWhen.map(parseCondition);
  }
  return {
    from: readString(value, "from"),
    to: readString(value, "to"),
    on: on as PipelineEdge["on"],
    // Omit "when" entirely when there are no guard conditions.
    ...(when.length > 0 ? { when } : {}),
  };
}
/** Parses the pipeline graph: non-empty nodes, edges, and the entry node id. */
function parsePipeline(value: unknown): PipelineGraph {
  if (!isRecord(value)) {
    throw new Error("Manifest pipeline must be an object.");
  }
  const rawNodes = value.nodes;
  if (!Array.isArray(rawNodes) || rawNodes.length === 0) {
    throw new Error("Manifest pipeline.nodes must be a non-empty array.");
  }
  const rawEdges = value.edges;
  if (!Array.isArray(rawEdges)) {
    throw new Error("Manifest pipeline.edges must be an array.");
  }
  const nodes = rawNodes.map(parsePipelineNode);
  const edges = rawEdges.map(parsePipelineEdge);
  const entryNodeId = readString(value, "entryNodeId");
  return { entryNodeId, nodes, edges };
}
/** Parses the non-empty list of supported topology kinds. */
function parseTopologies(value: unknown): TopologyKind[] {
  if (!Array.isArray(value) || value.length === 0) {
    throw new Error("Manifest topologies must be a non-empty array.");
  }
  const valid = new Set<TopologyKind>(["hierarchical", "retry-unrolled", "sequential"]);
  return value.map((item) => {
    if (typeof item === "string" && valid.has(item as TopologyKind)) {
      return item as TopologyKind;
    }
    throw new Error("Manifest topologies contains an unsupported topology kind.");
  });
}
/** Parses topology constraints, defaulting maxDepth=4 and maxRetries=2. */
function parseTopologyConstraints(value: unknown): TopologyConstraint {
  if (!isRecord(value)) {
    throw new Error("Manifest topologyConstraints must be an object.");
  }
  return {
    maxDepth: readOptionalInteger(value, "maxDepth", { min: 1 }) ?? 4,
    maxRetries: readOptionalInteger(value, "maxRetries", { min: 0 }) ?? 2,
  };
}
/** Throws when `items` contains a repeated id; `label` names the collection. */
function assertNoDuplicates(items: string[], label: string): void {
  const seen = new Set<string>();
  for (const item of items) {
    const sizeBefore = seen.size;
    seen.add(item);
    // Set size unchanged means this id was already present.
    if (seen.size === sizeBefore) {
      throw new Error(`${label} contains duplicate id \"${item}\".`);
    }
  }
}
/**
 * Validates the pipeline graph: the entry node must exist, every edge must
 * reference known nodes, and the graph must be acyclic. Acyclicity is
 * checked with a Kahn-style topological traversal — if the number of nodes
 * visited from zero-indegree roots is less than the total, a cycle exists.
 */
function assertPipelineDag(pipeline: PipelineGraph): void {
  const adjacency = new Map<string, string[]>();
  const indegree = new Map<string, number>();
  const nodeIds = new Set<string>(pipeline.nodes.map((node) => node.id));
  for (const node of pipeline.nodes) {
    adjacency.set(node.id, []);
    indegree.set(node.id, 0);
  }
  if (!nodeIds.has(pipeline.entryNodeId)) {
    throw new Error(`Pipeline entry node \"${pipeline.entryNodeId}\" is not defined.`);
  }
  // Build adjacency lists and indegrees, validating edge endpoints as we go.
  for (const edge of pipeline.edges) {
    if (!nodeIds.has(edge.from)) {
      throw new Error(`Pipeline edge references unknown from node \"${edge.from}\".`);
    }
    if (!nodeIds.has(edge.to)) {
      throw new Error(`Pipeline edge references unknown to node \"${edge.to}\".`);
    }
    const neighbors = adjacency.get(edge.from);
    if (!neighbors) {
      throw new Error(`Internal DAG error for node \"${edge.from}\".`);
    }
    neighbors.push(edge.to);
    const currentInDegree = indegree.get(edge.to);
    indegree.set(edge.to, (currentInDegree ?? 0) + 1);
  }
  // Seed the queue with all nodes that have no incoming edges.
  const queue: string[] = [];
  for (const [nodeId, degree] of indegree.entries()) {
    if (degree === 0) {
      queue.push(nodeId);
    }
  }
  let visited = 0;
  // The queue doubles as the processing list; cursor walks it in order.
  for (let cursor = 0; cursor < queue.length; cursor += 1) {
    const current = queue[cursor];
    if (!current) {
      continue;
    }
    visited += 1;
    const neighbors = adjacency.get(current) ?? [];
    for (const neighbor of neighbors) {
      const degree = indegree.get(neighbor);
      if (degree === undefined) {
        continue;
      }
      const nextDegree = degree - 1;
      indegree.set(neighbor, nextDegree);
      if (nextDegree === 0) {
        queue.push(neighbor);
      }
    }
  }
  // Any node never reaching indegree 0 is part of a cycle.
  if (visited !== pipeline.nodes.length) {
    throw new Error("Pipeline graph must be a strict DAG (cycle detected).");
  }
}
/**
 * Validates that the persona relationship graph is acyclic using a
 * depth-first search with gray (in-progress) / black (done) marking.
 */
function assertRelationshipDag(relationships: RelationshipEdge[]): void {
  // Build parent -> children adjacency, ensuring leaves also get entries.
  const adjacency = new Map<string, string[]>();
  for (const relationship of relationships) {
    const children = adjacency.get(relationship.parentPersonaId);
    if (children === undefined) {
      adjacency.set(relationship.parentPersonaId, [relationship.childPersonaId]);
    } else {
      children.push(relationship.childPersonaId);
    }
    if (!adjacency.has(relationship.childPersonaId)) {
      adjacency.set(relationship.childPersonaId, []);
    }
  }
  const inProgress = new Set<string>();
  const done = new Set<string>();
  const visit = (nodeId: string): void => {
    // Re-entering a node still on the DFS stack means a cycle.
    if (inProgress.has(nodeId)) {
      throw new Error("Relationship graph must be acyclic (cycle detected).");
    }
    if (done.has(nodeId)) {
      return;
    }
    inProgress.add(nodeId);
    for (const childId of adjacency.get(nodeId) ?? []) {
      visit(childId);
    }
    inProgress.delete(nodeId);
    done.add(nodeId);
  };
  for (const nodeId of adjacency.keys()) {
    visit(nodeId);
  }
}
/**
 * Parses and fully validates an AgentManifest from untrusted input.
 * Validation order: schema version, field shapes, duplicate ids, persona
 * reference integrity (relationships, then pipeline nodes), and finally
 * acyclicity of both the relationship graph and the pipeline graph.
 * Throws on the first violation found.
 */
export function parseAgentManifest(input: unknown): AgentManifest {
  if (!isRecord(input)) {
    throw new Error("AgentManifest must be an object.");
  }
  const schemaVersion = readString(input, "schemaVersion");
  if (schemaVersion !== "1") {
    throw new Error(`Unsupported AgentManifest schemaVersion \"${schemaVersion}\".`);
  }
  const personasValue = input.personas;
  if (!Array.isArray(personasValue) || personasValue.length === 0) {
    throw new Error("Manifest personas must be a non-empty array.");
  }
  const relationshipsValue = input.relationships;
  if (!Array.isArray(relationshipsValue)) {
    throw new Error("Manifest relationships must be an array.");
  }
  const manifest: AgentManifest = {
    schemaVersion: "1",
    topologies: parseTopologies(input.topologies),
    personas: personasValue.map(parsePersona),
    relationships: relationshipsValue.map(parseRelationship),
    pipeline: parsePipeline(input.pipeline),
    topologyConstraints: parseTopologyConstraints(input.topologyConstraints),
  };
  assertNoDuplicates(
    manifest.personas.map((persona) => persona.id),
    "Manifest personas",
  );
  assertNoDuplicates(
    manifest.pipeline.nodes.map((node) => node.id),
    "Manifest pipeline.nodes",
  );
  // Every relationship endpoint must name a declared persona.
  const personaIds = new Set(manifest.personas.map((persona) => persona.id));
  for (const relation of manifest.relationships) {
    if (!personaIds.has(relation.parentPersonaId)) {
      throw new Error(
        `Relationship references unknown parent persona \"${relation.parentPersonaId}\".`,
      );
    }
    if (!personaIds.has(relation.childPersonaId)) {
      throw new Error(
        `Relationship references unknown child persona \"${relation.childPersonaId}\".`,
      );
    }
  }
  assertRelationshipDag(manifest.relationships);
  // Every pipeline node must be backed by a declared persona.
  for (const node of manifest.pipeline.nodes) {
    if (!personaIds.has(node.personaId)) {
      throw new Error(`Pipeline node \"${node.id}\" references unknown persona \"${node.personaId}\".`);
    }
  }
  assertPipelineDag(manifest.pipeline);
  return manifest;
}

215
src/agents/orchestration.ts Normal file
View File

@@ -0,0 +1,215 @@
import { resolve } from "node:path";
import { parseAgentManifest, type AgentManifest } from "./manifest.js";
import {
PersonaRegistry,
type PersonaBehaviorEvent,
type PersonaBehaviorHandler,
} from "./persona-registry.js";
import { PipelineExecutor, type ActorExecutor, type PipelineRunSummary } from "./pipeline.js";
import { FileSystemStateContextManager, type StoredSessionState } from "./state-context.js";
import type { JsonObject } from "./types.js";
/** Resolved runtime settings for the schema-driven execution engine. */
export type OrchestrationSettings = {
  workspaceRoot: string;
  // Directory where FileSystemStateContextManager persists session state.
  stateRoot: string;
  maxDepth: number;
  maxRetries: number;
  maxChildren: number;
  runtimeContext: Record<string, string | number | boolean>;
};
/** Optional per-persona, per-event behavior handlers keyed by persona id. */
export type BehaviorHandlerRegistry = Partial<
  Record<string, Partial<Record<PersonaBehaviorEvent, PersonaBehaviorHandler>>>
>;
/**
 * Reads an integer from one of the known environment variables. Returns
 * `fallback` when unset/blank; throws when set but not an integer >= `min`.
 */
function readOptionalIntegerEnv(
  key:
    | "AGENT_TOPOLOGY_MAX_DEPTH"
    | "AGENT_TOPOLOGY_MAX_RETRIES"
    | "AGENT_RELATIONSHIP_MAX_CHILDREN",
  fallback: number,
  min: number,
): number {
  const raw = process.env[key]?.trim();
  if (!raw) {
    return fallback;
  }
  const parsed = Number(raw);
  if (Number.isInteger(parsed) && parsed >= min) {
    return parsed;
  }
  throw new Error(`Environment variable ${key} must be an integer >= ${String(min)}.`);
}
/** Reads a string env var, falling back when unset or blank after trimming. */
function readOptionalStringEnv(key: "AGENT_STATE_ROOT", fallback: string): string {
  const raw = process.env[key]?.trim();
  return raw ? raw : fallback;
}
/**
 * Loads environment-overridable defaults (state root, depth/retry/children
 * limits). Workspace root and runtime context must be supplied by callers.
 */
export function loadOrchestrationSettingsFromEnv(): Omit<
  OrchestrationSettings,
  "workspaceRoot" | "runtimeContext"
> {
  const stateRoot = readOptionalStringEnv("AGENT_STATE_ROOT", ".ai_ops/state");
  const maxDepth = readOptionalIntegerEnv("AGENT_TOPOLOGY_MAX_DEPTH", 4, 1);
  const maxRetries = readOptionalIntegerEnv("AGENT_TOPOLOGY_MAX_RETRIES", 2, 0);
  const maxChildren = readOptionalIntegerEnv("AGENT_RELATIONSHIP_MAX_CHILDREN", 4, 1);
  return { stateRoot, maxDepth, maxRetries, maxChildren };
}
/** Accepts executors as either a Map or a plain record; normalizes to a Map. */
function toExecutorMap(
  executors: ReadonlyMap<string, ActorExecutor> | Record<string, ActorExecutor>,
): ReadonlyMap<string, ActorExecutor> {
  return executors instanceof Map ? executors : new Map(Object.entries(executors));
}
/** Groups relationship edges by their parent persona id. */
function getChildrenByParent(manifest: AgentManifest): Map<string, AgentManifest["relationships"]> {
  const map = new Map<string, AgentManifest["relationships"]>();
  for (const edge of manifest.relationships) {
    const bucket = map.get(edge.parentPersonaId);
    if (bucket === undefined) {
      map.set(edge.parentPersonaId, [edge]);
    } else {
      bucket.push(edge);
    }
  }
  return map;
}
/**
 * Facade that wires a parsed agent manifest, a persona registry, a
 * filesystem-backed state manager, and actor executors into runnable
 * pipeline sessions. Settings are resolved as: explicit input settings,
 * then environment defaults (loadOrchestrationSettingsFromEnv).
 */
export class SchemaDrivenExecutionEngine {
  private readonly manifest: AgentManifest;
  private readonly personaRegistry = new PersonaRegistry();
  private readonly stateManager: FileSystemStateContextManager;
  private readonly actorExecutors: ReadonlyMap<string, ActorExecutor>;
  private readonly settings: OrchestrationSettings;
  // Relationship edges grouped by parent persona id (built once in the constructor).
  private readonly childrenByParent: Map<string, AgentManifest["relationships"]>;
  /**
   * Validates the raw manifest, resolves settings, registers all personas
   * (attaching optional per-persona behavior handlers), and enforces the
   * per-parent child-count limit.
   *
   * @throws If the manifest fails parsing, a persona id is duplicated, or a
   *   parent persona exceeds the configured maxChildren.
   */
  constructor(input: {
    manifest: AgentManifest | unknown;
    actorExecutors: ReadonlyMap<string, ActorExecutor> | Record<string, ActorExecutor>;
    behaviorHandlers?: BehaviorHandlerRegistry;
    settings?: Partial<Omit<OrchestrationSettings, "workspaceRoot" | "runtimeContext">> & {
      workspaceRoot?: string;
      runtimeContext?: Record<string, string | number | boolean>;
    };
  }) {
    this.manifest = parseAgentManifest(input.manifest);
    const defaults = loadOrchestrationSettingsFromEnv();
    this.settings = {
      // Paths are resolved to absolute so later cwd changes cannot shift them.
      workspaceRoot: resolve(input.settings?.workspaceRoot ?? process.cwd()),
      stateRoot: resolve(input.settings?.stateRoot ?? defaults.stateRoot),
      maxDepth: input.settings?.maxDepth ?? defaults.maxDepth,
      maxRetries: input.settings?.maxRetries ?? defaults.maxRetries,
      maxChildren: input.settings?.maxChildren ?? defaults.maxChildren,
      runtimeContext: {
        ...(input.settings?.runtimeContext ?? {}),
      },
    };
    this.stateManager = new FileSystemStateContextManager({
      rootDirectory: this.settings.stateRoot,
    });
    this.actorExecutors = toExecutorMap(input.actorExecutors);
    for (const persona of this.manifest.personas) {
      this.personaRegistry.register({
        ...persona,
        behaviorHandlers: input.behaviorHandlers?.[persona.id],
      });
    }
    this.childrenByParent = getChildrenByParent(this.manifest);
    this.assertRelationshipConstraints();
  }
  /** Returns the parsed manifest this engine was constructed with. */
  getManifest(): AgentManifest {
    return this.manifest;
  }
  /** Returns the filesystem-backed session state manager. */
  getStateManager(): FileSystemStateContextManager {
    return this.stateManager;
  }
  /**
   * Plans which child persona ids a parent may spawn at the given depth,
   * honoring per-relation depth/child constraints and the global child cap.
   *
   * @throws If the requested depth exceeds the configured maxDepth.
   */
  planChildPersonas(input: {
    parentPersonaId: string;
    depth: number;
    maxChildren?: number;
  }): string[] {
    if (input.depth > this.settings.maxDepth) {
      throw new Error(
        `Requested child planning depth ${String(input.depth)} exceeds configured maxDepth ${String(this.settings.maxDepth)}.`,
      );
    }
    const relations = this.childrenByParent.get(input.parentPersonaId) ?? [];
    const effectiveLimit = input.maxChildren ?? this.settings.maxChildren;
    const plannedChildren: string[] = [];
    for (const relation of relations) {
      // Skip relations whose own depth ceiling is shallower than the request.
      if (relation.constraints?.maxDepth !== undefined && input.depth > relation.constraints.maxDepth) {
        continue;
      }
      plannedChildren.push(relation.childPersonaId);
      const relationMax = relation.constraints?.maxChildren;
      // NOTE(review): a relation-level maxChildren stops ALL further planning
      // (it caps the total list, not just this relation) — confirm intended.
      if (relationMax !== undefined && plannedChildren.length >= relationMax) {
        break;
      }
      if (plannedChildren.length >= effectiveLimit) {
        break;
      }
    }
    return plannedChildren;
  }
  /**
   * Runs one pipeline session end to end. Engine-level maxDepth/maxRetries
   * are tightened against the manifest's topologyConstraints via Math.min.
   */
  async runSession(input: {
    sessionId: string;
    initialPayload: JsonObject;
    initialState?: Partial<StoredSessionState>;
  }): Promise<PipelineRunSummary> {
    const executor = new PipelineExecutor(
      this.manifest,
      this.personaRegistry,
      this.stateManager,
      this.actorExecutors,
      {
        workspaceRoot: this.settings.workspaceRoot,
        runtimeContext: this.settings.runtimeContext,
        maxDepth: Math.min(this.settings.maxDepth, this.manifest.topologyConstraints.maxDepth),
        maxRetries: Math.min(this.settings.maxRetries, this.manifest.topologyConstraints.maxRetries),
      },
    );
    return executor.run({
      sessionId: input.sessionId,
      initialPayload: input.initialPayload,
      initialState: input.initialState,
    });
  }
  /** Rejects manifests where any parent has more child edges than allowed. */
  private assertRelationshipConstraints(): void {
    for (const [parent, edges] of this.childrenByParent.entries()) {
      if (edges.length > this.settings.maxChildren) {
        throw new Error(
          `Persona \"${parent}\" exceeds AGENT_RELATIONSHIP_MAX_CHILDREN (${String(this.settings.maxChildren)}).`,
        );
      }
    }
  }
}

View File

@@ -0,0 +1,112 @@
import type { JsonObject } from "./types.js";
import type { ManifestPersona, ToolClearancePolicy } from "./manifest.js";
// Lifecycle events a persona may react to after a node execution.
export type PersonaBehaviorEvent = "onTaskComplete" | "onValidationFail";
// Context delivered to a persona behavior handler for one event.
export type PersonaBehaviorContext = {
  event: PersonaBehaviorEvent;
  sessionId: string;
  nodeId: string;
  payload: JsonObject;
};
// Handler may be sync or async; a returned object is merged into session metadata.
export type PersonaBehaviorHandler = (
  context: PersonaBehaviorContext,
) => Promise<void | JsonObject> | void | JsonObject;
// Manifest persona enriched with optional runtime behavior handlers.
export type PersonaRuntimeDefinition = ManifestPersona & {
  behaviorHandlers?: Partial<Record<PersonaBehaviorEvent, PersonaBehaviorHandler>>;
};
/**
 * Substitutes `{{ key }}` placeholders with values from runtimeContext.
 * Keys missing from the context render as the empty string.
 */
function renderTemplate(
  template: string,
  runtimeContext: Record<string, string | number | boolean>,
): string {
  const placeholder = /{{\s*([a-zA-Z0-9_.-]+)\s*}}/g;
  return template.replace(placeholder, (_match, key: string) => {
    const resolved = runtimeContext[key];
    if (resolved === undefined) {
      return "";
    }
    return String(resolved);
  });
}
/** Returns the values with duplicates removed, preserving first-seen order. */
function uniqueStrings(values: string[]): string[] {
  // Set iteration order is insertion order, so order is preserved.
  return [...new Set(values)];
}
/**
 * In-memory store of persona definitions keyed by persona id. Registration
 * dedupes tool-clearance lists; lookups throw on unknown ids so callers
 * never silently operate on a missing persona.
 */
export class PersonaRegistry {
  private readonly personas = new Map<string, PersonaRuntimeDefinition>();

  /** Registers each persona in order; throws on the first duplicate id. */
  registerMany(personas: PersonaRuntimeDefinition[]): void {
    personas.forEach((persona) => this.register(persona));
  }

  /** Registers one persona, deduplicating its allowlist/banlist entries. */
  register(persona: PersonaRuntimeDefinition): void {
    if (this.personas.has(persona.id)) {
      throw new Error(`Persona \"${persona.id}\" is already registered.`);
    }
    this.personas.set(persona.id, {
      ...persona,
      toolClearance: {
        // Set iteration preserves insertion order, so dedupe keeps first-seen order.
        allowlist: [...new Set(persona.toolClearance.allowlist)],
        banlist: [...new Set(persona.toolClearance.banlist)],
      },
    });
  }

  /** Looks up a persona by id; throws when it was never registered. */
  getById(personaId: string): PersonaRuntimeDefinition {
    const found = this.personas.get(personaId);
    if (found === undefined) {
      throw new Error(`Persona \"${personaId}\" is not registered.`);
    }
    return found;
  }

  /**
   * Renders the persona's system prompt template, substituting `{{ key }}`
   * placeholders with runtimeContext values (unknown keys become "").
   */
  renderSystemPrompt(input: {
    personaId: string;
    runtimeContext: Record<string, string | number | boolean>;
  }): string {
    const persona = this.getById(input.personaId);
    return persona.systemPromptTemplate.replace(
      /{{\s*([a-zA-Z0-9_.-]+)\s*}}/g,
      (_match, key: string) => {
        const value = input.runtimeContext[key];
        return value === undefined ? "" : String(value);
      },
    );
  }

  /** Returns a defensive copy of the persona's tool clearance policy. */
  getToolClearance(personaId: string): ToolClearancePolicy {
    const { toolClearance } = this.getById(personaId);
    // TODO(security): enforce allowlist/banlist in the tool execution boundary.
    return {
      allowlist: toolClearance.allowlist.slice(),
      banlist: toolClearance.banlist.slice(),
    };
  }

  /**
   * Invokes the persona's handler for the given behavior event, if any.
   * Resolves to the handler's patch object, or {} when there is no handler
   * or the handler returned nothing.
   */
  async emitBehaviorEvent(input: PersonaBehaviorContext & { personaId: string }): Promise<JsonObject> {
    const handler = this.getById(input.personaId).behaviorHandlers?.[input.event];
    if (!handler) {
      return {};
    }
    const patch = await handler({
      event: input.event,
      sessionId: input.sessionId,
      nodeId: input.nodeId,
      payload: input.payload,
    });
    return patch ? patch : {};
  }
}

314
src/agents/pipeline.ts Normal file
View File

@@ -0,0 +1,314 @@
import { access } from "node:fs/promises";
import { constants as fsConstants } from "node:fs";
import { resolve } from "node:path";
import type { AgentManifest, PipelineEdge, PipelineNode, RouteCondition } from "./manifest.js";
import { PersonaRegistry } from "./persona-registry.js";
import {
FileSystemStateContextManager,
type NodeExecutionContext,
type SessionHistoryEntry,
type StoredSessionState,
} from "./state-context.js";
import type { JsonObject } from "./types.js";
// Terminal status of one actor execution attempt.
export type ActorResultStatus = "success" | "validation_fail" | "failure";
// What an actor executor reports back: status plus optional payload and state patches.
export type ActorExecutionResult = {
  status: ActorResultStatus;
  payload?: JsonObject;
  stateFlags?: Record<string, boolean>;
  stateMetadata?: JsonObject;
};
// Everything handed to an actor executor for one node execution.
export type ActorExecutionInput = {
  sessionId: string;
  node: PipelineNode;
  prompt: string;
  context: NodeExecutionContext;
  toolClearance: {
    allowlist: string[];
    banlist: string[];
  };
};
// Pluggable execution function registered per actor id.
export type ActorExecutor = (input: ActorExecutionInput) => Promise<ActorExecutionResult>;
// One row of the run log: which node ran, at what depth/attempt, with what outcome.
export type PipelineExecutionRecord = {
  nodeId: string;
  depth: number;
  attempt: number;
  status: ActorResultStatus;
};
// Result of a full pipeline run.
export type PipelineRunSummary = {
  sessionId: string;
  records: PipelineExecutionRecord[];
  finalState: StoredSessionState;
};
// Executor-level limits and ambient context.
export type PipelineExecutorOptions = {
  workspaceRoot: string;
  runtimeContext: Record<string, string | number | boolean>;
  maxDepth: number;
  maxRetries: number;
};
// BFS work item: a node to execute and the traversal depth it was reached at.
type QueueItem = {
  nodeId: string;
  depth: number;
};
/** Maps an actor result status to the persona behavior event it triggers, if any. */
function toBehaviorEvent(status: ActorResultStatus): "onTaskComplete" | "onValidationFail" | undefined {
  switch (status) {
    case "success":
      return "onTaskComplete";
    case "validation_fail":
      return "onValidationFail";
    default:
      // "failure" has no behavior event.
      return undefined;
  }
}
/**
 * Whether an edge's trigger matches the node's result status.
 * "always" fires unconditionally; the behavior-event aliases
 * (onTaskComplete/onValidationFail) match the corresponding statuses.
 */
function shouldEdgeRun(edge: PipelineEdge, status: ActorResultStatus): boolean {
  switch (edge.on) {
    case "always":
      return true;
    case "success":
    case "onTaskComplete":
      return status === "success";
    case "validation_fail":
    case "onValidationFail":
      return status === "validation_fail";
    case "failure":
      return status === "failure";
    default:
      return false;
  }
}
/**
 * Evaluates one routing condition against the session state.
 * Conditions other than always/state_flag/history_has_event carry a path
 * that is resolved against workspaceRoot and probed for existence on disk.
 */
async function evaluateCondition(
  condition: RouteCondition,
  state: StoredSessionState,
  workspaceRoot: string,
): Promise<boolean> {
  switch (condition.type) {
    case "always":
      return true;
    case "state_flag":
      return state.flags[condition.key] === condition.equals;
    case "history_has_event":
      return state.history.some((entry) => entry.event === condition.event);
    default: {
      const absolutePath = resolve(workspaceRoot, condition.path);
      try {
        await access(absolutePath, fsConstants.F_OK);
        return true;
      } catch {
        return false;
      }
    }
  }
}
/**
 * True when every `when` condition on the edge holds (conditions are ANDed);
 * an edge with no conditions always passes. Conditions are evaluated
 * sequentially so later conditions are skipped after the first failure.
 */
async function edgeConditionsSatisfied(
  edge: PipelineEdge,
  state: StoredSessionState,
  workspaceRoot: string,
): Promise<boolean> {
  const conditions = edge.when ?? [];
  for (const condition of conditions) {
    if (!(await evaluateCondition(condition, state, workspaceRoot))) {
      return false;
    }
  }
  return true;
}
/**
 * Breadth-first executor for the manifest's pipeline graph.
 *
 * For each dequeued node it renders the persona prompt, invokes the node's
 * registered actor executor, records the attempt, emits any persona behavior
 * event, persists the resulting state patch, and then either requeues the
 * node for a retry (non-success within the retry budget) or fans out along
 * the outgoing edges whose trigger and conditions match.
 */
export class PipelineExecutor {
  // Node lookup by id, built once from the manifest.
  private readonly nodeById = new Map<string, PipelineNode>();
  // Outgoing edges grouped by their source node id.
  private readonly edgesBySource = new Map<string, PipelineEdge[]>();
  constructor(
    private readonly manifest: AgentManifest,
    private readonly personaRegistry: PersonaRegistry,
    private readonly stateManager: FileSystemStateContextManager,
    private readonly actorExecutors: ReadonlyMap<string, ActorExecutor>,
    private readonly options: PipelineExecutorOptions,
  ) {
    for (const node of manifest.pipeline.nodes) {
      this.nodeById.set(node.id, node);
    }
    for (const edge of manifest.pipeline.edges) {
      const entries = this.edgesBySource.get(edge.from);
      if (entries) {
        entries.push(edge);
      } else {
        this.edgesBySource.set(edge.from, [edge]);
      }
    }
  }
  /** Maps an actor result status to the persona behavior event it triggers. */
  private behaviorEventFor(
    status: "success" | "validation_fail" | "failure",
  ): "onTaskComplete" | "onValidationFail" | undefined {
    if (status === "success") {
      return "onTaskComplete";
    }
    if (status === "validation_fail") {
      return "onValidationFail";
    }
    return undefined;
  }
  /**
   * Runs the pipeline for one session, starting at the entry node.
   *
   * @param input.sessionId - Session whose state directory is used.
   * @param input.initialPayload - Handoff payload delivered to the entry node.
   * @param input.initialState - Optional seed for the stored session state.
   * @returns Per-attempt execution records plus the final stored state.
   * @throws When traversal exceeds the safe execution bound or maxDepth, or
   *   when the manifest references an unknown node or unregistered actor.
   */
  async run(input: {
    sessionId: string;
    initialPayload: JsonObject;
    initialState?: Partial<StoredSessionState>;
  }): Promise<PipelineRunSummary> {
    await this.stateManager.initializeSession(input.sessionId, input.initialState);
    await this.stateManager.writeHandoff(input.sessionId, {
      nodeId: this.manifest.pipeline.entryNodeId,
      payload: input.initialPayload,
    });
    const records: PipelineExecutionRecord[] = [];
    const queue: QueueItem[] = [{ nodeId: this.manifest.pipeline.entryNodeId, depth: 0 }];
    const attemptsByNode = new Map<string, number>();
    // Hard cap on total executions to guarantee termination even if the
    // graph is cyclic; the +3 slack covers the first attempt plus retries.
    const maxExecutions = this.manifest.pipeline.nodes.length * (this.options.maxRetries + 3);
    let executionCount = 0;
    while (queue.length > 0) {
      const item = queue.shift();
      if (!item) {
        continue;
      }
      executionCount += 1;
      if (executionCount > maxExecutions) {
        throw new Error("Pipeline execution exceeded the configured safe execution bound.");
      }
      if (item.depth > this.options.maxDepth) {
        throw new Error(
          `Pipeline depth ${String(item.depth)} exceeds configured maxDepth ${String(this.options.maxDepth)}.`,
        );
      }
      const node = this.nodeById.get(item.nodeId);
      if (!node) {
        throw new Error(`Pipeline node \"${item.nodeId}\" is not defined.`);
      }
      const executor = this.actorExecutors.get(node.actorId);
      if (!executor) {
        throw new Error(`No actor executor registered for actor \"${node.actorId}\".`);
      }
      const context = await this.stateManager.buildFreshNodeContext(input.sessionId, node.id);
      const prompt = this.personaRegistry.renderSystemPrompt({
        personaId: node.personaId,
        runtimeContext: {
          ...this.options.runtimeContext,
          session_id: input.sessionId,
          node_id: node.id,
          depth: item.depth,
        },
      });
      const result = await executor({
        sessionId: input.sessionId,
        node,
        prompt,
        context,
        toolClearance: this.personaRegistry.getToolClearance(node.personaId),
      });
      // attempt counts completed executions of this node, including this one.
      const attempt = (attemptsByNode.get(node.id) ?? 0) + 1;
      attemptsByNode.set(node.id, attempt);
      records.push({
        nodeId: node.id,
        depth: item.depth,
        attempt,
        status: result.status,
      });
      const behaviorEvent = this.behaviorEventFor(result.status);
      const behaviorPatch = behaviorEvent
        ? await this.personaRegistry.emitBehaviorEvent({
            personaId: node.personaId,
            event: behaviorEvent,
            sessionId: input.sessionId,
            nodeId: node.id,
            payload: result.payload ?? {},
          })
        : {};
      const historyEvent: SessionHistoryEntry = {
        nodeId: node.id,
        event: result.status,
        timestamp: new Date().toISOString(),
        ...(result.payload ? { data: result.payload } : {}),
      };
      await this.stateManager.patchState(input.sessionId, {
        ...(result.stateFlags ? { flags: result.stateFlags } : {}),
        metadata: {
          // Behavior-handler patches win over the actor's own metadata.
          ...(result.stateMetadata ?? {}),
          ...behaviorPatch,
        },
        historyEvent,
      });
      const maxRetriesForNode = Math.min(
        node.constraints?.maxRetries ?? this.manifest.topologyConstraints.maxRetries,
        this.options.maxRetries,
      );
      // Retry only while retries used so far (attempt - 1) are below the
      // per-node budget. The previous `attempt <= maxRetriesForNode + 1`
      // was off by one: with maxRetries = 0 a failing node still ran twice.
      if (result.status !== "success" && attempt <= maxRetriesForNode) {
        await this.stateManager.writeHandoff(input.sessionId, {
          nodeId: node.id,
          fromNodeId: node.id,
          payload: result.payload ?? context.handoff.payload,
        });
        // Re-enqueue at the same depth: a retry is not a deeper traversal.
        queue.push({
          nodeId: node.id,
          depth: item.depth,
        });
        continue;
      }
      const state = await this.stateManager.readState(input.sessionId);
      const candidateEdges = this.edgesBySource.get(node.id) ?? [];
      for (const edge of candidateEdges) {
        if (!shouldEdgeRun(edge, result.status)) {
          continue;
        }
        if (!(await edgeConditionsSatisfied(edge, state, this.options.workspaceRoot))) {
          continue;
        }
        await this.stateManager.writeHandoff(input.sessionId, {
          nodeId: edge.to,
          fromNodeId: node.id,
          payload: result.payload ?? context.handoff.payload,
        });
        queue.push({
          nodeId: edge.to,
          depth: item.depth + 1,
        });
      }
    }
    return {
      sessionId: input.sessionId,
      records,
      finalState: await this.stateManager.readState(input.sessionId),
    };
  }
}

700
src/agents/provisioning.ts Normal file
View File

@@ -0,0 +1,700 @@
import { execFile } from "node:child_process";
import { createHash } from "node:crypto";
import { mkdir, open, unlink, writeFile } from "node:fs/promises";
import { dirname, isAbsolute, resolve } from "node:path";
import { promisify } from "node:util";
// Promisified child_process.execFile, used by the git helpers below.
const execFileAsync = promisify(execFile);
// Minimal JSON value model used for allocations and metadata payloads.
type JsonPrimitive = string | number | boolean | null;
type JsonValue = JsonPrimitive | JsonValue[] | { [key: string]: JsonValue };
// A request for one resource kind, with provider-specific options.
export type ResourceRequest = {
  kind: string;
  options?: Record<string, unknown>;
};
// Serializable view of everything provisioned for a session; also written
// to the discovery file for in-agent consumption.
export type DiscoverySnapshot = {
  sessionId: string;
  workspaceRoot: string;
  workingDirectory: string;
  hardConstraints: Array<{
    kind: string;
    allocation: Record<string, JsonValue>;
  }>;
  softConstraints: {
    env: Record<string, string>;
    promptSections: string[];
    metadata: Record<string, JsonValue>;
  };
};
// Input for carving a child session's resources out of a parent's snapshot.
export type ChildResourceSuballocationInput = {
  parentSnapshot: DiscoverySnapshot;
  childSessionId: string;
  childIndex: number;
  childCount: number;
};
// Soft (advisory) context a lease contributes: env vars, prompt text,
// metadata, and optionally a preferred working directory.
type ResourceContextPatch = {
  env?: Record<string, string>;
  promptSections?: string[];
  metadata?: Record<string, JsonValue>;
  preferredWorkingDirectory?: string;
};
// One provisioned resource: its hard allocation, optional soft context,
// and the callback that releases it.
type ResourceLease = {
  kind: string;
  hard: Record<string, JsonValue>;
  soft?: ResourceContextPatch;
  release: () => Promise<void>;
};
// Pluggable provider for one resource kind.
export type ResourceProvider<Options extends Record<string, unknown> = Record<string, unknown>> = {
  kind: string;
  provision: (input: {
    sessionId: string;
    workspaceRoot: string;
    options: Options;
  }) => Promise<ResourceLease>;
};
// What a spawned agent process needs: cwd, env, and the discovery file path.
type RuntimeInjection = {
  workingDirectory: string;
  env: Record<string, string>;
  discoveryFilePath: string;
};
// Internal aggregate state held by ProvisionedResources.
type ProvisionedResourcesState = {
  sessionId: string;
  workspaceRoot: string;
  workingDirectory: string;
  hardConstraints: Array<{
    kind: string;
    allocation: Record<string, JsonValue>;
  }>;
  env: Record<string, string>;
  promptSections: string[];
  metadata: Record<string, JsonValue>;
  // Release callbacks in acquisition order; released in reverse.
  releases: Array<{
    kind: string;
    release: () => Promise<void>;
  }>;
};
/**
 * Handle over a session's provisioned resources: exposes the working
 * directory, injected env, prompt sections, and a discovery snapshot, and
 * releases all leases (in reverse acquisition order) exactly once.
 */
export class ProvisionedResources {
  // Guards release() so double calls are no-ops.
  private released = false;
  constructor(private readonly state: ProvisionedResourcesState) {}
  /** Directory the agent process should run in. */
  getWorkingDirectory(): string {
    return this.state.workingDirectory;
  }
  /**
   * Merges the provisioned env over a sanitized copy of baseEnv
   * (defaults to process.env); provisioned values win on conflict.
   */
  getInjectedEnv(baseEnv: Record<string, string | undefined> = process.env): Record<string, string> {
    return {
      ...sanitizeEnv(baseEnv),
      ...this.state.env,
    };
  }
  /**
   * Prefixes the prompt with a bulleted list of resource constraint
   * sections; returns the prompt unchanged when there are none.
   */
  composePrompt(prompt: string, extraPromptSections: string[] = []): string {
    const sections = [...this.state.promptSections, ...extraPromptSections];
    if (sections.length === 0) {
      return prompt;
    }
    return [
      "Runtime resource constraints are already enforced by the orchestrator:",
      ...sections.map((section) => `- ${section}`),
      "",
      prompt,
    ].join("\n");
  }
  /** Deep-ish copy of the provisioning state as a serializable snapshot. */
  toDiscoverySnapshot(): DiscoverySnapshot {
    return {
      sessionId: this.state.sessionId,
      workspaceRoot: this.state.workspaceRoot,
      workingDirectory: this.state.workingDirectory,
      hardConstraints: this.state.hardConstraints.map((entry) => ({
        kind: entry.kind,
        allocation: { ...entry.allocation },
      })),
      softConstraints: {
        env: { ...this.state.env },
        promptSections: [...this.state.promptSections],
        metadata: { ...this.state.metadata },
      },
    };
  }
  /**
   * Writes the discovery snapshot JSON under the working directory and
   * returns cwd + env (with AGENT_DISCOVERY_FILE pointing at the file).
   */
  async buildRuntimeInjection(input?: {
    discoveryFileRelativePath?: string;
    baseEnv?: Record<string, string | undefined>;
  }): Promise<RuntimeInjection> {
    // `||` (not ??) so an empty/whitespace-only path also falls back.
    const relativePath = input?.discoveryFileRelativePath?.trim() || ".agent-context/resources.json";
    const discoveryFilePath = resolve(this.state.workingDirectory, relativePath);
    await mkdir(dirname(discoveryFilePath), { recursive: true });
    await writeFile(
      discoveryFilePath,
      `${JSON.stringify(this.toDiscoverySnapshot(), null, 2)}\n`,
      "utf8",
    );
    const env = this.getInjectedEnv(input?.baseEnv);
    env.AGENT_DISCOVERY_FILE = discoveryFilePath;
    return {
      workingDirectory: this.state.workingDirectory,
      env,
      discoveryFilePath,
    };
  }
  /**
   * Releases every lease in reverse acquisition order. Idempotent: only the
   * first call does work. Collects individual failures and throws a single
   * aggregate error after attempting every release.
   */
  async release(): Promise<void> {
    if (this.released) {
      return;
    }
    this.released = true;
    const errors: string[] = [];
    for (let index = this.state.releases.length - 1; index >= 0; index -= 1) {
      const releaser = this.state.releases[index];
      if (!releaser) {
        continue;
      }
      try {
        await releaser.release();
      } catch (error) {
        errors.push(`${releaser.kind}: ${toErrorMessage(error)}`);
      }
    }
    if (errors.length > 0) {
      throw new Error(`Failed to release provisioned resources: ${errors.join(" | ")}`);
    }
  }
}
/**
 * Registry of resource providers plus the session-level provisioning flow.
 * Provisioning is transactional: if any provider fails, leases acquired so
 * far are rolled back (in reverse order) before the error is rethrown.
 */
export class ResourceProvisioningOrchestrator {
  private readonly providers = new Map<string, ResourceProvider>();
  constructor(providers: ResourceProvider[] = []) {
    for (const provider of providers) {
      this.registerProvider(provider);
    }
  }
  /** Registers a provider; duplicate kinds are an error. */
  registerProvider(provider: ResourceProvider): void {
    if (this.providers.has(provider.kind)) {
      throw new Error(`Resource provider "${provider.kind}" is already registered.`);
    }
    this.providers.set(provider.kind, provider);
  }
  /**
   * Provisions every requested resource in order and aggregates hard
   * allocations, env, prompt sections, and metadata into a single handle.
   * Later leases overwrite earlier env/metadata keys on conflict; the last
   * preferredWorkingDirectory wins. Rolls back on any failure.
   */
  async provisionSession(input: {
    sessionId: string;
    resources: ResourceRequest[];
    workspaceRoot?: string;
  }): Promise<ProvisionedResources> {
    const workspaceRoot = resolve(input.workspaceRoot ?? process.cwd());
    const hardConstraints: ProvisionedResourcesState["hardConstraints"] = [];
    const releases: ProvisionedResourcesState["releases"] = [];
    const env: Record<string, string> = {};
    const promptSections: string[] = [];
    const metadata: Record<string, JsonValue> = {};
    let workingDirectory = workspaceRoot;
    try {
      for (const resource of input.resources) {
        const provider = this.providers.get(resource.kind);
        if (!provider) {
          throw new Error(`No provider registered for resource kind "${resource.kind}".`);
        }
        const lease = await provider.provision({
          sessionId: input.sessionId,
          workspaceRoot,
          options: (resource.options ?? {}) as Record<string, unknown>,
        });
        hardConstraints.push({
          kind: lease.kind,
          allocation: lease.hard,
        });
        releases.push({
          kind: lease.kind,
          release: lease.release,
        });
        if (lease.soft?.env) {
          Object.assign(env, lease.soft.env);
        }
        if (lease.soft?.promptSections) {
          promptSections.push(...lease.soft.promptSections);
        }
        if (lease.soft?.metadata) {
          Object.assign(metadata, lease.soft.metadata);
        }
        if (lease.soft?.preferredWorkingDirectory) {
          workingDirectory = resolve(lease.soft.preferredWorkingDirectory);
        }
      }
    } catch (error) {
      // Roll back everything acquired so far; rollback errors are swallowed
      // inside releaseInReverse so the original failure is preserved.
      await releaseInReverse(releases);
      throw new Error(`Resource provisioning failed: ${toErrorMessage(error)}`);
    }
    return new ProvisionedResources({
      sessionId: input.sessionId,
      workspaceRoot,
      workingDirectory,
      hardConstraints,
      env,
      promptSections,
      metadata,
      releases,
    });
  }
  /**
   * Provisions a child session by suballocating from the parent's snapshot
   * (see buildChildResourceRequests for the split rules).
   */
  async provisionChildSession(input: ChildResourceSuballocationInput): Promise<ProvisionedResources> {
    const childResources = buildChildResourceRequests(input);
    return this.provisionSession({
      sessionId: input.childSessionId,
      resources: childResources,
      workspaceRoot: input.parentSnapshot.workspaceRoot,
    });
  }
}
// Configuration for the built-in git-worktree provider.
export type GitWorktreeProviderConfig = {
  rootDirectory: string;
  baseRef: string;
};
// Configuration for the built-in deterministic port-range provider.
export type PortRangeProviderConfig = {
  basePort: number;
  blockSize: number;
  blockCount: number;
  primaryPortOffset: number;
  lockDirectory: string;
};
// Fully-resolved config for both built-in providers.
export type BuiltInProvisioningConfig = {
  gitWorktree: GitWorktreeProviderConfig;
  portRange: PortRangeProviderConfig;
};
// Caller-supplied partial overrides for the built-in providers.
export type BuiltInProvisioningConfigInput = {
  gitWorktree?: Partial<GitWorktreeProviderConfig>;
  portRange?: Partial<PortRangeProviderConfig>;
};
// Defaults: worktrees under .ai_ops, detached from HEAD.
export const DEFAULT_GIT_WORKTREE_CONFIG: GitWorktreeProviderConfig = {
  rootDirectory: ".ai_ops/worktrees",
  baseRef: "HEAD",
};
// Defaults: 512 blocks of 32 ports starting at 36000 (top of range 52383).
export const DEFAULT_PORT_RANGE_CONFIG: PortRangeProviderConfig = {
  basePort: 36000,
  blockSize: 32,
  blockCount: 512,
  primaryPortOffset: 0,
  lockDirectory: ".ai_ops/locks/ports",
};
/**
 * Provider that gives each session its own detached git worktree, named
 * from the session id (sanitized + hash suffix, see buildScopedName).
 * Release removes the worktree (--force) and prunes stale entries.
 */
export function createGitWorktreeProvider(
  config: GitWorktreeProviderConfig = DEFAULT_GIT_WORKTREE_CONFIG,
): ResourceProvider {
  return {
    kind: "git-worktree",
    provision: async ({ sessionId, workspaceRoot, options }) => {
      // Per-request options override the provider-level config.
      const rootDirectory = readOptionalString(options, "rootDirectory", config.rootDirectory);
      const baseRef = readOptionalString(options, "baseRef", config.baseRef);
      const repoRoot = await runGit(["-C", workspaceRoot, "rev-parse", "--show-toplevel"]);
      // rootDirectory may be absolute; otherwise it is relative to the repo root.
      const worktreeRoot = resolvePath(repoRoot, rootDirectory);
      await mkdir(worktreeRoot, { recursive: true });
      const worktreeName = buildScopedName(sessionId);
      const worktreePath = resolve(worktreeRoot, worktreeName);
      await runGit(["-C", repoRoot, "worktree", "add", "--detach", worktreePath, baseRef]);
      return {
        kind: "git-worktree",
        hard: {
          repoRoot,
          worktreeRoot,
          worktreePath,
          baseRef,
        },
        soft: {
          env: {
            AGENT_REPO_ROOT: repoRoot,
            AGENT_WORKTREE_PATH: worktreePath,
            AGENT_WORKTREE_BASE_REF: baseRef,
          },
          promptSections: [
            `Git worktree: ${worktreePath}`,
            `Worktree base ref: ${baseRef}`,
          ],
          metadata: {
            git_worktree_path: worktreePath,
            git_worktree_base_ref: baseRef,
          },
          // The agent runs inside its worktree, not the main checkout.
          preferredWorkingDirectory: worktreePath,
        },
        release: async () => {
          await runGit(["-C", repoRoot, "worktree", "remove", "--force", worktreePath]);
          await runGit(["-C", repoRoot, "worktree", "prune"]);
        },
      };
    },
  };
}
/**
 * Provider that leases a deterministic, contiguous port block per session.
 * The preferred block is chosen by hashing the session id; if it is taken
 * (lock file exists) the search probes subsequent blocks linearly. Mutual
 * exclusion is via exclusive-create lock files; release deletes the lock.
 */
export function createPortRangeProvider(
  config: PortRangeProviderConfig = DEFAULT_PORT_RANGE_CONFIG,
): ResourceProvider {
  return {
    kind: "port-range",
    provision: async ({ sessionId, workspaceRoot, options }) => {
      // Per-request options override the provider-level config.
      const basePort = readOptionalInteger(options, "basePort", config.basePort, { min: 1 });
      const blockSize = readOptionalInteger(options, "blockSize", config.blockSize, { min: 1 });
      const blockCount = readOptionalInteger(options, "blockCount", config.blockCount, {
        min: 1,
      });
      const primaryPortOffset = readOptionalInteger(
        options,
        "primaryPortOffset",
        config.primaryPortOffset,
        { min: 0 },
      );
      const lockDirectory = readOptionalString(options, "lockDirectory", config.lockDirectory);
      if (primaryPortOffset >= blockSize) {
        throw new Error("primaryPortOffset must be smaller than blockSize.");
      }
      const maxPort = basePort + blockSize * blockCount - 1;
      if (maxPort > 65535) {
        throw new Error(
          `Port range exceeds 65535 (basePort=${basePort}, blockSize=${blockSize}, blockCount=${blockCount}).`,
        );
      }
      const lockRoot = resolvePath(workspaceRoot, lockDirectory);
      await mkdir(lockRoot, { recursive: true });
      // Deterministic starting block so the same session prefers the same ports.
      const seed = hashToUnsignedInt(sessionId);
      const preferredBlock = seed % blockCount;
      let startPort = -1;
      let endPort = -1;
      let blockIndex = -1;
      let lockPath = "";
      // Linear probe from the preferred block, wrapping around once.
      for (let offset = 0; offset < blockCount; offset += 1) {
        const candidateBlock = (preferredBlock + offset) % blockCount;
        const candidateStart = basePort + candidateBlock * blockSize;
        const candidateEnd = candidateStart + blockSize - 1;
        const candidateLockPath = resolve(
          lockRoot,
          `${String(candidateStart)}-${String(candidateEnd)}.lock`,
        );
        const lockHandle = await tryCreateLockFile(candidateLockPath, {
          sessionId,
          allocatedAt: new Date().toISOString(),
          startPort: candidateStart,
          endPort: candidateEnd,
        });
        if (!lockHandle) {
          // Block already locked by another session; try the next one.
          continue;
        }
        // The lock file's existence is the lease; the handle can be closed.
        await lockHandle.close();
        startPort = candidateStart;
        endPort = candidateEnd;
        blockIndex = candidateBlock;
        lockPath = candidateLockPath;
        break;
      }
      if (startPort < 0 || endPort < 0 || blockIndex < 0 || !lockPath) {
        throw new Error("No free deterministic port block is available.");
      }
      const primaryPort = startPort + primaryPortOffset;
      return {
        kind: "port-range",
        hard: {
          basePort,
          blockSize,
          blockCount,
          blockIndex,
          startPort,
          endPort,
          primaryPort,
          lockPath,
        },
        soft: {
          env: {
            AGENT_PORT_RANGE_START: String(startPort),
            AGENT_PORT_RANGE_END: String(endPort),
            AGENT_PORT_PRIMARY: String(primaryPort),
          },
          promptSections: [
            `Assigned deterministic port range: ${String(startPort)}-${String(endPort)}`,
            `Primary port: ${String(primaryPort)}`,
          ],
          metadata: {
            port_range_start: startPort,
            port_range_end: endPort,
            port_primary: primaryPort,
            port_block_index: blockIndex,
          },
        },
        release: async () => {
          // Deleting the lock file frees the block for other sessions.
          await unlink(lockPath);
        },
      };
    },
  };
}
/**
 * Builds an orchestrator pre-registered with the built-in git-worktree and
 * port-range providers, overlaying caller overrides on the defaults.
 */
export function createDefaultResourceProvisioningOrchestrator(
  input: BuiltInProvisioningConfigInput = {},
): ResourceProvisioningOrchestrator {
  const gitWorktreeConfig = { ...DEFAULT_GIT_WORKTREE_CONFIG, ...input.gitWorktree };
  const portRangeConfig = { ...DEFAULT_PORT_RANGE_CONFIG, ...input.portRange };
  return new ResourceProvisioningOrchestrator([
    createGitWorktreeProvider(gitWorktreeConfig),
    createPortRangeProvider(portRangeConfig),
  ]);
}
/**
 * Runs a git command and returns trimmed stdout; any failure is wrapped in
 * an Error naming the full argument list.
 */
async function runGit(args: string[]): Promise<string> {
  try {
    const result = await execFileAsync("git", args, {
      encoding: "utf8",
    });
    return result.stdout.trim();
  } catch (error) {
    throw new Error(`git ${args.join(" ")} failed: ${toErrorMessage(error)}`);
  }
}
/**
 * Derives a filesystem-safe, collision-resistant directory name from a
 * session id: sanitized prefix (capped at 32 chars, "session" when the id
 * sanitizes to nothing) plus an 8-hex-char sha256 suffix of the raw id.
 */
function buildScopedName(sessionId: string): string {
  const sanitized = sessionId.replace(/[^a-zA-Z0-9_-]/g, "-").replace(/-+/g, "-");
  const prefix = (sanitized || "session").slice(0, 32);
  const suffix = createHash("sha256").update(sessionId).digest("hex").slice(0, 8);
  return `${prefix}-${suffix}`;
}
/** Resolves maybeRelativePath against basePath, leaving absolute paths untouched. */
function resolvePath(basePath: string, maybeRelativePath: string): string {
  return isAbsolute(maybeRelativePath) ? maybeRelativePath : resolve(basePath, maybeRelativePath);
}
/** Drops undefined entries so the result is a plain string-to-string env map. */
function sanitizeEnv(source: Record<string, string | undefined>): Record<string, string> {
  const defined = Object.entries(source).filter(
    (entry): entry is [string, string] => entry[1] !== undefined,
  );
  return Object.fromEntries(defined);
}
/**
 * Releases leases in reverse acquisition order, swallowing individual
 * failures — used during rollback, where the original provisioning error
 * must be preserved rather than replaced by a cleanup error.
 */
async function releaseInReverse(
  releases: Array<{
    kind: string;
    release: () => Promise<void>;
  }>,
): Promise<void> {
  for (const releaser of [...releases].reverse()) {
    if (!releaser) {
      continue;
    }
    try {
      await releaser.release();
    } catch {
      // Ignore rollback release errors to preserve the original provisioning error.
    }
  }
}
/** Deterministically maps a string to a uint32 via the first 4 bytes of its sha256. */
function hashToUnsignedInt(value: string): number {
  return createHash("sha256").update(value).digest().readUInt32BE(0);
}
/**
 * Atomically creates a lock file ("wx": exclusive create — fails if the
 * file exists) and writes the JSON payload into it.
 *
 * @returns The open file handle on success, or undefined when the lock is
 *   already held (EEXIST). Other filesystem errors are wrapped and rethrown.
 */
async function tryCreateLockFile(
  lockPath: string,
  payload: Record<string, JsonValue>,
): Promise<Awaited<ReturnType<typeof open>> | undefined> {
  try {
    const handle = await open(lockPath, "wx");
    await handle.writeFile(`${JSON.stringify(payload)}\n`, "utf8");
    return handle;
  } catch (error) {
    if ((error as NodeJS.ErrnoException).code === "EEXIST") {
      // Lock is held by someone else — not an error for the caller.
      return undefined;
    }
    throw new Error(`Failed to create lock file "${lockPath}": ${toErrorMessage(error)}`);
  }
}
/**
 * Reads a string option: absent keys return the fallback, present values
 * must be non-blank strings (returned trimmed), anything else throws.
 */
function readOptionalString(
  options: Record<string, unknown>,
  key: string,
  fallback: string,
): string {
  const raw = options[key];
  if (raw === undefined) {
    return fallback;
  }
  if (typeof raw !== "string" || raw.trim().length === 0) {
    throw new Error(`Option "${key}" must be a non-empty string.`);
  }
  return raw.trim();
}
/**
 * Reads an integer option: absent keys return the fallback, present values
 * must be integers >= bounds.min, anything else throws.
 */
function readOptionalInteger(
  options: Record<string, unknown>,
  key: string,
  fallback: number,
  bounds: {
    min: number;
  },
): number {
  const raw = options[key];
  if (raw === undefined) {
    return fallback;
  }
  if (typeof raw !== "number" || !Number.isInteger(raw) || raw < bounds.min) {
    throw new Error(`Option "${key}" must be an integer >= ${String(bounds.min)}.`);
  }
  return raw;
}
/** Extracts an integer field from a provider allocation record, or throws. */
function readNumberFromAllocation(allocation: Record<string, JsonValue>, key: string): number {
  const candidate = allocation[key];
  if (typeof candidate === "number" && Number.isInteger(candidate)) {
    return candidate;
  }
  throw new Error(`Allocation field "${key}" must be an integer.`);
}
/** Extracts a non-blank string field from a provider allocation record, or throws. */
function readStringFromAllocation(allocation: Record<string, JsonValue>, key: string): string {
  const candidate = allocation[key];
  if (typeof candidate === "string" && candidate.trim().length > 0) {
    // Returned as-is (not trimmed) to preserve the stored value exactly.
    return candidate;
  }
  throw new Error(`Allocation field "${key}" must be a non-empty string.`);
}
/** Finds the first hard-constraint allocation of the given kind, if present. */
function getHardConstraint(
  snapshot: DiscoverySnapshot,
  kind: string,
): Record<string, JsonValue> | undefined {
  return snapshot.hardConstraints.find((constraint) => constraint.kind === kind)?.allocation;
}
/** Clamps value into the inclusive [min, max] range. */
function clamp(value: number, min: number, max: number): number {
  if (value < min) {
    return min;
  }
  if (value > max) {
    return max;
  }
  return value;
}
/**
 * Derives the resource requests for one child session from its parent's
 * provisioned snapshot:
 * - git-worktree: a new worktree rooted under the parent's worktree, on the
 *   parent's base ref (defaulting to "HEAD" when absent/blank).
 * - port-range: an even split of the parent's port block; the last child
 *   absorbs any remainder, and the primary offset is the parent's primary
 *   clamped into the child's block.
 *
 * @throws When childCount/childIndex are out of range, when required parent
 *   allocation fields are missing/mistyped, or when the parent's port block
 *   is too small to give every child at least one port.
 */
export function buildChildResourceRequests(input: ChildResourceSuballocationInput): ResourceRequest[] {
  if (!Number.isInteger(input.childCount) || input.childCount < 1) {
    throw new Error("childCount must be a positive integer.");
  }
  if (!Number.isInteger(input.childIndex) || input.childIndex < 0 || input.childIndex >= input.childCount) {
    throw new Error("childIndex must be an integer in [0, childCount).");
  }
  const requests: ResourceRequest[] = [];
  const parentGit = getHardConstraint(input.parentSnapshot, "git-worktree");
  if (parentGit) {
    const parentWorktreePath = readStringFromAllocation(parentGit, "worktreePath");
    const baseRefRaw = parentGit.baseRef;
    // Fall back to "HEAD" when the parent's baseRef is missing or blank.
    const baseRef = typeof baseRefRaw === "string" && baseRefRaw.trim().length > 0 ? baseRefRaw : "HEAD";
    requests.push({
      kind: "git-worktree",
      options: {
        // Child worktrees nest under the parent's worktree, scoped by the
        // parent's session name so siblings from different parents never clash.
        rootDirectory: resolve(
          parentWorktreePath,
          ".ai_ops/child-worktrees",
          buildScopedName(input.parentSnapshot.sessionId),
        ),
        baseRef,
      },
    });
  }
  const parentPortRange = getHardConstraint(input.parentSnapshot, "port-range");
  if (parentPortRange) {
    const parentStart = readNumberFromAllocation(parentPortRange, "startPort");
    const parentEnd = readNumberFromAllocation(parentPortRange, "endPort");
    const parentPrimary = readNumberFromAllocation(parentPortRange, "primaryPort");
    const lockPath = readStringFromAllocation(parentPortRange, "lockPath");
    const parentSize = parentEnd - parentStart + 1;
    const minChildSize = Math.floor(parentSize / input.childCount);
    if (minChildSize < 1) {
      throw new Error(
        `Cannot suballocate ${String(input.childCount)} child port blocks from parent range ${String(parentStart)}-${String(parentEnd)}.`,
      );
    }
    const childStart = parentStart + minChildSize * input.childIndex;
    // The last child extends to the parent's end to absorb the remainder.
    const childEnd =
      input.childIndex === input.childCount - 1 ? parentEnd : childStart + minChildSize - 1;
    const childBlockSize = childEnd - childStart + 1;
    // Keep the primary port aligned with the parent's where possible.
    const primaryOffset = clamp(parentPrimary - childStart, 0, childBlockSize - 1);
    requests.push({
      kind: "port-range",
      options: {
        basePort: childStart,
        blockSize: childBlockSize,
        blockCount: 1,
        primaryPortOffset: primaryOffset,
        // Child locks live next to the parent's lock file.
        lockDirectory: dirname(lockPath),
      },
    });
  }
  return requests;
}
/** Normalizes an unknown thrown value to a human-readable message. */
function toErrorMessage(error: unknown): string {
  return error instanceof Error ? error.message : String(error);
}

148
src/agents/runtime.ts Normal file
View File

@@ -0,0 +1,148 @@
import { AgentManager, type AgentManagerLimits } from "./manager.js";
import {
createDefaultResourceProvisioningOrchestrator,
type BuiltInProvisioningConfigInput,
type ResourceProvisioningOrchestrator,
} from "./provisioning.js";
// Fallback agent-manager limits used when the env vars are not set.
const DEFAULT_LIMITS: AgentManagerLimits = {
  maxConcurrentAgents: 4,
  maxSessionAgents: 2,
  maxRecursiveDepth: 3,
};
// Fallback provider config used when the env vars are not set; mirrors the
// built-in provider defaults in provisioning.ts.
const DEFAULT_PROVISIONING_CONFIG: BuiltInProvisioningConfigInput = {
  gitWorktree: {
    rootDirectory: ".ai_ops/worktrees",
    baseRef: "HEAD",
  },
  portRange: {
    basePort: 36000,
    blockSize: 32,
    blockCount: 512,
    primaryPortOffset: 0,
    lockDirectory: ".ai_ops/locks/ports",
  },
};
/**
 * Reads a positive-integer limit from the environment.
 *
 * Returns `fallback` when the variable is unset or blank after trimming;
 * throws when it is set to anything other than a positive integer.
 */
function readPositiveIntegerEnv(
  key: "AGENT_MAX_CONCURRENT" | "AGENT_MAX_SESSION" | "AGENT_MAX_RECURSIVE_DEPTH",
  fallback: number,
): number {
  const raw = process.env[key]?.trim();
  if (!raw) {
    return fallback;
  }
  const value = Number(raw);
  if (Number.isInteger(value) && value >= 1) {
    return value;
  }
  throw new Error(`Environment variable ${key} must be a positive integer.`);
}
/**
 * Reads a trimmed string from the environment.
 * Returns `fallback` when the variable is unset or blank after trimming.
 */
function readOptionalStringEnv(key: string, fallback: string): string {
  const trimmed = process.env[key]?.trim();
  return trimmed ? trimmed : fallback;
}
/**
 * Reads a lower-bounded integer from the environment.
 *
 * Returns `fallback` when the variable is unset or blank after trimming;
 * throws when the value is not an integer or falls below `bounds.min`.
 */
function readIntegerEnv(
  key: string,
  fallback: number,
  bounds: {
    min: number;
  },
): number {
  const raw = process.env[key]?.trim();
  if (!raw) {
    return fallback;
  }
  const value = Number(raw);
  if (Number.isInteger(value) && value >= bounds.min) {
    return value;
  }
  throw new Error(`Environment variable ${key} must be an integer >= ${String(bounds.min)}.`);
}
/**
 * Builds AgentManagerLimits from the AGENT_MAX_* environment variables,
 * falling back to DEFAULT_LIMITS for anything unset or blank.
 * Throws if a set variable is not a positive integer.
 */
export function loadAgentManagerLimitsFromEnv(): AgentManagerLimits {
  const maxConcurrentAgents = readPositiveIntegerEnv(
    "AGENT_MAX_CONCURRENT",
    DEFAULT_LIMITS.maxConcurrentAgents,
  );
  const maxSessionAgents = readPositiveIntegerEnv(
    "AGENT_MAX_SESSION",
    DEFAULT_LIMITS.maxSessionAgents,
  );
  const maxRecursiveDepth = readPositiveIntegerEnv(
    "AGENT_MAX_RECURSIVE_DEPTH",
    DEFAULT_LIMITS.maxRecursiveDepth,
  );
  return { maxConcurrentAgents, maxSessionAgents, maxRecursiveDepth };
}
// Lazily-created module-level singletons; populated on first use by
// getAgentManager and getResourceProvisioningOrchestrator respectively.
let managerSingleton: AgentManager | undefined;
let provisioningSingleton: ResourceProvisioningOrchestrator | undefined;
/**
 * Returns the module-level AgentManager, creating it on first call
 * with limits read from the environment.
 */
export function getAgentManager(): AgentManager {
  if (managerSingleton) {
    return managerSingleton;
  }
  const manager = new AgentManager(loadAgentManagerLimitsFromEnv());
  managerSingleton = manager;
  return manager;
}
/**
 * Builds the provisioning configuration from AGENT_* environment variables,
 * falling back to DEFAULT_PROVISIONING_CONFIG (with hard-coded last-resort
 * defaults) for anything unset or blank.
 */
export function loadProvisioningConfigFromEnv(): BuiltInProvisioningConfigInput {
  const worktreeDefaults = DEFAULT_PROVISIONING_CONFIG.gitWorktree;
  const portDefaults = DEFAULT_PROVISIONING_CONFIG.portRange;
  const gitWorktree = {
    rootDirectory: readOptionalStringEnv(
      "AGENT_WORKTREE_ROOT",
      worktreeDefaults?.rootDirectory ?? ".ai_ops/worktrees",
    ),
    baseRef: readOptionalStringEnv(
      "AGENT_WORKTREE_BASE_REF",
      worktreeDefaults?.baseRef ?? "HEAD",
    ),
  };
  const portRange = {
    basePort: readIntegerEnv("AGENT_PORT_BASE", portDefaults?.basePort ?? 36000, { min: 1 }),
    blockSize: readIntegerEnv("AGENT_PORT_BLOCK_SIZE", portDefaults?.blockSize ?? 32, { min: 1 }),
    blockCount: readIntegerEnv("AGENT_PORT_BLOCK_COUNT", portDefaults?.blockCount ?? 512, {
      min: 1,
    }),
    primaryPortOffset: readIntegerEnv(
      "AGENT_PORT_PRIMARY_OFFSET",
      portDefaults?.primaryPortOffset ?? 0,
      { min: 0 },
    ),
    lockDirectory: readOptionalStringEnv(
      "AGENT_PORT_LOCK_DIR",
      portDefaults?.lockDirectory ?? ".ai_ops/locks/ports",
    ),
  };
  return { gitWorktree, portRange };
}
/**
 * Returns the module-level ResourceProvisioningOrchestrator, creating it on
 * first call with configuration read from the environment.
 */
export function getResourceProvisioningOrchestrator(): ResourceProvisioningOrchestrator {
  if (provisioningSingleton) {
    return provisioningSingleton;
  }
  const orchestrator = createDefaultResourceProvisioningOrchestrator(
    loadProvisioningConfigFromEnv(),
  );
  provisioningSingleton = orchestrator;
  return orchestrator;
}

291
src/agents/state-context.ts Normal file
View File

@@ -0,0 +1,291 @@
import { mkdir, readFile, writeFile } from "node:fs/promises";
import { dirname, resolve } from "node:path";
import { deepCloneJson, isRecord, type JsonObject, type JsonValue } from "./types.js";
/** One record appended to a session's history (see patchState). */
export type SessionHistoryEntry = {
  nodeId: string;
  event: string;
  timestamp: string; // ISO-8601 string in practice — written via toISOString elsewhere; TODO confirm for external writers
  data?: JsonObject;
};
/** Durable per-session state, persisted as state.json. */
export type StoredSessionState = {
  flags: Record<string, boolean>;
  metadata: JsonObject;
  history: SessionHistoryEntry[];
};
/** Payload handed to a node, persisted as handoffs/<nodeId>.json. */
export type NodeHandoff = {
  nodeId: string;
  fromNodeId?: string; // absent for the first node in a chain — presumably; verify against writers
  payload: JsonObject;
  createdAt: string;
};
/** Snapshot given to a node at execution time: its handoff plus session state. */
export type NodeExecutionContext = {
  sessionId: string;
  nodeId: string;
  handoff: NodeHandoff;
  state: StoredSessionState;
};
// Baseline returned by readState when no state.json exists yet.
const DEFAULT_STATE: StoredSessionState = {
  flags: {},
  metadata: {},
  history: [],
};
/** Absolute path of the per-session directory under the state root. */
function toSessionDirectory(rootDirectory: string, sessionId: string): string {
  const sessionDirectory = resolve(rootDirectory, sessionId);
  return sessionDirectory;
}
function toStatePath(rootDirectory: string, sessionId: string): string {
return resolve(toSessionDirectory(rootDirectory, sessionId), "state.json");
}
/** Path of a node's handoff JSON file inside the session's handoffs directory. */
function toHandoffPath(rootDirectory: string, sessionId: string, nodeId: string): string {
  const handoffsDirectory = resolve(toSessionDirectory(rootDirectory, sessionId), "handoffs");
  return resolve(handoffsDirectory, `${nodeId}.json`);
}
/** Narrows an unknown value to a JsonObject, throwing `errorMessage` otherwise. */
function toJsonObject(value: unknown, errorMessage: string): JsonObject {
  if (isRecord(value)) {
    return value as JsonObject;
  }
  throw new Error(errorMessage);
}
/**
 * Validates a parsed state.json payload and converts it into a
 * StoredSessionState, throwing a descriptive error on any structural problem.
 * Validation order: flags, then metadata, then each history entry in turn.
 */
function toStoredSessionState(value: unknown): StoredSessionState {
  if (!isRecord(value)) {
    throw new Error("Stored state file is malformed.");
  }
  const rawFlags = value.flags;
  if (!isRecord(rawFlags)) {
    throw new Error("Stored state.flags is malformed.");
  }
  const flags: Record<string, boolean> = {};
  for (const [flagName, flag] of Object.entries(rawFlags)) {
    if (typeof flag !== "boolean") {
      throw new Error(`Stored state flag "${flagName}" must be a boolean.`);
    }
    flags[flagName] = flag;
  }
  const metadata = toJsonObject(value.metadata, "Stored state.metadata is malformed.");
  const rawHistory = value.history;
  if (!Array.isArray(rawHistory)) {
    throw new Error("Stored state.history is malformed.");
  }
  const history = rawHistory.map((entry): SessionHistoryEntry => {
    if (!isRecord(entry)) {
      throw new Error("Stored state.history entry is malformed.");
    }
    const { nodeId, event, timestamp } = entry;
    if (typeof nodeId !== "string" || nodeId.trim().length === 0) {
      throw new Error("Stored state.history entry nodeId is malformed.");
    }
    if (typeof event !== "string" || event.trim().length === 0) {
      throw new Error("Stored state.history entry event is malformed.");
    }
    if (typeof timestamp !== "string" || timestamp.trim().length === 0) {
      throw new Error("Stored state.history entry timestamp is malformed.");
    }
    const data =
      entry.data === undefined
        ? undefined
        : toJsonObject(entry.data, "Stored state.history entry data is malformed.");
    // Omit the data key entirely when absent, matching the optional field.
    return { nodeId, event, timestamp, ...(data ? { data } : {}) };
  });
  return { flags, metadata, history };
}
function toNodeHandoff(value: unknown): NodeHandoff {
if (!isRecord(value)) {
throw new Error("Stored handoff file is malformed.");
}
const nodeId = value.nodeId;
const createdAt = value.createdAt;
const payload = value.payload;
if (typeof nodeId !== "string" || nodeId.trim().length === 0) {
throw new Error("Stored handoff nodeId is malformed.");
}
if (typeof createdAt !== "string" || createdAt.trim().length === 0) {
throw new Error("Stored handoff createdAt is malformed.");
}
if (!isRecord(payload)) {
throw new Error("Stored handoff payload is malformed.");
}
const fromNodeId = value.fromNodeId;
if (fromNodeId !== undefined && (typeof fromNodeId !== "string" || fromNodeId.trim().length === 0)) {
throw new Error("Stored handoff fromNodeId is malformed.");
}
return {
nodeId,
...(typeof fromNodeId === "string" ? { fromNodeId } : {}),
payload: payload as JsonObject,
createdAt,
};
}
/**
 * Persists per-session workflow state and node handoffs as JSON files under
 * a root directory, one subdirectory per session.
 *
 * Layout: <root>/<sessionId>/state.json and
 * <root>/<sessionId>/handoffs/<nodeId>.json.
 *
 * NOTE(review): writes are plain writeFile calls (no temp-file + rename), so
 * concurrent writers to the same session can interleave — confirm callers
 * serialize access per session.
 */
export class FileSystemStateContextManager {
  private readonly rootDirectory: string;
  constructor(input: {
    rootDirectory: string;
  }) {
    // Resolve once so every derived path is absolute.
    this.rootDirectory = resolve(input.rootDirectory);
  }
  /** Absolute root directory holding all session subdirectories. */
  getRootDirectory(): string {
    return this.rootDirectory;
  }
  /**
   * Creates the session directory and writes an initial state.json.
   * Missing parts of `initialState` default to empty flags/metadata/history.
   * Returns the state object that was written.
   */
  async initializeSession(
    sessionId: string,
    initialState: Partial<StoredSessionState> = {},
  ): Promise<StoredSessionState> {
    const state: StoredSessionState = {
      flags: { ...(initialState.flags ?? {}) },
      metadata: deepCloneJson((initialState.metadata ?? {}) as JsonValue) as JsonObject,
      // NOTE(review): shallow copy — individual history entries remain shared
      // with the caller, unlike metadata which is deep-cloned.
      history: [...(initialState.history ?? [])],
    };
    await mkdir(toSessionDirectory(this.rootDirectory, sessionId), { recursive: true });
    await this.writeState(sessionId, state);
    return state;
  }
  /**
   * Reads and validates the session's state.json.
   * Returns a fresh copy of the empty default state when the file does not
   * exist yet (ENOENT); any other read/parse/validation error propagates.
   */
  async readState(sessionId: string): Promise<StoredSessionState> {
    const path = toStatePath(this.rootDirectory, sessionId);
    try {
      const content = await readFile(path, "utf8");
      const parsed = JSON.parse(content) as unknown;
      return toStoredSessionState(parsed);
    } catch (error) {
      const code = (error as NodeJS.ErrnoException).code;
      if (code === "ENOENT") {
        // Clone so callers cannot mutate the shared DEFAULT_STATE.
        return deepCloneJson(DEFAULT_STATE as JsonValue) as StoredSessionState;
      }
      throw error;
    }
  }
  /** Writes state.json (pretty-printed, trailing newline), creating directories as needed. */
  async writeState(sessionId: string, state: StoredSessionState): Promise<void> {
    const path = toStatePath(this.rootDirectory, sessionId);
    await mkdir(dirname(path), { recursive: true });
    await writeFile(path, `${JSON.stringify(state, null, 2)}\n`, "utf8");
  }
  /**
   * Read-modify-write update of the session state: merges `flags` and
   * `metadata` (shallow, key-wise) and appends `historyEvent` if given,
   * then persists and returns the updated state.
   */
  async patchState(
    sessionId: string,
    patch: {
      flags?: Record<string, boolean>;
      metadata?: JsonObject;
      historyEvent?: SessionHistoryEntry;
    },
  ): Promise<StoredSessionState> {
    const current = await this.readState(sessionId);
    if (patch.flags) {
      Object.assign(current.flags, patch.flags);
    }
    if (patch.metadata) {
      Object.assign(current.metadata, patch.metadata);
    }
    if (patch.historyEvent) {
      current.history.push(patch.historyEvent);
    }
    await this.writeState(sessionId, current);
    return current;
  }
  /**
   * Persists a handoff for `handoff.nodeId`, stamping createdAt with the
   * current time and deep-cloning the payload. Overwrites any prior handoff
   * for the same node. Returns the stored record.
   */
  async writeHandoff(
    sessionId: string,
    handoff: {
      nodeId: string;
      fromNodeId?: string;
      payload: JsonObject;
    },
  ): Promise<NodeHandoff> {
    const nodeHandoff: NodeHandoff = {
      nodeId: handoff.nodeId,
      // Empty-string fromNodeId is falsy and therefore dropped here.
      ...(handoff.fromNodeId ? { fromNodeId: handoff.fromNodeId } : {}),
      payload: deepCloneJson(handoff.payload),
      createdAt: new Date().toISOString(),
    };
    const path = toHandoffPath(this.rootDirectory, sessionId, handoff.nodeId);
    await mkdir(dirname(path), { recursive: true });
    await writeFile(path, `${JSON.stringify(nodeHandoff, null, 2)}\n`, "utf8");
    return nodeHandoff;
  }
  /**
   * Reads and validates the handoff for `nodeId`, or returns undefined when
   * none has been written (ENOENT). Other errors propagate.
   */
  async readHandoff(sessionId: string, nodeId: string): Promise<NodeHandoff | undefined> {
    const path = toHandoffPath(this.rootDirectory, sessionId, nodeId);
    try {
      const content = await readFile(path, "utf8");
      const parsed = JSON.parse(content) as unknown;
      return toNodeHandoff(parsed);
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code === "ENOENT") {
        return undefined;
      }
      throw error;
    }
  }
  /**
   * Assembles a node's execution context from its stored handoff and the
   * current session state. Throws when no handoff exists for the node.
   * Everything returned is a defensive copy, so callers may mutate freely.
   */
  async buildFreshNodeContext(sessionId: string, nodeId: string): Promise<NodeExecutionContext> {
    const handoff = await this.readHandoff(sessionId, nodeId);
    if (!handoff) {
      throw new Error(`No handoff exists for node \"${nodeId}\" in session \"${sessionId}\".`);
    }
    const state = await this.readState(sessionId);
    return {
      sessionId,
      nodeId,
      handoff: {
        nodeId: handoff.nodeId,
        ...(handoff.fromNodeId ? { fromNodeId: handoff.fromNodeId } : {}),
        payload: deepCloneJson(handoff.payload),
        createdAt: handoff.createdAt,
      },
      state: {
        flags: { ...state.flags },
        metadata: deepCloneJson(state.metadata),
        history: state.history.map((entry) => ({
          nodeId: entry.nodeId,
          event: entry.event,
          timestamp: entry.timestamp,
          ...(entry.data ? { data: deepCloneJson(entry.data) } : {}),
        })),
      },
    };
  }
}

14
src/agents/types.ts Normal file
View File

@@ -0,0 +1,14 @@
/** Leaf values allowed in JSON. */
export type JsonPrimitive = string | number | boolean | null;
/** Any JSON-serializable value: primitives, arrays, or string-keyed objects. */
export type JsonValue = JsonPrimitive | JsonValue[] | { [key: string]: JsonValue };
/** A JSON object with string keys and JSON values. */
export type JsonObject = {
  [key: string]: JsonValue;
};
/** True when `value` is a plain object: non-null, typeof "object", not an array. */
export function isRecord(value: unknown): value is Record<string, unknown> {
  if (value === null || Array.isArray(value)) {
    return false;
  }
  return typeof value === "object";
}
/**
 * Deep-clones a JSON-serializable value via a stringify/parse round trip.
 * Non-JSON content (functions, undefined, Dates) is not preserved.
 */
export function deepCloneJson<T extends JsonValue>(value: T): T {
  const serialized = JSON.stringify(value);
  return JSON.parse(serialized) as T;
}