Wire pipeline DAG execution to manager with events and project context

This commit is contained in:
2026-02-23 13:14:20 -05:00
parent 53af0d44cd
commit 889087daa1
13 changed files with 1668 additions and 380 deletions

View File

@@ -1,6 +1,6 @@
import test from "node:test";
import assert from "node:assert/strict";
import { mkdtemp, writeFile } from "node:fs/promises";
import { mkdtemp, readFile, writeFile } from "node:fs/promises";
import { tmpdir } from "node:os";
import { resolve } from "node:path";
import { SchemaDrivenExecutionEngine } from "../src/agents/orchestration.js";
@@ -146,6 +146,7 @@ function createManifest(): unknown {
test("runs DAG pipeline with state-dependent routing and retry behavior", async () => {
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));
const projectContextPath = resolve(stateRoot, "project-context.json");
await writeFile(resolve(workspaceRoot, "PRD.md"), "# PRD\n", "utf8");
@@ -156,6 +157,7 @@ test("runs DAG pipeline with state-dependent routing and retry behavior", async
settings: {
workspaceRoot,
stateRoot,
projectContextPath,
runtimeContext: {
repo: "ai_ops",
ticket: "AIOPS-123",
@@ -246,3 +248,422 @@ test("runs DAG pipeline with state-dependent routing and retry behavior", async
assert.deepEqual(engine.planChildPersonas({ parentPersonaId: "task", depth: 1 }), ["coder"]);
});
// Integration test for the "parallel" topology: two coder nodes sharing the
// same blockId must run concurrently, their "code_committed" domain events
// must route both branches into the downstream integrate node, and the
// projectContextPatch values returned by executors must be merged and
// persisted to the JSON file at projectContextPath.
test("runs parallel topology blocks concurrently and routes via domain-event edges", async () => {
  // Fresh throwaway directories so each run is hermetic.
  const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
  const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));
  const projectContextPath = resolve(stateRoot, "project-context.json");
  // Pipeline shape: plan -> (code-a || code-b) -> integrate.
  // code-a/code-b share blockId "implementation" (one parallel block); both
  // edges into "integrate" fire on the "code_committed" event rather than on
  // plain success.
  const manifest = {
    schemaVersion: "1",
    topologies: ["parallel", "retry-unrolled", "sequential"],
    personas: [
      {
        id: "planner",
        displayName: "Planner",
        systemPromptTemplate: "Planner {{repo}}",
        toolClearance: {
          allowlist: ["read_file"],
          banlist: [],
        },
      },
      {
        id: "coder",
        displayName: "Coder",
        systemPromptTemplate: "Coder {{repo}}",
        toolClearance: {
          allowlist: ["read_file", "write_file"],
          banlist: [],
        },
      },
      {
        id: "integrator",
        displayName: "Integrator",
        systemPromptTemplate: "Integrator {{repo}}",
        toolClearance: {
          allowlist: ["read_file"],
          banlist: [],
        },
      },
    ],
    relationships: [],
    topologyConstraints: {
      maxDepth: 5,
      maxRetries: 2,
    },
    pipeline: {
      entryNodeId: "plan",
      nodes: [
        {
          id: "plan",
          actorId: "plan_actor",
          personaId: "planner",
        },
        {
          id: "code-a",
          actorId: "code_a",
          personaId: "coder",
          topology: {
            kind: "parallel",
            blockId: "implementation",
          },
        },
        {
          id: "code-b",
          actorId: "code_b",
          personaId: "coder",
          topology: {
            kind: "parallel",
            blockId: "implementation",
          },
        },
        {
          id: "integrate",
          actorId: "integrate_actor",
          personaId: "integrator",
        },
      ],
      edges: [
        {
          from: "plan",
          to: "code-a",
          on: "success",
        },
        {
          from: "plan",
          to: "code-b",
          on: "success",
        },
        {
          from: "code-a",
          to: "integrate",
          event: "code_committed",
        },
        {
          from: "code-b",
          to: "integrate",
          event: "code_committed",
        },
      ],
    },
  } as const;
  // Concurrency bookkeeping: both coder executors block on `codersReleased`
  // until the test has observed that BOTH started; maxConcurrentCoders === 2
  // is only reachable if they overlap in time.
  let activeCoders = 0;
  let maxConcurrentCoders = 0;
  let releaseCoders: (() => void) | undefined;
  // Promise-executor callbacks run synchronously, so releaseCoders /
  // notifyBothCodersStarted are assigned before either promise is awaited.
  const codersReleased = new Promise<void>((resolve) => {
    releaseCoders = resolve;
  });
  let coderStarts = 0;
  let notifyBothCodersStarted: (() => void) | undefined;
  const bothCodersStarted = new Promise<void>((resolve) => {
    notifyBothCodersStarted = resolve;
  });
  const engine = new SchemaDrivenExecutionEngine({
    manifest,
    settings: {
      workspaceRoot,
      stateRoot,
      projectContextPath,
      runtimeContext: {
        repo: "ai_ops",
      },
      maxDepth: 5,
      maxRetries: 2,
      maxChildren: 4,
    },
    actorExecutors: {
      plan_actor: async () => ({
        status: "success",
        payload: {
          phase: "plan",
        },
      }),
      // code_a and code_b are intentionally symmetric: each records its start,
      // signals once the second one has begun, then parks on codersReleased so
      // the overlap window stays open until the test releases them.
      code_a: async () => {
        activeCoders += 1;
        maxConcurrentCoders = Math.max(maxConcurrentCoders, activeCoders);
        coderStarts += 1;
        if (coderStarts === 2) {
          notifyBothCodersStarted?.();
        }
        await codersReleased;
        activeCoders = Math.max(activeCoders - 1, 0);
        return {
          status: "success",
          payload: {
            branch: "feature/a",
          },
          // Domain event that the "code-a" -> "integrate" edge listens for.
          events: [
            {
              type: "code_committed",
              payload: {
                summary: "Feature A committed",
              },
            },
          ],
          // Patch asserted later against the persisted project-context file.
          projectContextPatch: {
            artifactPointers: {
              feature_a_commit: "feature/a@abc123",
            },
          },
        };
      },
      code_b: async () => {
        activeCoders += 1;
        maxConcurrentCoders = Math.max(maxConcurrentCoders, activeCoders);
        coderStarts += 1;
        if (coderStarts === 2) {
          notifyBothCodersStarted?.();
        }
        await codersReleased;
        activeCoders = Math.max(activeCoders - 1, 0);
        return {
          status: "success",
          payload: {
            branch: "feature/b",
          },
          events: [
            {
              type: "code_committed",
              payload: {
                summary: "Feature B committed",
              },
            },
          ],
          // code_b exercises the enqueueTasks patch path instead of pointers.
          projectContextPatch: {
            enqueueTasks: [
              {
                id: "task-integrate",
                title: "Integrate feature branches",
                status: "pending",
              },
            ],
          },
        };
      },
      integrate_actor: async () => ({
        status: "success",
        payload: {
          merged: true,
        },
        events: [
          {
            type: "branch_merged",
            payload: {
              summary: "Branches merged",
            },
          },
        ],
      }),
    },
  });
  // Start the run but do not await yet: both coders must be observed running
  // before they are released, otherwise the engine serialized them.
  const runPromise = engine.runSession({
    sessionId: "session-parallel-domain-events",
    initialPayload: {
      task: "Parallel implementation",
    },
  });
  await bothCodersStarted;
  releaseCoders?.();
  const result = await runPromise;
  assert.equal(maxConcurrentCoders, 2);
  // Record order doubles as an execution-order check: integrate runs last,
  // only after both domain events arrived.
  assert.deepEqual(
    result.records.map((record) => `${record.nodeId}:${record.status}`),
    ["plan:success", "code-a:success", "code-b:success", "integrate:success"],
  );
  // Both executors' patches must have been merged into the on-disk context.
  const storedContextRaw = await readFile(projectContextPath, "utf8");
  const storedContext = JSON.parse(storedContextRaw) as {
    artifactPointers: Record<string, string>;
    taskQueue: Array<{ id: string }>;
  };
  assert.equal(storedContext.artifactPointers.feature_a_commit, "feature/a@abc123");
  assert.equal(storedContext.taskQueue[0]?.id, "task-integrate");
  // The engine is expected to record a pointer to the session's final state
  // file under a sessions/<id>/final_state key — TODO confirm the exact key
  // contract against the engine implementation.
  const finalStatePointer = storedContext.artifactPointers["sessions/session-parallel-domain-events/final_state"];
  assert.ok(finalStatePointer);
  assert.match(finalStatePointer, /state\.json$/);
});
// Two consecutive hard failures must trip the engine's hard-failure threshold
// and reject the session, rather than continuing to walk failure edges.
test("fails fast after two sequential hard failures", async () => {
  const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
  const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));
  const projectContextPath = resolve(stateRoot, "project-context.json");

  // Minimal two-node sequential pipeline where "first" routes to "second" on
  // failure, and both actors report hard failures.
  const manifest = {
    schemaVersion: "1",
    topologies: ["sequential"],
    personas: [
      {
        id: "coder",
        displayName: "Coder",
        systemPromptTemplate: "Coder",
        toolClearance: { allowlist: [], banlist: [] },
      },
    ],
    relationships: [],
    topologyConstraints: { maxDepth: 4, maxRetries: 0 },
    pipeline: {
      entryNodeId: "first",
      nodes: [
        { id: "first", actorId: "first_actor", personaId: "coder" },
        { id: "second", actorId: "second_actor", personaId: "coder" },
      ],
      edges: [{ from: "first", to: "second", on: "failure" }],
    },
  } as const;

  const engine = new SchemaDrivenExecutionEngine({
    manifest,
    settings: {
      workspaceRoot,
      stateRoot,
      projectContextPath,
      maxDepth: 4,
      maxRetries: 0,
      maxChildren: 2,
      runtimeContext: {},
    },
    actorExecutors: {
      first_actor: async () => ({
        status: "failure",
        payload: { error: "network timeout while reaching upstream API" },
        failureKind: "hard",
      }),
      second_actor: async () => ({
        status: "failure",
        payload: { error: "HTTP 403 from provider" },
        failureKind: "hard",
      }),
    },
  });

  // The second hard failure should reject the whole session.
  await assert.rejects(
    engine.runSession({
      sessionId: "session-hard-failure",
      initialPayload: { task: "Trigger hard failures" },
    }),
    /Hard failure threshold reached/,
  );
});
// The run-level AbortSignal must be forwarded into actor executors so
// in-flight work stops promptly, and the session promise must reject.
test("propagates abort signal into actor execution and stops the run", async () => {
  const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
  const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));
  const projectContextPath = resolve(stateRoot, "project-context.json");
  // Single-node pipeline whose only actor simulates long-running work.
  const manifest = {
    schemaVersion: "1",
    topologies: ["sequential"],
    personas: [
      {
        id: "coder",
        displayName: "Coder",
        systemPromptTemplate: "Coder",
        toolClearance: {
          allowlist: [],
          banlist: [],
        },
      },
    ],
    relationships: [],
    topologyConstraints: {
      maxDepth: 2,
      maxRetries: 0,
    },
    pipeline: {
      entryNodeId: "long-run",
      nodes: [
        {
          id: "long-run",
          actorId: "long_actor",
          personaId: "coder",
        },
      ],
      edges: [],
    },
  } as const;
  let observedAbort = false;
  const engine = new SchemaDrivenExecutionEngine({
    manifest,
    settings: {
      workspaceRoot,
      stateRoot,
      projectContextPath,
      maxDepth: 2,
      maxRetries: 0,
      maxChildren: 2,
      runtimeContext: {},
    },
    actorExecutors: {
      // Simulates slow work (5 s) that is interrupted via input.signal.
      long_actor: async (input) => {
        await new Promise<void>((resolve, reject) => {
          const failAborted = () => {
            observedAbort = true;
            reject(input.signal.reason ?? new Error("aborted"));
          };
          // Fix: "abort" listeners attached after the signal has already
          // fired never run. Without this up-front check, an already-aborted
          // signal would leave the actor sleeping the full 5 s and then
          // returning success, with observedAbort still false.
          if (input.signal.aborted) {
            failAborted();
            return;
          }
          const timeout = setTimeout(resolve, 5000);
          input.signal.addEventListener(
            "abort",
            () => {
              // Cancel the fallback timer so the process can exit promptly.
              clearTimeout(timeout);
              failAborted();
            },
            { once: true },
          );
        });
        // Only reachable if the abort never arrives (i.e. the test is broken).
        return {
          status: "success",
          payload: {
            unreachable: true,
          },
        };
      },
    },
  });
  const controller = new AbortController();
  const runPromise = engine.runSession({
    sessionId: "session-abort",
    initialPayload: {
      task: "Abort test",
    },
    signal: controller.signal,
  });
  // Abort shortly after the run starts, while long_actor is mid-flight.
  setTimeout(() => {
    controller.abort(new Error("manual-abort"));
  }, 20);
  await assert.rejects(() => runPromise, /(AbortError|manual-abort|aborted)/i);
  assert.equal(observedAbort, true);
});

View File

@@ -0,0 +1,58 @@
import test from "node:test";
import assert from "node:assert/strict";
import { mkdtemp } from "node:fs/promises";
import { tmpdir } from "node:os";
import { resolve } from "node:path";
import { FileSystemProjectContextStore } from "../src/agents/project-context.js";
// The file-backed project context store must start from an empty default
// state, then merge flag/pointer patches and upsert queued tasks in order.
test("project context store reads defaults and applies domain patches", async () => {
  const root = await mkdtemp(resolve(tmpdir(), "ai-ops-project-context-"));
  const store = new FileSystemProjectContextStore({
    filePath: resolve(root, "project-context.json"),
  });

  // A store over a not-yet-existing file yields the empty default state.
  assert.deepEqual(await store.readState(), {
    globalFlags: {},
    artifactPointers: {},
    taskQueue: [],
  });

  // First patch: set a flag, record an artifact pointer, enqueue one task.
  await store.patchState({
    globalFlags: { requirements_defined: true },
    artifactPointers: { prd: "docs/PRD.md" },
    enqueueTasks: [{ id: "task-1", title: "Build parser", status: "pending" }],
  });

  // Second patch: upsert moves task-1 forward and appends a new task-2.
  const updated = await store.patchState({
    upsertTasks: [
      { id: "task-1", title: "Build parser", status: "in_progress" },
      { id: "task-2", title: "Add tests", status: "pending" },
    ],
  });

  // Earlier patch data must survive the second patch (merge, not replace).
  assert.equal(updated.globalFlags.requirements_defined, true);
  assert.equal(updated.artifactPointers.prd, "docs/PRD.md");
  assert.deepEqual(
    updated.taskQueue.map((entry) => `${entry.id}:${entry.status}`),
    ["task-1:in_progress", "task-2:pending"],
  );
});