Refactor pipeline policies, MCP registry, and unified config/runtime

This commit is contained in:
2026-02-23 13:56:45 -05:00
parent 889087daa1
commit 9b4216dda9
22 changed files with 1441 additions and 587 deletions

22
tests/config.test.ts Normal file
View File

@@ -0,0 +1,22 @@
import test from "node:test";
import assert from "node:assert/strict";
import { loadConfig } from "../src/config.js";
test("loads defaults and freezes config", () => {
  const loaded = loadConfig({});

  // Defaults surface from the loader without any env overrides.
  assert.equal(loaded.agentManager.maxConcurrentAgents, 4);
  assert.equal(loaded.orchestration.maxDepth, 4);
  assert.equal(loaded.provisioning.portRange.basePort, 36000);
  assert.equal(loaded.discovery.fileRelativePath, ".agent-context/resources.json");

  // Both the root config object and nested sections are frozen.
  for (const target of [loaded, loaded.orchestration]) {
    assert.equal(Object.isFrozen(target), true);
  }
});
test("validates boolean env values", () => {
  // Anything other than the literal strings "true"/"false" is rejected.
  const loadWithBadFlag = () => loadConfig({ CODEX_SKIP_GIT_CHECK: "maybe" });
  assert.throws(loadWithBadFlag, /must be "true" or "false"/);
});

View File

@@ -2,6 +2,7 @@ import test from "node:test";
import assert from "node:assert/strict";
import {
inferTransport,
normalizeSharedMcpServer,
toClaudeServerConfig,
toCodexServerConfig,
} from "../src/mcp/converters.js";
@@ -41,6 +42,42 @@ test("maps shared headers to codex http_headers", () => {
});
});
test("normalizes header aliases into a single headers object", () => {
  // Server config that carries both the legacy alias and the modern key.
  const normalized = normalizeSharedMcpServer({
    url: "http://localhost:3000/mcp",
    http_headers: { "X-Source": "legacy" },
    headers: { Authorization: "Bearer token" },
  });

  // Legacy http_headers entries are folded into the headers object...
  const mergedHeaders = {
    "X-Source": "legacy",
    Authorization: "Bearer token",
  };
  assert.deepEqual(normalized.headers, mergedHeaders);

  // ...and the alias key itself is dropped from the normalized result.
  assert.equal("http_headers" in normalized, false);
});
test("maps legacy http_headers alias for claude conversion", () => {
  // The converter should honor the legacy http_headers alias on input.
  const claudeConfig = toClaudeServerConfig("legacy-http-headers", {
    type: "http",
    url: "http://localhost:3000/mcp",
    http_headers: { Authorization: "Bearer token" },
  });

  // On output the headers appear under the canonical "headers" key.
  const expected = {
    type: "http",
    url: "http://localhost:3000/mcp",
    headers: { Authorization: "Bearer token" },
  };
  assert.deepEqual(claudeConfig, expected);
});
test("throws for claude http server without url", () => {
assert.throws(
() => toClaudeServerConfig("bad-http", { type: "http" }),

View File

@@ -0,0 +1,65 @@
import test from "node:test";
import assert from "node:assert/strict";
import {
McpRegistry,
createDefaultMcpRegistry,
createMcpHandlerShell,
} from "../src/mcp/handlers.js";
test("mcp registries are isolated instances", () => {
  const first = createDefaultMcpRegistry();
  const second = createDefaultMcpRegistry();

  // Register a custom handler on the first registry only.
  const customHandler = createMcpHandlerShell({
    id: "custom-a",
    description: "custom handler",
    matches: () => false,
  });
  first.register(customHandler);

  const hasCustomA = (registry: typeof first) =>
    registry.listHandlers().some((handler) => handler.id === "custom-a");

  // Registration on one registry must not leak into the other.
  assert.equal(hasCustomA(first), true);
  assert.equal(hasCustomA(second), false);
});
test("mcp registry resolves generic fallback by default", () => {
  const registry = createDefaultMcpRegistry();

  // A plain stdio server with no explicit handler id should fall through
  // to the generic handler.
  const resolved = registry.resolveServerWithHandler({
    serverName: "local-files",
    server: {
      type: "stdio",
      command: "npx",
      args: ["-y", "@modelcontextprotocol/server-filesystem", "."],
    },
    context: {},
    fullConfig: { servers: {} },
  });

  assert.equal(resolved.handlerId, "generic");
  // The generic handler still produces both client-specific conversions.
  assert.ok(resolved.codex);
  assert.ok(resolved.claude);
});
test("mcp registry rejects unknown explicit handlers", () => {
  // A bare registry has nothing registered, so any explicit handler id
  // should fail to resolve.
  const registry = new McpRegistry();

  const resolveWithMissingHandler = () =>
    registry.resolveServerWithHandler({
      serverName: "broken",
      server: {
        type: "http",
        url: "http://localhost:3000/mcp",
        handler: "missing-handler",
      },
      context: {},
      fullConfig: { servers: {} },
    });

  assert.throws(resolveWithMissingHandler, /Unknown MCP handler/);
});

View File

@@ -230,6 +230,7 @@ test("runs DAG pipeline with state-dependent routing and retry behavior", async
task: "Implement pipeline",
},
});
assert.equal(result.status, "success");
assert.deepEqual(
result.records.map((record) => `${record.nodeId}:${record.status}:${String(record.attempt)}`),
@@ -471,6 +472,7 @@ test("runs parallel topology blocks concurrently and routes via domain-event edg
const result = await runPromise;
assert.equal(maxConcurrentCoders, 2);
assert.equal(result.status, "success");
assert.deepEqual(
result.records.map((record) => `${record.nodeId}:${record.status}`),
["plan:success", "code-a:success", "code-b:success", "integrate:success"],
@@ -577,6 +579,96 @@ test("fails fast after two sequential hard failures", async () => {
);
});
// Regression: a session whose terminal node fails must report an aggregate
// "failure" status even though upstream nodes succeeded.
test("marks aggregate status as failure when a terminal node fails", async () => {
// Isolated scratch directories so the engine never touches real state.
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));
const projectContextPath = resolve(stateRoot, "project-context.json");
// Minimal two-node sequential pipeline: build -> verify on success.
const manifest = {
schemaVersion: "1",
topologies: ["sequential"],
personas: [
{
id: "coder",
displayName: "Coder",
systemPromptTemplate: "Coder",
toolClearance: {
allowlist: [],
banlist: [],
},
},
],
relationships: [],
topologyConstraints: {
maxDepth: 3,
// No retries: the verify failure must surface immediately.
maxRetries: 0,
},
pipeline: {
entryNodeId: "build",
nodes: [
{
id: "build",
actorId: "build_actor",
personaId: "coder",
},
{
id: "verify",
actorId: "verify_actor",
personaId: "coder",
},
],
edges: [
{
from: "build",
to: "verify",
on: "success",
},
],
},
} as const;
const engine = new SchemaDrivenExecutionEngine({
manifest,
settings: {
workspaceRoot,
stateRoot,
projectContextPath,
maxDepth: 3,
maxRetries: 0,
maxChildren: 2,
runtimeContext: {},
},
// Stubbed actors: build succeeds, then the terminal verify node fails.
actorExecutors: {
build_actor: async () => ({
status: "success",
payload: {
step: "build",
},
}),
verify_actor: async () => ({
status: "failure",
payload: {
error: "verification failed",
},
// Soft failure with maxRetries 0 still terminates the run.
failureKind: "soft",
}),
},
});
const result = await engine.runSession({
sessionId: "session-terminal-failure",
initialPayload: {
task: "Aggregate failure status",
},
});
// Aggregate status reflects the failed terminal node, and the per-node
// records show the build succeeding before verify fails.
assert.equal(result.status, "failure");
assert.deepEqual(
result.records.map((record) => `${record.nodeId}:${record.status}`),
["build:success", "verify:failure"],
);
});
test("propagates abort signal into actor execution and stops the run", async () => {
const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-workspace-"));
const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-state-"));

View File

@@ -1,6 +1,6 @@
import test from "node:test";
import assert from "node:assert/strict";
import { mkdtemp } from "node:fs/promises";
import { mkdtemp, writeFile } from "node:fs/promises";
import { tmpdir } from "node:os";
import { resolve } from "node:path";
import { FileSystemProjectContextStore } from "../src/agents/project-context.js";
@@ -13,6 +13,7 @@ test("project context store reads defaults and applies domain patches", async ()
const initial = await store.readState();
assert.deepEqual(initial, {
schemaVersion: 1,
globalFlags: {},
artifactPointers: {},
taskQueue: [],
@@ -55,4 +56,35 @@ test("project context store reads defaults and applies domain patches", async ()
updated.taskQueue.map((task) => `${task.id}:${task.status}`),
["task-1:in_progress", "task-2:pending"],
);
assert.equal(updated.schemaVersion, 1);
});
test("project context parser merges missing root keys with defaults", async () => {
  const root = await mkdtemp(resolve(tmpdir(), "ai-ops-project-context-"));
  const filePath = resolve(root, "project-context.json");
  const store = new FileSystemProjectContextStore({ filePath });

  // Seed the backing file with a document that only carries a task queue,
  // leaving every other root key absent.
  const partialDocument = {
    taskQueue: [
      {
        id: "task-1",
        title: "Migrate",
        status: "pending",
      },
    ],
  };
  await writeFile(filePath, `${JSON.stringify(partialDocument, null, 2)}\n`, "utf8");

  const state = await store.readState();

  // Missing root keys come back as defaults while persisted data survives.
  assert.equal(state.schemaVersion, 1);
  assert.deepEqual(state.globalFlags, {});
  assert.deepEqual(state.artifactPointers, {});
  assert.equal(state.taskQueue[0]?.id, "task-1");
});