Implement explicit session lifecycle and task-scoped worktrees

This commit is contained in:
2026-02-24 10:09:07 -05:00
parent 23ad28ad12
commit ca5fd3f096
21 changed files with 1201 additions and 45 deletions

View File

@@ -20,10 +20,12 @@ TypeScript runtime for deterministic multi-agent execution with:
- Runtime events are emitted as best-effort side-channel telemetry and do not affect orchestration control flow.
- `AgentManager` is an internal utility used by the pipeline when fan-out/retry-unrolled behavior is required.
- Session state is persisted under `AGENT_STATE_ROOT`.
- Project state is persisted under `AGENT_PROJECT_CONTEXT_PATH` with schema-versioned JSON (`schemaVersion`) and domains:
- Session lifecycle is explicit (`POST /api/sessions`, `POST /api/sessions/:id/run`, `POST /api/sessions/:id/close`) and each session is bound to a target project path.
- Session project context is persisted as schema-versioned JSON (`schemaVersion`) with domains:
- `globalFlags`
- `artifactPointers`
- `taskQueue`
- each task record stores `taskId`, status, and optional `worktreePath` for task-scoped workspace ownership
## Deep Dives
@@ -128,7 +130,7 @@ Pipeline edges can route via:
Domain events are typed and can trigger edges directly:
- planning: `requirements_defined`, `tasks_planned`
- execution: `code_committed`, `task_blocked`
- execution: `code_committed`, `task_ready_for_review`, `task_blocked`
- validation: `validation_passed`, `validation_failed`
- integration: `branch_merged`

View File

@@ -2,7 +2,7 @@ import { randomUUID } from "node:crypto";
import type { JsonObject } from "./types.js";
export type PlanningDomainEventType = "requirements_defined" | "tasks_planned";
export type ExecutionDomainEventType = "code_committed" | "task_blocked";
export type ExecutionDomainEventType = "code_committed" | "task_blocked" | "task_ready_for_review";
export type ValidationDomainEventType = "validation_passed" | "validation_failed";
export type IntegrationDomainEventType = "branch_merged";
@@ -46,6 +46,7 @@ const DOMAIN_EVENT_TYPES = new Set<DomainEventType>([
"tasks_planned",
"code_committed",
"task_blocked",
"task_ready_for_review",
"validation_passed",
"validation_failed",
"branch_merged",

View File

@@ -13,10 +13,16 @@ import {
type ActorExecutionSecurityContext,
type ActorExecutor,
type PipelineRunSummary,
type TaskExecutionLifecycle,
} from "./pipeline.js";
import { FileSystemProjectContextStore } from "./project-context.js";
import {
FileSystemProjectContextStore,
type ProjectTask,
type ProjectTaskStatus,
} from "./project-context.js";
import { FileSystemStateContextManager, type StoredSessionState } from "./state-context.js";
import type { JsonObject } from "./types.js";
import { SessionWorktreeManager, type SessionMetadata } from "./session-lifecycle.js";
import {
SecureCommandExecutor,
type SecurityAuditEvent,
@@ -221,6 +227,35 @@ function createActorSecurityContext(input: {
};
}
/**
 * Computes the absolute path of a session's project-context file:
 * `<stateRoot>/<sessionId>/project-context.json`.
 */
function resolveSessionProjectContextPath(stateRoot: string, sessionId: string): string {
  const sessionDirectory = resolve(stateRoot, sessionId);
  return resolve(sessionDirectory, "project-context.json");
}
/**
 * Extracts a task identifier from a handoff payload, checking the
 * `taskId`, `task_id`, and `task` keys in priority order. Returns the
 * trimmed value of the first non-blank string, or `fallback` when no
 * key holds a usable value.
 */
function readTaskIdFromPayload(payload: JsonObject, fallback: string): string {
  for (const key of ["taskId", "task_id", "task"]) {
    const value = payload[key];
    if (typeof value === "string" && value.trim().length > 0) {
      return value.trim();
    }
  }
  return fallback;
}
/**
 * Maps an unsuccessful actor result to a task status: a hard failure
 * marks the task `failed`, while a validation failure keeps it
 * `in_progress` so the task remains retryable.
 */
function toTaskStatusForFailure(resultStatus: "validation_fail" | "failure"): ProjectTaskStatus {
  return resultStatus === "failure" ? "failed" : "in_progress";
}
/**
 * A task is merge-eligible only when it entered the current run already
 * in "review" status.
 */
function shouldMergeFromStatus(statusAtStart: string): boolean {
  const mergeEligibleStatuses = new Set(["review"]);
  return mergeEligibleStatuses.has(statusAtStart);
}
/**
 * Returns a display/lookup label for a task: the canonical `taskId`
 * when non-empty, else the legacy `id`, else the literal "task".
 */
function toTaskIdLabel(task: ProjectTask): string {
  for (const candidate of [task.taskId, task.id]) {
    if (candidate) {
      return candidate;
    }
  }
  return "task";
}
export class SchemaDrivenExecutionEngine {
private readonly manifest: AgentManifest;
private readonly personaRegistry = new PersonaRegistry();
@@ -234,6 +269,7 @@ export class SchemaDrivenExecutionEngine {
private readonly mcpRegistry: McpRegistry;
private readonly runtimeEventPublisher: RuntimeEventPublisher;
private readonly securityContext: ActorExecutionSecurityContext;
private readonly sessionWorktreeManager: SessionWorktreeManager;
constructor(input: {
manifest: AgentManifest | unknown;
@@ -273,6 +309,10 @@ export class SchemaDrivenExecutionEngine {
this.projectContextStore = new FileSystemProjectContextStore({
filePath: this.settings.projectContextPath,
});
this.sessionWorktreeManager = new SessionWorktreeManager({
worktreeRoot: resolve(this.settings.workspaceRoot, this.config.provisioning.gitWorktree.rootDirectory),
baseRef: this.config.provisioning.gitWorktree.baseRef,
});
this.actorExecutors = toExecutorMap(input.actorExecutors);
this.manager =
@@ -352,9 +392,22 @@ export class SchemaDrivenExecutionEngine {
initialPayload: JsonObject;
initialState?: Partial<StoredSessionState>;
signal?: AbortSignal;
sessionMetadata?: SessionMetadata;
}): Promise<PipelineRunSummary> {
const managerSessionId = `${input.sessionId}__pipeline`;
const managerSession = this.manager.createSession(managerSessionId);
const workspaceRoot = input.sessionMetadata?.baseWorkspacePath ?? this.settings.workspaceRoot;
const projectContextStore = input.sessionMetadata
? new FileSystemProjectContextStore({
filePath: resolveSessionProjectContextPath(this.settings.stateRoot, input.sessionId),
})
: this.projectContextStore;
const taskLifecycle = input.sessionMetadata
? this.createTaskExecutionLifecycle({
session: input.sessionMetadata,
projectContextStore,
})
: undefined;
const executor = new PipelineExecutor(
this.manifest,
@@ -362,25 +415,26 @@ export class SchemaDrivenExecutionEngine {
this.stateManager,
this.actorExecutors,
{
workspaceRoot: this.settings.workspaceRoot,
workspaceRoot,
runtimeContext: this.settings.runtimeContext,
defaultModelConstraint: this.config.provider.claudeModel,
resolvedExecutionSecurityConstraints: {
dropUid: this.config.security.dropUid !== undefined,
dropGid: this.config.security.dropGid !== undefined,
worktreePath: this.settings.workspaceRoot,
worktreePath: workspaceRoot,
violationMode: this.settings.securityViolationHandling,
},
maxDepth: Math.min(this.settings.maxDepth, this.manifest.topologyConstraints.maxDepth),
maxRetries: Math.min(this.settings.maxRetries, this.manifest.topologyConstraints.maxRetries),
manager: this.manager,
managerSessionId,
projectContextStore: this.projectContextStore,
resolveMcpConfig: ({ providerHint, prompt, toolClearance }) =>
projectContextStore,
resolveMcpConfig: ({ providerHint, prompt, toolClearance, workingDirectory }) =>
loadMcpConfigFromEnv(
{
providerHint,
prompt,
...(workingDirectory ? { workingDirectory } : {}),
},
{
config: this.config,
@@ -391,6 +445,7 @@ export class SchemaDrivenExecutionEngine {
securityViolationHandling: this.settings.securityViolationHandling,
securityContext: this.securityContext,
runtimeEventPublisher: this.runtimeEventPublisher,
...(taskLifecycle ? { taskLifecycle } : {}),
},
);
try {
@@ -405,6 +460,97 @@ export class SchemaDrivenExecutionEngine {
}
}
/**
 * Builds the TaskExecutionLifecycle hooks that bind a pipeline node run
 * to a task-scoped git worktree and to the session's persisted task queue.
 *
 * - `prepareTaskExecution` resolves the task id from the handoff payload
 *   (falling back to the node id), ensures a worktree exists for the task
 *   (reusing a previously recorded `worktreePath` when present), and
 *   records the task as `in_progress` — or keeps `review` if it was
 *   already in review.
 * - `finalizeTaskExecution` persists the post-run status: failures map
 *   via toTaskStatusForFailure; a success on a task that started in
 *   `review` is merged into the session base workspace and marked
 *   `merged`; any other success moves the task to `review`.
 */
private createTaskExecutionLifecycle(input: {
  session: SessionMetadata;
  projectContextStore: FileSystemProjectContextStore;
}): TaskExecutionLifecycle {
  return {
    prepareTaskExecution: async ({ node, context }) => {
      // Task identity comes from the payload when provided; the node id
      // is the deterministic fallback.
      const taskId = readTaskIdFromPayload(context.handoff.payload, node.id);
      const projectContext = await input.projectContextStore.readState();
      const existing = projectContext.taskQueue.find(
        (task) => toTaskIdLabel(task) === taskId,
      );
      // Reuse the task's recorded worktree if one was persisted earlier;
      // otherwise a fresh task worktree is created off the session base.
      const ensured = await this.sessionWorktreeManager.ensureTaskWorktree({
        sessionId: input.session.sessionId,
        taskId,
        baseWorkspacePath: input.session.baseWorkspacePath,
        ...(existing?.worktreePath ? { existingWorktreePath: existing.worktreePath } : {}),
      });
      // A task already in review keeps that status (it is a re-run for
      // merge); everything else starts as in_progress.
      const statusAtStart: ProjectTaskStatus =
        existing?.status === "review" ? "review" : "in_progress";
      await input.projectContextStore.patchState({
        upsertTasks: [
          {
            taskId,
            id: taskId,
            status: statusAtStart,
            worktreePath: ensured.taskWorktreePath,
            ...(existing?.title ? { title: existing.title } : { title: taskId }),
          },
        ],
      });
      return {
        taskId,
        worktreePath: ensured.taskWorktreePath,
        statusAtStart,
      };
    },
    finalizeTaskExecution: async ({ task, result }) => {
      if (result.status === "failure" || result.status === "validation_fail") {
        // Persist the failure; the worktree is kept so the task can be
        // retried or inspected.
        await input.projectContextStore.patchState({
          upsertTasks: [
            {
              taskId: task.taskId,
              id: task.taskId,
              status: toTaskStatusForFailure(result.status),
              worktreePath: task.worktreePath,
              title: task.taskId,
            },
          ],
        });
        return;
      }
      if (shouldMergeFromStatus(task.statusAtStart)) {
        // A successful run of a task that started in review: merge its
        // worktree into the session base and drop the worktreePath
        // (mergeTaskIntoBase removes the task worktree).
        await this.sessionWorktreeManager.mergeTaskIntoBase({
          taskId: task.taskId,
          baseWorkspacePath: input.session.baseWorkspacePath,
          taskWorktreePath: task.worktreePath,
        });
        await input.projectContextStore.patchState({
          upsertTasks: [
            {
              taskId: task.taskId,
              id: task.taskId,
              status: "merged",
              title: task.taskId,
            },
          ],
        });
        return;
      }
      // First successful run: hand the task off to review, keeping its
      // worktree for the eventual merge pass.
      await input.projectContextStore.patchState({
        upsertTasks: [
          {
            taskId: task.taskId,
            id: task.taskId,
            status: "review",
            worktreePath: task.worktreePath,
            title: task.taskId,
          },
        ],
      });
    },
  };
}
private assertRelationshipConstraints(): void {
for (const [parent, edges] of this.childrenByParent.entries()) {
if (edges.length > this.settings.maxChildren) {

View File

@@ -153,6 +153,7 @@ export type PipelineExecutorOptions = {
securityViolationHandling?: SecurityViolationHandling;
securityContext?: ActorExecutionSecurityContext;
runtimeEventPublisher?: RuntimeEventPublisher;
taskLifecycle?: TaskExecutionLifecycle;
};
export type ActorExecutionSecurityContext = {
@@ -166,6 +167,27 @@ export type ActorExecutionSecurityContext = {
}) => SecureCommandExecutor;
};
export type TaskExecutionResolution = {
taskId: string;
worktreePath: string;
statusAtStart: string;
};
export type TaskExecutionLifecycle = {
prepareTaskExecution: (input: {
sessionId: string;
node: PipelineNode;
context: NodeExecutionContext;
}) => Promise<TaskExecutionResolution>;
finalizeTaskExecution: (input: {
sessionId: string;
node: PipelineNode;
task: TaskExecutionResolution;
result: ActorExecutionResult;
domainEvents: DomainEvent[];
}) => Promise<void>;
};
type QueueItem = {
nodeId: string;
depth: number;
@@ -580,9 +602,11 @@ export class PipelineExecutor {
globalFlags: { ...projectContext.globalFlags },
artifactPointers: { ...projectContext.artifactPointers },
taskQueue: projectContext.taskQueue.map((task) => ({
id: task.id,
title: task.title,
taskId: task.taskId,
id: task.id ?? task.taskId,
...(task.title ? { title: task.title } : {}),
status: task.status,
...(task.worktreePath ? { worktreePath: task.worktreePath } : {}),
...(task.assignee ? { assignee: task.assignee } : {}),
...(task.metadata ? { metadata: task.metadata } : {}),
})),
@@ -854,6 +878,13 @@ export class PipelineExecutor {
})();
const context = await this.stateManager.buildFreshNodeContext(sessionId, node.id);
const taskResolution = this.options.taskLifecycle
? await this.options.taskLifecycle.prepareTaskExecution({
sessionId,
node,
context,
})
: undefined;
const prompt = this.personaRegistry.renderSystemPrompt({
personaId: node.personaId,
runtimeContext: {
@@ -869,6 +900,7 @@ export class PipelineExecutor {
node,
toolClearance,
prompt,
worktreePathOverride: taskResolution?.worktreePath,
});
const result = await this.invokeActorExecutor({
@@ -889,7 +921,16 @@ export class PipelineExecutor {
customEvents: result.events,
});
const topologyKind: NodeTopologyKind = node.topology?.kind ?? "sequential";
const payloadForNext = result.payload ?? context.handoff.payload;
const payloadForNext = {
...context.handoff.payload,
...(result.payload ?? {}),
...(taskResolution
? {
taskId: taskResolution.taskId,
worktreePath: taskResolution.worktreePath,
}
: {}),
};
const shouldRetry =
result.status === "validation_fail" &&
this.shouldRetryValidation(node) &&
@@ -907,6 +948,16 @@ export class PipelineExecutor {
topologyKind,
});
if (taskResolution && this.options.taskLifecycle) {
await this.options.taskLifecycle.finalizeTaskExecution({
sessionId,
node,
task: taskResolution,
result,
domainEvents,
});
}
const emittedEventTypes = domainEvents.map((event) => event.type);
nodeRecords.push({
nodeId: node.id,
@@ -1006,7 +1057,10 @@ export class PipelineExecutor {
context: input.context,
signal: input.signal,
executionContext: input.executionContext,
mcp: this.buildActorMcpContext(input.executionContext, input.prompt),
mcp: this.buildActorMcpContext({
executionContext: input.executionContext,
prompt: input.prompt,
}),
security: this.securityContext,
});
} catch (error) {
@@ -1047,9 +1101,15 @@ export class PipelineExecutor {
node: PipelineNode;
toolClearance: ToolClearancePolicy;
prompt: string;
worktreePathOverride?: string;
}): ResolvedExecutionContext {
const normalizedToolClearance = parseToolClearancePolicy(input.toolClearance);
const toolUniverse = this.resolveAvailableToolsForAttempt(normalizedToolClearance, input.prompt);
const worktreePath = input.worktreePathOverride ?? this.options.resolvedExecutionSecurityConstraints.worktreePath;
const toolUniverse = this.resolveAvailableToolsForAttempt({
toolClearance: normalizedToolClearance,
prompt: input.prompt,
worktreePath,
});
const allowedTools = this.resolveAllowedToolsForAttempt({
toolClearance: normalizedToolClearance,
toolUniverse,
@@ -1065,6 +1125,7 @@ export class PipelineExecutor {
allowedTools,
security: {
...this.options.resolvedExecutionSecurityConstraints,
worktreePath,
},
};
}
@@ -1087,15 +1148,20 @@ export class PipelineExecutor {
return [];
}
private resolveAvailableToolsForAttempt(toolClearance: ToolClearancePolicy, prompt: string): string[] {
private resolveAvailableToolsForAttempt(input: {
toolClearance: ToolClearancePolicy;
prompt: string;
worktreePath: string;
}): string[] {
if (!this.options.resolveMcpConfig) {
return [];
}
const resolved = this.options.resolveMcpConfig({
providerHint: "codex",
prompt,
toolClearance,
prompt: input.prompt,
workingDirectory: input.worktreePath,
toolClearance: input.toolClearance,
});
const rawServers = resolved.codexConfig?.mcp_servers;
@@ -1115,10 +1181,11 @@ export class PipelineExecutor {
return dedupeStrings(tools);
}
private buildActorMcpContext(
executionContext: ResolvedExecutionContext,
prompt: string,
): ActorExecutionMcpContext {
private buildActorMcpContext(input: {
executionContext: ResolvedExecutionContext;
prompt: string;
}): ActorExecutionMcpContext {
const { executionContext, prompt } = input;
const toolPolicy = toAllowedToolPolicy(executionContext.allowedTools);
const filterToolsForProvider = (tools: string[]): string[] => {
const deduped = dedupeStrings(tools);
@@ -1129,6 +1196,7 @@ export class PipelineExecutor {
? this.options.resolveMcpConfig({
providerHint: "both",
prompt,
workingDirectory: executionContext.security.worktreePath,
toolClearance: toolPolicy,
})
: {};
@@ -1137,7 +1205,12 @@ export class PipelineExecutor {
executionContext.allowedTools,
);
const resolveConfig = (context: McpLoadContext = {}): LoadedMcpConfig => {
if (context.providerHint === "codex") {
const withWorkingDirectory: McpLoadContext = {
...context,
...(context.workingDirectory ? {} : { workingDirectory: executionContext.security.worktreePath }),
};
if (withWorkingDirectory.providerHint === "codex") {
return {
...(resolvedConfig.codexConfig ? { codexConfig: cloneMcpConfig(resolvedConfig).codexConfig } : {}),
...(resolvedConfig.sourcePath ? { sourcePath: resolvedConfig.sourcePath } : {}),
@@ -1147,7 +1220,7 @@ export class PipelineExecutor {
};
}
if (context.providerHint === "claude") {
if (withWorkingDirectory.providerHint === "claude") {
return {
...(resolvedConfig.claudeMcpServers
? { claudeMcpServers: cloneMcpConfig(resolvedConfig).claudeMcpServers }

View File

@@ -5,12 +5,21 @@ import { deepCloneJson, isRecord, type JsonObject, type JsonValue } from "./type
export const PROJECT_CONTEXT_SCHEMA_VERSION = 1;
export type ProjectTaskStatus = "pending" | "in_progress" | "blocked" | "done";
export type ProjectTaskStatus =
| "pending"
| "in_progress"
| "review"
| "merged"
| "failed"
| "blocked"
| "done";
export type ProjectTask = {
id: string;
title: string;
taskId: string;
id?: string;
title?: string;
status: ProjectTaskStatus;
worktreePath?: string;
assignee?: string;
metadata?: JsonObject;
};
@@ -52,7 +61,15 @@ function toJsonObject(value: unknown, label: string): JsonObject {
}
function toTaskStatus(value: unknown, label: string): ProjectTaskStatus {
if (value === "pending" || value === "in_progress" || value === "blocked" || value === "done") {
if (
value === "pending" ||
value === "in_progress" ||
value === "review" ||
value === "merged" ||
value === "failed" ||
value === "blocked" ||
value === "done"
) {
return value;
}
throw new Error(`${label} has unsupported status "${String(value)}".`);
@@ -68,10 +85,28 @@ function toProjectTask(value: unknown, label: string): ProjectTask {
throw new Error(`${label}.assignee must be a non-empty string when provided.`);
}
const taskIdCandidate = value.taskId ?? value.id;
const taskId = assertNonEmptyString(taskIdCandidate, `${label}.taskId`);
const titleRaw = value.title;
if (titleRaw !== undefined && (typeof titleRaw !== "string" || titleRaw.trim().length === 0)) {
throw new Error(`${label}.title must be a non-empty string when provided.`);
}
const worktreePathRaw = value.worktreePath;
if (
worktreePathRaw !== undefined &&
(typeof worktreePathRaw !== "string" || worktreePathRaw.trim().length === 0)
) {
throw new Error(`${label}.worktreePath must be a non-empty string when provided.`);
}
return {
id: assertNonEmptyString(value.id, `${label}.id`),
title: assertNonEmptyString(value.title, `${label}.title`),
taskId,
id: taskId,
...(typeof titleRaw === "string" ? { title: titleRaw.trim() } : {}),
status: toTaskStatus(value.status, `${label}.status`),
...(typeof worktreePathRaw === "string" ? { worktreePath: worktreePathRaw.trim() } : {}),
...(typeof assignee === "string" ? { assignee: assignee.trim() } : {}),
...(value.metadata !== undefined
? { metadata: toJsonObject(value.metadata, `${label}.metadata`) }
@@ -157,10 +192,10 @@ function mergeUpsertTasks(current: ProjectTask[], upserts: ProjectTask[]): Proje
const byId = new Map<string, ProjectTask>();
for (const task of current) {
byId.set(task.id, task);
byId.set(task.taskId, task);
}
for (const task of upserts) {
byId.set(task.id, task);
byId.set(task.taskId, task);
}
return [...byId.values()];

View File

@@ -197,9 +197,9 @@ export class ResourceProvisioningOrchestrator {
async provisionSession(input: {
sessionId: string;
resources: ResourceRequest[];
workspaceRoot?: string;
workspaceRoot: string;
}): Promise<ProvisionedResources> {
const workspaceRoot = resolve(input.workspaceRoot ?? process.cwd());
const workspaceRoot = resolve(input.workspaceRoot);
const hardConstraints: ProvisionedResourcesState["hardConstraints"] = [];
const releases: ProvisionedResourcesState["releases"] = [];
const env: Record<string, string> = {};

View File

@@ -0,0 +1,389 @@
import { execFile } from "node:child_process";
import { randomUUID } from "node:crypto";
import { mkdir, readFile, readdir, stat } from "node:fs/promises";
import { dirname, isAbsolute, resolve } from "node:path";
import { promisify } from "node:util";
import { withFileLock, writeUtf8FileAtomic } from "./file-persistence.js";
const execFileAsync = promisify(execFile);
const SESSION_METADATA_FILE_NAME = "session-metadata.json";
export type SessionStatus = "active" | "suspended" | "closed";
export type SessionMetadata = {
sessionId: string;
projectPath: string;
sessionStatus: SessionStatus;
baseWorkspacePath: string;
createdAt: string;
updatedAt: string;
};
export type CreateSessionRequest = {
projectPath: string;
};
/** Normalizes an unknown thrown value into a human-readable message. */
function toErrorMessage(error: unknown): string {
  return error instanceof Error ? error.message : String(error);
}
/**
 * Validates that `path` is absolute and returns its normalized form.
 * @throws Error (mentioning `label`) when the path is relative.
 */
function assertAbsolutePath(path: string, label: string): string {
  if (isAbsolute(path)) {
    return resolve(path);
  }
  throw new Error(`${label} must be an absolute path.`);
}
/**
 * Validates that `value` is a string with non-whitespace content and
 * returns it trimmed.
 * @throws Error (mentioning `label`) otherwise.
 */
function assertNonEmptyString(value: unknown, label: string): string {
  if (typeof value !== "string") {
    throw new Error(`${label} must be a non-empty string.`);
  }
  const trimmed = value.trim();
  if (trimmed.length === 0) {
    throw new Error(`${label} must be a non-empty string.`);
  }
  return trimmed;
}
/**
 * Narrows an unknown value to a SessionStatus.
 * @throws Error for any value outside the supported status set.
 */
function toSessionStatus(value: unknown): SessionStatus {
  switch (value) {
    case "active":
    case "suspended":
    case "closed":
      return value;
    default:
      throw new Error(`Session status "${String(value)}" is not supported.`);
  }
}
/**
 * Parses a raw JSON value into SessionMetadata, validating every field:
 * `projectPath` and `baseWorkspacePath` must be absolute paths, the
 * status must be a supported SessionStatus, and the remaining fields
 * must be non-empty strings.
 * @throws Error when the value is not a plain object or a field is invalid.
 */
function toSessionMetadata(value: unknown): SessionMetadata {
  const isPlainObject = value !== null && typeof value === "object" && !Array.isArray(value);
  if (!isPlainObject) {
    throw new Error("Session metadata file is malformed.");
  }
  const raw = value as Record<string, unknown>;
  // Validate in the same field order the metadata object is assembled.
  const sessionId = assertNonEmptyString(raw.sessionId, "sessionId");
  const projectPath = assertAbsolutePath(
    assertNonEmptyString(raw.projectPath, "projectPath"),
    "projectPath",
  );
  const baseWorkspacePath = assertAbsolutePath(
    assertNonEmptyString(raw.baseWorkspacePath, "baseWorkspacePath"),
    "baseWorkspacePath",
  );
  return {
    sessionId,
    projectPath,
    baseWorkspacePath,
    sessionStatus: toSessionStatus(raw.sessionStatus),
    createdAt: assertNonEmptyString(raw.createdAt, "createdAt"),
    updatedAt: assertNonEmptyString(raw.updatedAt, "updatedAt"),
  };
}
/**
 * Runs a git command via execFile and returns its trimmed stdout.
 * @throws Error wrapping the underlying failure together with the full
 *   git argv for diagnosability.
 */
async function runGit(args: string[]): Promise<string> {
  try {
    const result = await execFileAsync("git", args, { encoding: "utf8" });
    return result.stdout.trim();
  } catch (error) {
    throw new Error(`git ${args.join(" ")} failed: ${toErrorMessage(error)}`);
  }
}
/**
 * Returns true when `path` exists on disk. A missing path (ENOENT)
 * yields false; any other stat failure (e.g. permissions) is rethrown.
 */
async function pathExists(path: string): Promise<boolean> {
  return stat(path).then(
    () => true,
    (error: unknown) => {
      if ((error as NodeJS.ErrnoException).code === "ENOENT") {
        return false;
      }
      throw error;
    },
  );
}
/**
 * Converts an arbitrary string into a branch/path-safe segment: every
 * character outside [a-zA-Z0-9_-] becomes "-", hyphen runs collapse to
 * a single "-", and leading/trailing hyphens are stripped. When nothing
 * survives, `fallback` is returned.
 */
function sanitizeSegment(value: string, fallback: string): string {
  const slug = value
    .trim()
    .replace(/[^a-zA-Z0-9_-]/g, "-")
    .replace(/-{2,}/g, "-")
    .replace(/^-+|-+$/g, "");
  return slug === "" ? fallback : slug;
}
/**
 * Persists session metadata as one JSON file per session under a state
 * root directory: `<stateRoot>/<sessionId>/session-metadata.json`.
 * Writes go through a file lock plus an atomic write so concurrent
 * updates cannot interleave or leave a torn file.
 */
export class FileSystemSessionMetadataStore {
  private readonly stateRoot: string;

  constructor(input: { stateRoot: string }) {
    this.stateRoot = resolve(input.stateRoot);
  }

  /** Absolute state root this store was constructed with. */
  getStateRoot(): string {
    return this.stateRoot;
  }

  /** Directory holding all files for one session. */
  getSessionDirectory(sessionId: string): string {
    return resolve(this.stateRoot, sessionId);
  }

  /** Path of the session's metadata JSON file. */
  getSessionMetadataPath(sessionId: string): string {
    return resolve(this.getSessionDirectory(sessionId), SESSION_METADATA_FILE_NAME);
  }

  /** Path of the session's project-context JSON file. */
  getSessionProjectContextPath(sessionId: string): string {
    return resolve(this.getSessionDirectory(sessionId), "project-context.json");
  }

  /**
   * Creates and persists metadata for a new "active" session.
   * Both paths must be absolute; a random UUID is used when no
   * sessionId is supplied.
   */
  async createSession(input: {
    projectPath: string;
    baseWorkspacePath: string;
    sessionId?: string;
  }): Promise<SessionMetadata> {
    const sessionId = input.sessionId?.trim() || randomUUID();
    const now = new Date().toISOString();
    const metadata: SessionMetadata = {
      sessionId,
      projectPath: assertAbsolutePath(input.projectPath, "projectPath"),
      baseWorkspacePath: assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath"),
      sessionStatus: "active",
      createdAt: now,
      updatedAt: now,
    };
    const sessionDirectory = this.getSessionDirectory(sessionId);
    await mkdir(sessionDirectory, { recursive: true });
    await this.writeSessionMetadata(metadata);
    return metadata;
  }

  /**
   * Reads and validates a session's metadata. Returns undefined when the
   * metadata file does not exist; malformed content throws.
   */
  async readSession(sessionId: string): Promise<SessionMetadata | undefined> {
    const metadataPath = this.getSessionMetadataPath(sessionId);
    try {
      const content = await readFile(metadataPath, "utf8");
      return toSessionMetadata(JSON.parse(content) as unknown);
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code === "ENOENT") {
        return undefined;
      }
      throw error;
    }
  }

  /**
   * Lists every readable session under the state root, newest first
   * (descending createdAt). Directories without valid metadata are
   * skipped; a missing state root yields an empty list.
   */
  async listSessions(): Promise<SessionMetadata[]> {
    try {
      const entries = await readdir(this.stateRoot, { withFileTypes: true });
      const sessions: SessionMetadata[] = [];
      for (const entry of entries) {
        if (!entry.isDirectory()) {
          continue;
        }
        const metadata = await this.readSession(entry.name);
        if (metadata) {
          sessions.push(metadata);
        }
      }
      sessions.sort((left, right) => right.createdAt.localeCompare(left.createdAt));
      return sessions;
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code === "ENOENT") {
        return [];
      }
      throw error;
    }
  }

  /**
   * Applies a partial update (paths and/or status) to an existing
   * session, refreshing `updatedAt`, and persists the result.
   * @throws Error when the session does not exist.
   */
  async updateSession(
    sessionId: string,
    patch: Partial<Pick<SessionMetadata, "projectPath" | "baseWorkspacePath" | "sessionStatus">>,
  ): Promise<SessionMetadata> {
    const current = await this.readSession(sessionId);
    if (!current) {
      throw new Error(`Session "${sessionId}" does not exist.`);
    }
    const next: SessionMetadata = {
      ...current,
      ...(patch.projectPath ? { projectPath: assertAbsolutePath(patch.projectPath, "projectPath") } : {}),
      ...(patch.baseWorkspacePath
        ? { baseWorkspacePath: assertAbsolutePath(patch.baseWorkspacePath, "baseWorkspacePath") }
        : {}),
      ...(patch.sessionStatus ? { sessionStatus: patch.sessionStatus } : {}),
      updatedAt: new Date().toISOString(),
    };
    await this.writeSessionMetadata(next);
    return next;
  }

  // Serializes the metadata under a sibling ".lock" file and writes it
  // atomically so readers never observe a partially-written file.
  private async writeSessionMetadata(metadata: SessionMetadata): Promise<void> {
    const metadataPath = this.getSessionMetadataPath(metadata.sessionId);
    await mkdir(dirname(metadataPath), { recursive: true });
    await withFileLock(`${metadataPath}.lock`, async () => {
      await writeUtf8FileAtomic(metadataPath, `${JSON.stringify(metadata, null, 2)}\n`);
    });
  }
}
/**
 * Manages git worktrees for a session: one "base" worktree per session
 * (branched from `baseRef`) plus one worktree per task branched from the
 * base. Layout under `worktreeRoot`:
 *   <root>/<session>/base
 *   <root>/<session>/tasks/<task>
 * Branch names follow `ai-ops/<session>/base` and
 * `ai-ops/<session>/task/<task>`; session/task ids are sanitized into
 * path- and branch-safe segments first.
 */
export class SessionWorktreeManager {
  private readonly worktreeRoot: string;
  private readonly baseRef: string;

  constructor(input: {
    worktreeRoot: string;
    baseRef: string;
  }) {
    this.worktreeRoot = assertAbsolutePath(input.worktreeRoot, "worktreeRoot");
    this.baseRef = assertNonEmptyString(input.baseRef, "baseRef");
  }

  /** Absolute path of the session's base worktree. */
  resolveBaseWorkspacePath(sessionId: string): string {
    const scoped = sanitizeSegment(sessionId, "session");
    return resolve(this.worktreeRoot, scoped, "base");
  }

  /** Absolute path of a task's worktree within the session. */
  resolveTaskWorktreePath(sessionId: string, taskId: string): string {
    const scopedSession = sanitizeSegment(sessionId, "session");
    const scopedTask = sanitizeSegment(taskId, "task");
    return resolve(this.worktreeRoot, scopedSession, "tasks", scopedTask);
  }

  // Branch backing the session base worktree.
  private resolveBaseBranchName(sessionId: string): string {
    const scoped = sanitizeSegment(sessionId, "session");
    return `ai-ops/${scoped}/base`;
  }

  // Branch backing one task worktree.
  private resolveTaskBranchName(sessionId: string, taskId: string): string {
    const scopedSession = sanitizeSegment(sessionId, "session");
    const scopedTask = sanitizeSegment(taskId, "task");
    return `ai-ops/${scopedSession}/task/${scopedTask}`;
  }

  /**
   * Creates the session base worktree off `baseRef` in the repository
   * containing `projectPath`. No-op when the base workspace directory
   * already exists (idempotent re-initialization).
   */
  async initializeSessionBaseWorkspace(input: {
    sessionId: string;
    projectPath: string;
    baseWorkspacePath: string;
  }): Promise<void> {
    const projectPath = assertAbsolutePath(input.projectPath, "projectPath");
    const baseWorkspacePath = assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath");
    await mkdir(dirname(baseWorkspacePath), { recursive: true });
    const alreadyExists = await pathExists(baseWorkspacePath);
    if (alreadyExists) {
      return;
    }
    // Resolve the real repo root in case projectPath is a subdirectory.
    const repoRoot = await runGit(["-C", projectPath, "rev-parse", "--show-toplevel"]);
    const branchName = this.resolveBaseBranchName(input.sessionId);
    // -B (re)creates the branch at baseRef even if it already exists.
    await runGit(["-C", repoRoot, "worktree", "add", "-B", branchName, baseWorkspacePath, this.baseRef]);
  }

  /**
   * Ensures a worktree exists for the task, preferring a previously
   * recorded `existingWorktreePath`. New worktrees are created from the
   * base workspace's current HEAD on the task branch. Returns the
   * worktree path in use.
   */
  async ensureTaskWorktree(input: {
    sessionId: string;
    taskId: string;
    baseWorkspacePath: string;
    existingWorktreePath?: string;
  }): Promise<{
    taskWorktreePath: string;
  }> {
    const baseWorkspacePath = assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath");
    const maybeExisting = input.existingWorktreePath?.trim();
    const worktreePath = maybeExisting
      ? assertAbsolutePath(maybeExisting, "existingWorktreePath")
      : this.resolveTaskWorktreePath(input.sessionId, input.taskId);
    if (!(await pathExists(worktreePath))) {
      await mkdir(dirname(worktreePath), { recursive: true });
      const branchName = this.resolveTaskBranchName(input.sessionId, input.taskId);
      await runGit(["-C", baseWorkspacePath, "worktree", "add", "-B", branchName, worktreePath, "HEAD"]);
    }
    return {
      taskWorktreePath: worktreePath,
    };
  }

  /**
   * Finalizes a task: stages and commits any pending changes in the task
   * worktree, merges the task branch into the base workspace with a
   * merge commit (--no-ff), and removes the task worktree.
   * Merge conflicts surface as a runGit error.
   */
  async mergeTaskIntoBase(input: {
    taskId: string;
    baseWorkspacePath: string;
    taskWorktreePath: string;
  }): Promise<void> {
    const baseWorkspacePath = assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath");
    const taskWorktreePath = assertAbsolutePath(input.taskWorktreePath, "taskWorktreePath");
    await runGit(["-C", taskWorktreePath, "add", "-A"]);
    // Commit only when something was actually staged; an empty commit
    // would otherwise fail.
    const hasPending = await this.hasStagedChanges(taskWorktreePath);
    if (hasPending) {
      await runGit([
        "-C",
        taskWorktreePath,
        "commit",
        "-m",
        `ai_ops: finalize task ${input.taskId}`,
      ]);
    }
    // Merge by the branch currently checked out in the task worktree.
    const branchName = await runGit(["-C", taskWorktreePath, "rev-parse", "--abbrev-ref", "HEAD"]);
    await runGit(["-C", baseWorkspacePath, "merge", "--no-ff", "--no-edit", branchName]);
    await this.removeWorktree({
      repoPath: baseWorkspacePath,
      worktreePath: taskWorktreePath,
    });
  }

  /**
   * Tears down a session's worktrees: removes every listed task worktree
   * (blank entries skipped), optionally merges the base branch back into
   * the project checkout, then removes the base worktree itself.
   */
  async closeSession(input: {
    session: SessionMetadata;
    taskWorktreePaths: string[];
    mergeBaseIntoProject?: boolean;
  }): Promise<void> {
    const projectPath = assertAbsolutePath(input.session.projectPath, "projectPath");
    const baseWorkspacePath = assertAbsolutePath(input.session.baseWorkspacePath, "baseWorkspacePath");
    for (const taskWorktreePath of input.taskWorktreePaths) {
      if (!taskWorktreePath.trim()) {
        continue;
      }
      await this.removeWorktree({
        repoPath: baseWorkspacePath,
        worktreePath: taskWorktreePath,
      });
    }
    if (input.mergeBaseIntoProject) {
      // Merge whatever branch the base worktree has checked out into the
      // project's current branch.
      const baseBranch = await runGit(["-C", baseWorkspacePath, "rev-parse", "--abbrev-ref", "HEAD"]);
      await runGit(["-C", projectPath, "merge", "--no-ff", "--no-edit", baseBranch]);
    }
    await this.removeWorktree({
      repoPath: projectPath,
      worktreePath: baseWorkspacePath,
    });
  }

  // Force-removes a worktree (even with local changes) and prunes stale
  // worktree records; silently no-ops when the path is already gone.
  private async removeWorktree(input: {
    repoPath: string;
    worktreePath: string;
  }): Promise<void> {
    if (!(await pathExists(input.worktreePath))) {
      return;
    }
    await runGit(["-C", input.repoPath, "worktree", "remove", "--force", input.worktreePath]);
    await runGit(["-C", input.repoPath, "worktree", "prune"]);
  }

  // Uses `git diff --cached --quiet`: exit 0 means nothing staged,
  // exit 1 means staged changes exist, anything else is an error.
  private async hasStagedChanges(worktreePath: string): Promise<boolean> {
    try {
      await execFileAsync("git", ["-C", worktreePath, "diff", "--cached", "--quiet"], {
        encoding: "utf8",
      });
      return false;
    } catch (error) {
      const exitCode = (error as { code?: number }).code;
      if (exitCode === 1) {
        return true;
      }
      throw new Error(`Unable to inspect staged changes: ${toErrorMessage(error)}`);
    }
  }
}

View File

@@ -85,6 +85,7 @@ export async function runClaudePrompt(
const writeOutput = dependencies.writeOutput ?? ((output: string) => console.log(output));
const sessionContext = await createSessionContextFn("claude", {
prompt,
workspaceRoot: process.cwd(),
config,
});

View File

@@ -48,6 +48,7 @@ export async function runCodexPrompt(
const writeOutput = dependencies.writeOutput ?? ((output: string) => console.log(output));
const sessionContext = await createSessionContextFn("codex", {
prompt,
workspaceRoot: process.cwd(),
config,
});

View File

@@ -28,6 +28,7 @@ export async function createSessionContext(
provider: SessionProvider,
input: {
prompt: string;
workspaceRoot: string;
config?: Readonly<AppConfig>;
mcpRegistry?: McpRegistry;
},
@@ -58,6 +59,7 @@ export async function createSessionContext(
provisionedResources = await resourceProvisioning.provisionSession({
sessionId: agentSession.id,
resources: [{ kind: "git-worktree" }, { kind: "port-range" }],
workspaceRoot: input.workspaceRoot,
});
const providerAuthEnv =
@@ -82,6 +84,7 @@ export async function createSessionContext(
{
providerHint: provider,
prompt: input.prompt,
workingDirectory: runtimeInjection.workingDirectory,
},
{
config,

View File

@@ -1,5 +1,5 @@
import { existsSync, readFileSync } from "node:fs";
import { resolve } from "node:path";
import { isAbsolute, resolve } from "node:path";
import type { CodexOptions } from "@openai/codex-sdk";
import { getConfig, type AppConfig } from "./config.js";
import { normalizeSharedMcpConfigFile } from "./mcp/converters.js";
@@ -23,12 +23,17 @@ import type {
import { parseMcpConfig } from "./mcp/types.js";
import type { ToolClearancePolicy } from "./security/schemas.js";
function readConfigFile(configPath: string): {
function readConfigFile(input: {
configPath: string;
workingDirectory?: string;
}): {
config?: SharedMcpConfigFile;
sourcePath?: string;
} {
const candidatePath = configPath.trim() || "./mcp.config.json";
const resolvedPath = resolve(process.cwd(), candidatePath);
const candidatePath = input.configPath.trim() || "./mcp.config.json";
const resolvedPath = isAbsolute(candidatePath)
? candidatePath
: resolve(input.workingDirectory ?? process.cwd(), candidatePath);
if (!existsSync(resolvedPath)) {
if (candidatePath !== "./mcp.config.json") {
@@ -83,7 +88,10 @@ export function loadMcpConfigFromEnv(
const registry = options?.registry ?? defaultMcpRegistry;
const warn = options?.warn ?? ((message: string) => console.warn(message));
const { config, sourcePath } = readConfigFile(runtimeConfig.mcp.configPath);
const { config, sourcePath } = readConfigFile({
configPath: runtimeConfig.mcp.configPath,
workingDirectory: context.workingDirectory,
});
if (!config) {
return {};
}

View File

@@ -50,6 +50,7 @@ export type SharedMcpConfigFile = {
export type McpLoadContext = {
providerHint?: "codex" | "claude" | "both";
prompt?: string;
workingDirectory?: string;
};
export type LoadedMcpConfig = {

View File

@@ -333,7 +333,7 @@ function buildActorPrompt(input: ActorExecutionInput): string {
},
events: [
{
type: "requirements_defined | tasks_planned | code_committed | task_blocked | validation_passed | validation_failed | branch_merged",
type: "requirements_defined | tasks_planned | code_committed | task_ready_for_review | task_blocked | validation_passed | validation_failed | branch_merged",
payload: {
summary: "optional",
details: {},
@@ -553,10 +553,12 @@ export async function createProviderRunRuntime(input: {
provider: RunProvider;
initialPrompt: string;
config: Readonly<AppConfig>;
projectPath: string;
}): Promise<ProviderRunRuntime> {
const sessionContext = await createSessionContext(input.provider, {
prompt: input.initialPrompt,
config: input.config,
workspaceRoot: input.projectPath,
});
return {

View File

@@ -25,6 +25,7 @@ const dom = {
runProvider: document.querySelector("#run-provider"),
runTopologyHint: document.querySelector("#run-topology-hint"),
runFlags: document.querySelector("#run-flags"),
runRuntimeContext: document.querySelector("#run-runtime-context"),
runValidationNodes: document.querySelector("#run-validation-nodes"),
killRun: document.querySelector("#kill-run"),
runStatus: document.querySelector("#run-status"),
@@ -111,6 +112,7 @@ const MANIFEST_EVENT_TRIGGERS = [
"requirements_defined",
"tasks_planned",
"code_committed",
"task_ready_for_review",
"task_blocked",
"validation_passed",
"validation_failed",
@@ -129,6 +131,7 @@ const LABEL_HELP_BY_CONTROL = Object.freeze({
"run-provider": "Choose which model provider backend handles provider-mode runs.",
"run-topology-hint": "Optional hint that nudges orchestration toward a topology strategy.",
"run-flags": "Optional JSON object passed in as initial run flags.",
"run-runtime-context": "Optional JSON object of template values injected into persona prompts (for example repo or ticket).",
"run-validation-nodes": "Optional comma-separated node IDs to simulate validation outcomes for.",
"events-limit": "Set how many recent runtime events are loaded per refresh.",
"cfg-webhook-url": "Webhook endpoint that receives runtime event notifications.",
@@ -1486,6 +1489,12 @@ async function startRun(event) {
return;
}
const runtimeContext = parseJsonSafe(dom.runRuntimeContext.value, {});
if (typeof runtimeContext !== "object" || Array.isArray(runtimeContext) || !runtimeContext) {
showRunStatus("Runtime Context Overrides must be a JSON object.", true);
return;
}
const manifestSelection = dom.runManifestSelect.value.trim();
const payload = {
@@ -1494,6 +1503,7 @@ async function startRun(event) {
provider: dom.runProvider.value,
topologyHint: dom.runTopologyHint.value.trim() || undefined,
initialFlags: flags,
runtimeContextOverrides: runtimeContext,
simulateValidationNodeIds: fromCsv(dom.runValidationNodes.value),
};

View File

@@ -75,6 +75,10 @@
Initial Flags (JSON)
<textarea id="run-flags" rows="3" placeholder='{"needs_bootstrap": true}'></textarea>
</label>
<label>
Runtime Context Overrides (JSON)
<textarea id="run-runtime-context" rows="3" placeholder='{"repo":"ai_ops","ticket":"AIOPS-123"}'></textarea>
</label>
<label>
Simulate Validation Nodes (CSV)
<input id="run-validation-nodes" type="text" placeholder="coder-1,qa-1" />

View File

@@ -3,11 +3,17 @@ import { mkdir, readFile, writeFile } from "node:fs/promises";
import { resolve } from "node:path";
import { SchemaDrivenExecutionEngine } from "../agents/orchestration.js";
import { parseAgentManifest, type AgentManifest } from "../agents/manifest.js";
import { FileSystemProjectContextStore } from "../agents/project-context.js";
import type {
ActorExecutionResult,
ActorExecutor,
PipelineAggregateStatus,
} from "../agents/pipeline.js";
import {
FileSystemSessionMetadataStore,
SessionWorktreeManager,
type SessionMetadata,
} from "../agents/session-lifecycle.js";
import { loadConfig, type AppConfig } from "../config.js";
import { parseEnvFile } from "./env-store.js";
import {
@@ -240,6 +246,19 @@ async function loadRuntimeConfig(envPath: string): Promise<Readonly<AppConfig>>
});
}
/**
 * Derives the absolute runtime directories for a workspace by anchoring
 * the configured relative paths at the workspace root.
 *
 * @returns the resolved orchestration state root and git-worktree root.
 */
function resolveRuntimePaths(input: {
  workspaceRoot: string;
  config: Readonly<AppConfig>;
}): {
  stateRoot: string;
  worktreeRoot: string;
} {
  const { workspaceRoot, config } = input;
  const stateRoot = resolve(workspaceRoot, config.orchestration.stateRoot);
  const worktreeRoot = resolve(
    workspaceRoot,
    config.provisioning.gitWorktree.rootDirectory,
  );
  return { stateRoot, worktreeRoot };
}
async function writeRunMeta(input: {
stateRoot: string;
sessionId: string;
@@ -319,6 +338,92 @@ export class UiRunService {
this.envFilePath = resolve(this.workspaceRoot, input.envFilePath ?? ".env");
}
/**
 * Loads configuration from the service's env file and builds the
 * session-scoped collaborators (metadata store and worktree manager)
 * rooted at the resolved state and worktree directories.
 */
private async loadRuntime(): Promise<{
  config: Readonly<AppConfig>;
  stateRoot: string;
  sessionStore: FileSystemSessionMetadataStore;
  worktreeManager: SessionWorktreeManager;
}> {
  const config = await loadRuntimeConfig(this.envFilePath);
  const { stateRoot, worktreeRoot } = resolveRuntimePaths({
    workspaceRoot: this.workspaceRoot,
    config,
  });
  const sessionStore = new FileSystemSessionMetadataStore({ stateRoot });
  const worktreeManager = new SessionWorktreeManager({
    worktreeRoot,
    baseRef: config.provisioning.gitWorktree.baseRef,
  });
  return { config, stateRoot, sessionStore, worktreeManager };
}
/**
 * Creates a new explicit session bound to a target project and
 * provisions its base workspace.
 *
 * The caller-supplied session id (trimmed) is used when present;
 * otherwise a fresh id is generated. The project path is resolved to an
 * absolute path before being persisted.
 *
 * NOTE(review): session metadata is persisted before the base workspace
 * is initialized — if initialization fails the record remains "active"
 * without a workspace; confirm whether cleanup is handled upstream.
 */
async createSession(input: {
  projectPath: string;
  sessionId?: string;
}): Promise<SessionMetadata> {
  const { sessionStore, worktreeManager } = await this.loadRuntime();
  const sessionId = input.sessionId?.trim() || toSessionId();
  const baseWorkspacePath = worktreeManager.resolveBaseWorkspacePath(sessionId);
  const session = await sessionStore.createSession({
    sessionId,
    projectPath: resolve(input.projectPath),
    baseWorkspacePath,
  });
  await worktreeManager.initializeSessionBaseWorkspace({
    sessionId: session.sessionId,
    projectPath: session.projectPath,
    baseWorkspacePath: session.baseWorkspacePath,
  });
  return session;
}
/** Returns every persisted session metadata record. */
async listSessions(): Promise<SessionMetadata[]> {
  const { sessionStore } = await this.loadRuntime();
  return sessionStore.listSessions();
}
/** Looks up one session by id; resolves undefined when it does not exist. */
async readSession(sessionId: string): Promise<SessionMetadata | undefined> {
  const { sessionStore } = await this.loadRuntime();
  return sessionStore.readSession(sessionId);
}
async closeSession(input: {
sessionId: string;
mergeToProject?: boolean;
}): Promise<SessionMetadata> {
const runtime = await this.loadRuntime();
const session = await runtime.sessionStore.readSession(input.sessionId);
if (!session) {
throw new Error(`Session \"${input.sessionId}\" does not exist.`);
}
const sessionProjectContextStore = new FileSystemProjectContextStore({
filePath: runtime.sessionStore.getSessionProjectContextPath(session.sessionId),
});
const projectContext = await sessionProjectContextStore.readState();
const taskWorktreePaths = projectContext.taskQueue
.map((task) => task.worktreePath)
.filter((path): path is string => typeof path === "string" && path.trim().length > 0);
await runtime.worktreeManager.closeSession({
session,
taskWorktreePaths,
mergeBaseIntoProject: input.mergeToProject === true,
});
return runtime.sessionStore.updateSession(session.sessionId, {
sessionStatus: "closed",
});
}
listRuns(): RunRecord[] {
const output = [...this.runHistory.values()].sort((left, right) => {
return right.startedAt.localeCompare(left.startedAt);
@@ -331,11 +436,21 @@ export class UiRunService {
}
async startRun(input: StartRunInput): Promise<RunRecord> {
const config = await loadRuntimeConfig(this.envFilePath);
const runtime = await this.loadRuntime();
const config = runtime.config;
const manifest = parseAgentManifest(input.manifest);
const executionMode = input.executionMode ?? "mock";
const provider = input.provider ?? "codex";
const sessionId = input.sessionId?.trim() || toSessionId();
const session = input.sessionId?.trim()
? await runtime.sessionStore.readSession(sessionId)
: undefined;
if (input.sessionId?.trim() && !session) {
throw new Error(`Session \"${sessionId}\" does not exist.`);
}
if (session && session.sessionStatus === "closed") {
throw new Error(`Session \"${sessionId}\" is closed and cannot run new tasks.`);
}
const runId = randomUUID();
const controller = new AbortController();
@@ -359,6 +474,7 @@ export class UiRunService {
provider,
initialPrompt: input.prompt,
config,
projectPath: session?.baseWorkspacePath ?? this.workspaceRoot,
});
}
@@ -376,11 +492,20 @@ export class UiRunService {
actorExecutors,
settings: {
workspaceRoot: this.workspaceRoot,
stateRoot: config.orchestration.stateRoot,
projectContextPath: config.orchestration.projectContextPath,
stateRoot: runtime.stateRoot,
projectContextPath: session
? runtime.sessionStore.getSessionProjectContextPath(sessionId)
: resolve(this.workspaceRoot, config.orchestration.projectContextPath),
runtimeContext: {
ui_mode: executionMode,
run_provider: provider,
...(session
? {
session_id: sessionId,
project_path: session.projectPath,
base_workspace_path: session.baseWorkspacePath,
}
: {}),
...(input.runtimeContextOverrides ?? {}),
},
},
@@ -388,7 +513,7 @@ export class UiRunService {
});
await writeRunMeta({
stateRoot: config.orchestration.stateRoot,
stateRoot: runtime.stateRoot,
sessionId,
run: record,
});
@@ -404,6 +529,7 @@ export class UiRunService {
},
},
signal: controller.signal,
...(session ? { sessionMetadata: session } : {}),
});
const completedRecord = this.runHistory.get(runId);
@@ -419,7 +545,7 @@ export class UiRunService {
this.runHistory.set(runId, next);
await writeRunMeta({
stateRoot: config.orchestration.stateRoot,
stateRoot: runtime.stateRoot,
sessionId,
run: next,
});
@@ -439,7 +565,7 @@ export class UiRunService {
this.runHistory.set(runId, next);
await writeRunMeta({
stateRoot: config.orchestration.stateRoot,
stateRoot: runtime.stateRoot,
sessionId,
run: next,
});

View File

@@ -23,6 +23,14 @@ type StartRunRequest = {
provider?: RunProvider;
};
type CreateSessionRequest = {
projectPath: string;
};
type CloseSessionRequest = {
mergeToProject?: boolean;
};
function parsePort(value: string | undefined): number {
const parsed = Number(value ?? "4317");
if (!Number.isInteger(parsed) || parsed < 1 || parsed > 65535) {
@@ -102,6 +110,13 @@ function ensureProvider(value: unknown): RunProvider {
return value === "claude" ? "claude" : "codex";
}
/**
 * Validates that an arbitrary request value is a non-blank string.
 *
 * @returns the trimmed string.
 * @throws Error naming the field when the value is missing, not a
 *   string, or whitespace-only.
 */
function ensureNonEmptyString(value: unknown, field: string): string {
  const trimmed = typeof value === "string" ? value.trim() : "";
  if (trimmed.length === 0) {
    throw new Error(`Field "${field}" is required.`);
  }
  return trimmed;
}
async function readRuntimePaths(configStore: UiConfigStore, workspaceRoot: string): Promise<{
stateRoot: string;
runtimeEventLogPath: string;
@@ -299,6 +314,20 @@ async function handleApiRequest(input: {
}
if (pathname === "/api/sessions") {
if (method === "POST") {
const body = await parseJsonBody<CreateSessionRequest>(request);
const projectPath = ensureNonEmptyString(body.projectPath, "projectPath");
const session = await runService.createSession({
projectPath,
});
sendJson(response, 201, {
ok: true,
session,
});
return true;
}
if (method !== "GET") {
methodNotAllowed(response);
return true;
@@ -309,10 +338,12 @@ async function handleApiRequest(input: {
stateRoot,
runtimeEventLogPath,
});
const metadata = await runService.listSessions();
sendJson(response, 200, {
ok: true,
sessions,
sessionMetadata: metadata,
runs: runService.listRuns(),
});
return true;
@@ -362,6 +393,118 @@ async function handleApiRequest(input: {
return true;
}
if (pathname.startsWith("/api/sessions/") && pathname.endsWith("/run")) {
if (method !== "POST") {
methodNotAllowed(response);
return true;
}
const sessionId = toRelativePathFromApi(pathname.slice("/api/sessions/".length, -"/run".length));
if (!sessionId) {
sendJson(response, 400, {
ok: false,
error: "Session id is required.",
});
return true;
}
const body = await parseJsonBody<StartRunRequest>(request);
if (typeof body.prompt !== "string" || body.prompt.trim().length === 0) {
sendJson(response, 400, {
ok: false,
error: 'Field "prompt" is required.',
});
return true;
}
const manifestSource = (() => {
if (body.manifest !== undefined) {
return body.manifest;
}
if (typeof body.manifestPath === "string" && body.manifestPath.trim().length > 0) {
return undefined;
}
return undefined;
})();
const resolvedManifest = manifestSource ?? (() => {
if (!body.manifestPath) {
return undefined;
}
return body.manifestPath;
})();
let manifest: unknown;
if (typeof resolvedManifest === "string") {
manifest = (await manifestStore.read(resolvedManifest)).source;
} else if (resolvedManifest !== undefined) {
manifest = resolvedManifest;
}
if (!manifest) {
sendJson(response, 400, {
ok: false,
error: "A manifest or manifestPath is required to start a run.",
});
return true;
}
const record = await runService.startRun({
prompt: body.prompt,
manifest,
manifestPath: body.manifestPath,
sessionId,
topologyHint: body.topologyHint,
initialFlags: ensureBooleanRecord(body.initialFlags),
runtimeContextOverrides: ensureRuntimeContext(body.runtimeContextOverrides),
simulateValidationNodeIds: ensureStringArray(body.simulateValidationNodeIds),
executionMode: ensureExecutionMode(body.executionMode),
provider: ensureProvider(body.provider),
});
sendJson(response, 202, {
ok: true,
run: record,
});
return true;
}
if (pathname.startsWith("/api/sessions/") && pathname.endsWith("/close")) {
if (method !== "POST") {
methodNotAllowed(response);
return true;
}
const sessionId = toRelativePathFromApi(pathname.slice("/api/sessions/".length, -"/close".length));
if (!sessionId) {
sendJson(response, 400, {
ok: false,
error: "Session id is required.",
});
return true;
}
let body: CloseSessionRequest = {};
try {
body = await parseJsonBody<CloseSessionRequest>(request);
} catch (error) {
const message = error instanceof Error ? error.message : String(error);
if (message !== "Request body is required.") {
throw error;
}
}
const session = await runService.closeSession({
sessionId,
mergeToProject: body.mergeToProject === true,
});
sendJson(response, 200, {
ok: true,
session,
});
return true;
}
if (pathname === "/api/runs") {
if (method === "GET") {
sendJson(response, 200, {

View File

@@ -614,6 +614,7 @@ test("runs parallel topology blocks concurrently and routes via domain-event edg
projectContextPatch: {
enqueueTasks: [
{
taskId: "task-integrate",
id: "task-integrate",
title: "Integrate feature branches",
status: "pending",

View File

@@ -28,6 +28,7 @@ test("project context store reads defaults and applies domain patches", async ()
},
enqueueTasks: [
{
taskId: "task-1",
id: "task-1",
title: "Build parser",
status: "pending",
@@ -38,11 +39,13 @@ test("project context store reads defaults and applies domain patches", async ()
const updated = await store.patchState({
upsertTasks: [
{
taskId: "task-1",
id: "task-1",
title: "Build parser",
status: "in_progress",
},
{
taskId: "task-2",
id: "task-2",
title: "Add tests",
status: "pending",
@@ -70,6 +73,7 @@ test("project context parser merges missing root keys with defaults", async () =
{
taskQueue: [
{
taskId: "task-1",
id: "task-1",
title: "Migrate",
status: "pending",

View File

@@ -1,10 +1,14 @@
import test from "node:test";
import assert from "node:assert/strict";
import { mkdtemp, writeFile } from "node:fs/promises";
import { execFile } from "node:child_process";
import { mkdtemp, mkdir, stat, writeFile } from "node:fs/promises";
import { tmpdir } from "node:os";
import { resolve } from "node:path";
import { promisify } from "node:util";
import { UiRunService, readRunMetaBySession } from "../src/ui/run-service.js";
const execFileAsync = promisify(execFile);
async function waitForTerminalRun(
runService: UiRunService,
runId: string,
@@ -94,3 +98,89 @@ test("run service persists failure when pipeline summary is failure", async () =
});
assert.equal(persisted?.status, "failure");
});
// End-to-end explicit session lifecycle through the run service:
// create a session bound to a real git project, run a mock pipeline
// inside it, close the session, and verify the base workspace is gone.
test("run service creates, runs, and closes explicit sessions", async () => {
  const workspaceRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-run-service-session-"));
  const stateRoot = resolve(workspaceRoot, "state");
  const envPath = resolve(workspaceRoot, ".env");
  const projectPath = resolve(workspaceRoot, "project");
  // Seed a throwaway git repository with a single commit so the
  // configured worktree base ref (HEAD) resolves at session creation.
  await mkdir(projectPath, { recursive: true });
  await execFileAsync("git", ["init", projectPath], { encoding: "utf8" });
  await execFileAsync("git", ["-C", projectPath, "config", "user.name", "AI Ops"], { encoding: "utf8" });
  await execFileAsync("git", ["-C", projectPath, "config", "user.email", "ai-ops@example.local"], { encoding: "utf8" });
  await writeFile(resolve(projectPath, "README.md"), "# project\n", "utf8");
  await execFileAsync("git", ["-C", projectPath, "add", "README.md"], { encoding: "utf8" });
  await execFileAsync("git", ["-C", projectPath, "commit", "-m", "initial"], { encoding: "utf8" });
  // Point the service at an isolated state root and worktree root via
  // its env file.
  await writeFile(
    envPath,
    [
      `AGENT_STATE_ROOT=${stateRoot}`,
      "AGENT_WORKTREE_ROOT=.ai_ops/worktrees",
      "AGENT_WORKTREE_BASE_REF=HEAD",
    ].join("\n"),
    "utf8",
  );
  const runService = new UiRunService({
    workspaceRoot,
    envFilePath: ".env",
  });
  const createdSession = await runService.createSession({
    projectPath,
  });
  // Fresh sessions are reported as active.
  assert.equal(createdSession.sessionStatus, "active");
  // Minimal single-node sequential manifest; "mock" execution mode
  // means no real provider calls are made.
  const manifest = {
    schemaVersion: "1",
    topologies: ["sequential"],
    personas: [
      {
        id: "writer",
        displayName: "Writer",
        systemPromptTemplate: "Write draft",
        toolClearance: {
          allowlist: ["read_file", "write_file"],
          banlist: [],
        },
      },
    ],
    relationships: [],
    topologyConstraints: {
      maxDepth: 1,
      maxRetries: 0,
    },
    pipeline: {
      entryNodeId: "write-node",
      nodes: [
        {
          id: "write-node",
          actorId: "writer-actor",
          personaId: "writer",
        },
      ],
      edges: [],
    },
  };
  const started = await runService.startRun({
    prompt: "complete task",
    manifest,
    sessionId: createdSession.sessionId,
    executionMode: "mock",
  });
  const terminalStatus = await waitForTerminalRun(runService, started.runId);
  assert.equal(terminalStatus, "success");
  const closed = await runService.closeSession({
    sessionId: createdSession.sessionId,
  });
  assert.equal(closed.sessionStatus, "closed");
  // Closing without mergeToProject must still tear down the session's
  // base workspace directory on disk.
  await assert.rejects(() => stat(createdSession.baseWorkspacePath), {
    code: "ENOENT",
  });
});

View File

@@ -0,0 +1,116 @@
import test from "node:test";
import assert from "node:assert/strict";
import { execFile } from "node:child_process";
import { mkdtemp, mkdir, readFile, writeFile, stat } from "node:fs/promises";
import { tmpdir } from "node:os";
import { resolve } from "node:path";
import { promisify } from "node:util";
import {
FileSystemSessionMetadataStore,
SessionWorktreeManager,
type SessionMetadata,
} from "../src/agents/session-lifecycle.js";
// Promisified child_process.execFile used for all git invocations below.
const execFileAsync = promisify(execFile);

/** Runs `git` with the given arguments and returns its trimmed stdout. */
async function git(args: string[]): Promise<string> {
  const result = await execFileAsync("git", args, { encoding: "utf8" });
  return result.stdout.trim();
}
// Verifies create/list/update/read round-trips on the filesystem-backed
// session metadata store using a temporary state root.
test("session metadata store persists and updates session metadata", async () => {
  const stateRoot = await mkdtemp(resolve(tmpdir(), "ai-ops-session-store-"));
  const store = new FileSystemSessionMetadataStore({ stateRoot });
  const created = await store.createSession({
    sessionId: "session-abc",
    projectPath: resolve(stateRoot, "project"),
    baseWorkspacePath: resolve(stateRoot, "worktrees", "session-abc", "base"),
  });
  // New sessions start out active under the requested id.
  assert.equal(created.sessionStatus, "active");
  assert.equal(created.sessionId, "session-abc");
  const listed = await store.listSessions();
  assert.equal(listed.length, 1);
  assert.equal(listed[0]?.sessionId, "session-abc");
  const updated = await store.updateSession("session-abc", {
    sessionStatus: "closed",
  });
  assert.equal(updated.sessionStatus, "closed");
  // The status change must survive a fresh read from disk.
  const readBack = await store.readSession("session-abc");
  assert.equal(readBack?.sessionStatus, "closed");
});
// Exercises the full worktree lifecycle against a real git repository:
// base workspace provisioning, task worktree creation, merge of task
// output back into the base, and cleanup on session close.
test("session worktree manager provisions and merges task worktrees", async () => {
  const root = await mkdtemp(resolve(tmpdir(), "ai-ops-session-worktree-"));
  const projectPath = resolve(root, "project");
  const worktreeRoot = resolve(root, "worktrees");
  // Seed a git repo with one commit so baseRef "HEAD" resolves.
  await mkdir(projectPath, { recursive: true });
  await git(["init", projectPath]);
  await git(["-C", projectPath, "config", "user.name", "AI Ops"]);
  await git(["-C", projectPath, "config", "user.email", "ai-ops@example.local"]);
  await writeFile(resolve(projectPath, "README.md"), "# project\n", "utf8");
  await git(["-C", projectPath, "add", "README.md"]);
  await git(["-C", projectPath, "commit", "-m", "initial commit"]);
  const manager = new SessionWorktreeManager({
    worktreeRoot,
    baseRef: "HEAD",
  });
  const sessionId = "session-1";
  const baseWorkspacePath = manager.resolveBaseWorkspacePath(sessionId);
  await manager.initializeSessionBaseWorkspace({
    sessionId,
    projectPath,
    baseWorkspacePath,
  });
  // The base workspace directory must exist after initialization.
  const baseStats = await stat(baseWorkspacePath);
  assert.equal(baseStats.isDirectory(), true);
  const taskWorktreePath = (
    await manager.ensureTaskWorktree({
      sessionId,
      taskId: "task-1",
      baseWorkspacePath,
    })
  ).taskWorktreePath;
  // Simulate task output, then merge it into the session base workspace.
  await writeFile(resolve(taskWorktreePath, "feature.txt"), "task output\n", "utf8");
  await manager.mergeTaskIntoBase({
    taskId: "task-1",
    baseWorkspacePath,
    taskWorktreePath,
  });
  const mergedFile = await readFile(resolve(baseWorkspacePath, "feature.txt"), "utf8");
  assert.equal(mergedFile, "task output\n");
  const session: SessionMetadata = {
    sessionId,
    projectPath,
    baseWorkspacePath,
    sessionStatus: "active",
    createdAt: new Date().toISOString(),
    updatedAt: new Date().toISOString(),
  };
  // Closing the session must remove the base workspace directory.
  await manager.closeSession({
    session,
    taskWorktreePaths: [],
    mergeBaseIntoProject: false,
  });
  await assert.rejects(() => stat(baseWorkspacePath), {
    code: "ENOENT",
  });
});